repo_url (stringclasses, 14 values) | instruction (stringlengths, 17–1.08k) | base_commit (stringlengths, 7–7) | requirements_txt (stringclasses, 14 values) | testbed_environment (stringclasses, 2 values) | solution_commit (stringlengths, 7–9) | solution_patch (stringlengths, 238–19.4k) | modified_files (listlengths, 0–6) | id (stringlengths, 3–5) | language (stringclasses, 2 values) | test_script (stringlengths, 553–11.4k)
---|---|---|---|---|---|---|---|---|---|---|
https://github.com/teamqurrent/httpx | Add `None` as the default value for the `file` parameter of the `NetRCAuth` class constructor inside `httpx/_auth.py` | 4b5a92e | sniffio
rfc3986
httpcore>=0.18.0,<0.19.0
certifi
idna | python3.9 | c1cc6b2 | diff --git a/httpx/_auth.py b/httpx/_auth.py
--- a/httpx/_auth.py
+++ b/httpx/_auth.py
@@ -147,7 +147,7 @@ class NetRCAuth(Auth):
     Use a 'netrc' file to lookup basic auth credentials based on the url host.
     """
 
-    def __init__(self, file: typing.Optional[str]):
+    def __init__(self, file: typing.Optional[str] = None):
         self._netrc_info = netrc.netrc(file)
 
     def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
| [
{
"content": "import hashlib\nimport netrc\nimport os\nimport re\nimport time\nimport typing\nfrom base64 import b64encode\nfrom urllib.request import parse_http_list\n\nfrom ._exceptions import ProtocolError\nfrom ._models import Request, Response\nfrom ._utils import to_bytes, to_str, unquote\n\nif typing.TYPE_CHECKING: # pragma: no cover\n from hashlib import _Hash\n\n\nclass Auth:\n \"\"\"\n Base class for all authentication schemes.\n\n To implement a custom authentication scheme, subclass `Auth` and override\n the `.auth_flow()` method.\n\n If the authentication scheme does I/O such as disk access or network calls, or uses\n synchronization primitives such as locks, you should override `.sync_auth_flow()`\n and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized\n implementations that will be used by `Client` and `AsyncClient` respectively.\n \"\"\"\n\n requires_request_body = False\n requires_response_body = False\n\n def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:\n \"\"\"\n Execute the authentication flow.\n\n To dispatch a request, `yield` it:\n\n ```\n yield request\n ```\n\n The client will `.send()` the response back into the flow generator. You can\n access it like so:\n\n ```\n response = yield request\n ```\n\n A `return` (or reaching the end of the generator) will result in the\n client returning the last response obtained from the server.\n\n You can dispatch as many requests as is necessary.\n \"\"\"\n yield request\n\n def sync_auth_flow(\n self, request: Request\n ) -> typing.Generator[Request, Response, None]:\n \"\"\"\n Execute the authentication flow synchronously.\n\n By default, this defers to `.auth_flow()`. You should override this method\n when the authentication scheme does I/O and/or uses concurrency primitives.\n \"\"\"\n if self.requires_request_body:\n request.read()\n\n flow = self.auth_flow(request)\n request = next(flow)\n\n while True:\n response = yield request\n if self.requires_response_body:\n response.read()\n\n try:\n request = flow.send(response)\n except StopIteration:\n break\n\n async def async_auth_flow(\n self, request: Request\n ) -> typing.AsyncGenerator[Request, Response]:\n \"\"\"\n Execute the authentication flow asynchronously.\n\n By default, this defers to `.auth_flow()`. 
You should override this method\n when the authentication scheme does I/O and/or uses concurrency primitives.\n \"\"\"\n if self.requires_request_body:\n await request.aread()\n\n flow = self.auth_flow(request)\n request = next(flow)\n\n while True:\n response = yield request\n if self.requires_response_body:\n await response.aread()\n\n try:\n request = flow.send(response)\n except StopIteration:\n break\n\n\nclass FunctionAuth(Auth):\n \"\"\"\n Allows the 'auth' argument to be passed as a simple callable function,\n that takes the request, and returns a new, modified request.\n \"\"\"\n\n def __init__(self, func: typing.Callable[[Request], Request]) -> None:\n self._func = func\n\n def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:\n yield self._func(request)\n\n\nclass BasicAuth(Auth):\n \"\"\"\n Allows the 'auth' argument to be passed as a (username, password) pair,\n and uses HTTP Basic authentication.\n \"\"\"\n\n def __init__(\n self, username: typing.Union[str, bytes], password: typing.Union[str, bytes]\n ):\n self._auth_header = self._build_auth_header(username, password)\n\n def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:\n request.headers[\"Authorization\"] = self._auth_header\n yield request\n\n def _build_auth_header(\n self, username: typing.Union[str, bytes], password: typing.Union[str, bytes]\n ) -> str:\n userpass = b\":\".join((to_bytes(username), to_bytes(password)))\n token = b64encode(userpass).decode()\n return f\"Basic {token}\"\n\n\nclass NetRCAuth(Auth):\n \"\"\"\n Use a 'netrc' file to lookup basic auth credentials based on the url host.\n \"\"\"\n\n def __init__(self, file: typing.Optional[str]):\n self._netrc_info = netrc.netrc(file)\n\n def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:\n auth_info = self._netrc_info.authenticators(request.url.host)\n if auth_info is None or not auth_info[2]:\n # The netrc file did not have authentication credentials for this host.\n yield request\n else:\n # Build a basic auth header with credentials from the netrc file.\n request.headers[\"Authorization\"] = self._build_auth_header(\n username=auth_info[0], password=auth_info[2]\n )\n yield request\n\n def _build_auth_header(\n self, username: typing.Union[str, bytes], password: typing.Union[str, bytes]\n ) -> str:\n userpass = b\":\".join((to_bytes(username), to_bytes(password)))\n token = b64encode(userpass).decode()\n return f\"Basic {token}\"\n\n\nclass DigestAuth(Auth):\n _ALGORITHM_TO_HASH_FUNCTION: typing.Dict[str, typing.Callable[[bytes], \"_Hash\"]] = {\n \"MD5\": hashlib.md5,\n \"MD5-SESS\": hashlib.md5,\n \"SHA\": hashlib.sha1,\n \"SHA-SESS\": hashlib.sha1,\n \"SHA-256\": hashlib.sha256,\n \"SHA-256-SESS\": hashlib.sha256,\n \"SHA-512\": hashlib.sha512,\n \"SHA-512-SESS\": hashlib.sha512,\n }\n\n def __init__(\n self, username: typing.Union[str, bytes], password: typing.Union[str, bytes]\n ) -> None:\n self._username = to_bytes(username)\n self._password = to_bytes(password)\n self._last_challenge: typing.Optional[_DigestAuthChallenge] = None\n self._nonce_count = 1\n\n def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:\n if self._last_challenge:\n request.headers[\"Authorization\"] = self._build_auth_header(\n request, self._last_challenge\n )\n\n response = yield request\n\n if response.status_code != 401 or \"www-authenticate\" not in response.headers:\n # If the response is not a 401 then we don't\n # need to build an 
authenticated request.\n return\n\n for auth_header in response.headers.get_list(\"www-authenticate\"):\n if auth_header.lower().startswith(\"digest \"):\n break\n else:\n # If the response does not include a 'WWW-Authenticate: Digest ...'\n # header, then we don't need to build an authenticated request.\n return\n\n self._last_challenge = self._parse_challenge(request, response, auth_header)\n self._nonce_count = 1\n\n request.headers[\"Authorization\"] = self._build_auth_header(\n request, self._last_challenge\n )\n yield request\n\n def _parse_challenge(\n self, request: Request, response: Response, auth_header: str\n ) -> \"_DigestAuthChallenge\":\n \"\"\"\n Returns a challenge from a Digest WWW-Authenticate header.\n These take the form of:\n `Digest realm=\"realm@host.com\",qop=\"auth,auth-int\",nonce=\"abc\",opaque=\"xyz\"`\n \"\"\"\n scheme, _, fields = auth_header.partition(\" \")\n\n # This method should only ever have been called with a Digest auth header.\n assert scheme.lower() == \"digest\"\n\n header_dict: typing.Dict[str, str] = {}\n for field in parse_http_list(fields):\n key, value = field.strip().split(\"=\", 1)\n header_dict[key] = unquote(value)\n\n try:\n realm = header_dict[\"realm\"].encode()\n nonce = header_dict[\"nonce\"].encode()\n algorithm = header_dict.get(\"algorithm\", \"MD5\")\n opaque = header_dict[\"opaque\"].encode() if \"opaque\" in header_dict else None\n qop = header_dict[\"qop\"].encode() if \"qop\" in header_dict else None\n return _DigestAuthChallenge(\n realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop\n )\n except KeyError as exc:\n message = \"Malformed Digest WWW-Authenticate header\"\n raise ProtocolError(message, request=request) from exc\n\n def _build_auth_header(\n self, request: Request, challenge: \"_DigestAuthChallenge\"\n ) -> str:\n hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()]\n\n def digest(data: bytes) -> bytes:\n return hash_func(data).hexdigest().encode()\n\n A1 = b\":\".join((self._username, challenge.realm, self._password))\n\n path = request.url.raw_path\n A2 = b\":\".join((request.method.encode(), path))\n # TODO: implement auth-int\n HA2 = digest(A2)\n\n nc_value = b\"%08x\" % self._nonce_count\n cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce)\n self._nonce_count += 1\n\n HA1 = digest(A1)\n if challenge.algorithm.lower().endswith(\"-sess\"):\n HA1 = digest(b\":\".join((HA1, challenge.nonce, cnonce)))\n\n qop = self._resolve_qop(challenge.qop, request=request)\n if qop is None:\n digest_data = [HA1, challenge.nonce, HA2]\n else:\n digest_data = [challenge.nonce, nc_value, cnonce, qop, HA2]\n key_digest = b\":\".join(digest_data)\n\n format_args = {\n \"username\": self._username,\n \"realm\": challenge.realm,\n \"nonce\": challenge.nonce,\n \"uri\": path,\n \"response\": digest(b\":\".join((HA1, key_digest))),\n \"algorithm\": challenge.algorithm.encode(),\n }\n if challenge.opaque:\n format_args[\"opaque\"] = challenge.opaque\n if qop:\n format_args[\"qop\"] = b\"auth\"\n format_args[\"nc\"] = nc_value\n format_args[\"cnonce\"] = cnonce\n\n return \"Digest \" + self._get_header_value(format_args)\n\n def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes:\n s = str(nonce_count).encode()\n s += nonce\n s += time.ctime().encode()\n s += os.urandom(8)\n\n return hashlib.sha1(s).hexdigest()[:16].encode()\n\n def _get_header_value(self, header_fields: typing.Dict[str, bytes]) -> str:\n NON_QUOTED_FIELDS = (\"algorithm\", \"qop\", \"nc\")\n 
QUOTED_TEMPLATE = '{}=\"{}\"'\n NON_QUOTED_TEMPLATE = \"{}={}\"\n\n header_value = \"\"\n for i, (field, value) in enumerate(header_fields.items()):\n if i > 0:\n header_value += \", \"\n template = (\n QUOTED_TEMPLATE\n if field not in NON_QUOTED_FIELDS\n else NON_QUOTED_TEMPLATE\n )\n header_value += template.format(field, to_str(value))\n\n return header_value\n\n def _resolve_qop(\n self, qop: typing.Optional[bytes], request: Request\n ) -> typing.Optional[bytes]:\n if qop is None:\n return None\n qops = re.split(b\", ?\", qop)\n if b\"auth\" in qops:\n return b\"auth\"\n\n if qops == [b\"auth-int\"]:\n raise NotImplementedError(\"Digest auth-int support is not yet implemented\")\n\n message = f'Unexpected qop value \"{qop!r}\" in digest auth'\n raise ProtocolError(message, request=request)\n\n\nclass _DigestAuthChallenge(typing.NamedTuple):\n realm: bytes\n nonce: bytes\n algorithm: str\n opaque: typing.Optional[bytes]\n qop: typing.Optional[bytes]\n",
"path": "httpx/_auth.py"
}
] | 0_0 | python | import sys
import unittest
import inspect


class TestNetRCAuthFileParam(unittest.TestCase):
    def test_netrcauth_file_param_default(self):
        from httpx._auth import NetRCAuth
        if hasattr(NetRCAuth, "__init__"):
            init_method = getattr(NetRCAuth, "__init__")
            method_signature = inspect.signature(init_method)
            if "file" in method_signature.parameters:
                param = method_signature.parameters["file"]
                self.assertIs(param.default, None, "Default value for 'file' parameter is not None")
            else:
                self.fail("The 'file' parameter is not present in NetRCAuth.__init__ method.")
        else:
            self.fail("NetRCAuth does not have an __init__ method.")


def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestNetRCAuthFileParam))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == "__main__":
    main()
|
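A minimal sketch of what this row's patch enables, assuming the patched checkout is importable: with `file` defaulting to `None`, `NetRCAuth()` can now be constructed without arguments, and the stdlib `netrc` module falls back to looking up `~/.netrc` on its own.

```python
import inspect

from httpx._auth import NetRCAuth

# After the patch, the `file` parameter defaults to None.
print(inspect.signature(NetRCAuth.__init__).parameters["file"].default)  # None

try:
    # netrc.netrc(None) reads ~/.netrc by default.
    auth = NetRCAuth()
except FileNotFoundError:
    pass  # no ~/.netrc exists on this machine
```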
https://github.com/teamqurrent/httpx | Allow a tuple or list for multipart values in the `_iter_fields` method inside `httpx/_multipart.py` | ccd98b1 | sniffio
rfc3986
httpcore>=0.18.0,<0.19.0
certifi
idna | python3.9 | 965b8ad | diff --git a/httpx/_multipart.py b/httpx/_multipart.py
--- a/httpx/_multipart.py
+++ b/httpx/_multipart.py
@@ -205,7 +205,7 @@ class MultipartStream(SyncByteStream, AsyncByteStream):
         self, data: dict, files: RequestFiles
     ) -> typing.Iterator[typing.Union[FileField, DataField]]:
         for name, value in data.items():
-            if isinstance(value, list):
+            if isinstance(value, (tuple, list)):
                 for item in value:
                     yield DataField(name=name, value=item)
             else:
| [
{
"content": "import binascii\nimport io\nimport os\nimport typing\nfrom pathlib import Path\n\nfrom ._types import (\n AsyncByteStream,\n FileContent,\n FileTypes,\n RequestFiles,\n SyncByteStream,\n)\nfrom ._utils import (\n format_form_param,\n guess_content_type,\n peek_filelike_length,\n primitive_value_to_str,\n to_bytes,\n)\n\n\ndef get_multipart_boundary_from_content_type(\n content_type: typing.Optional[bytes],\n) -> typing.Optional[bytes]:\n if not content_type or not content_type.startswith(b\"multipart/form-data\"):\n return None\n # parse boundary according to\n # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1\n if b\";\" in content_type:\n for section in content_type.split(b\";\"):\n if section.strip().lower().startswith(b\"boundary=\"):\n return section.strip()[len(b\"boundary=\") :].strip(b'\"')\n return None\n\n\nclass DataField:\n \"\"\"\n A single form field item, within a multipart form field.\n \"\"\"\n\n def __init__(\n self, name: str, value: typing.Union[str, bytes, int, float, None]\n ) -> None:\n if not isinstance(name, str):\n raise TypeError(\n f\"Invalid type for name. Expected str, got {type(name)}: {name!r}\"\n )\n if value is not None and not isinstance(value, (str, bytes, int, float)):\n raise TypeError(\n f\"Invalid type for value. Expected primitive type, got {type(value)}: {value!r}\"\n )\n self.name = name\n self.value: typing.Union[str, bytes] = (\n value if isinstance(value, bytes) else primitive_value_to_str(value)\n )\n\n def render_headers(self) -> bytes:\n if not hasattr(self, \"_headers\"):\n name = format_form_param(\"name\", self.name)\n self._headers = b\"\".join(\n [b\"Content-Disposition: form-data; \", name, b\"\\r\\n\\r\\n\"]\n )\n\n return self._headers\n\n def render_data(self) -> bytes:\n if not hasattr(self, \"_data\"):\n self._data = to_bytes(self.value)\n\n return self._data\n\n def get_length(self) -> int:\n headers = self.render_headers()\n data = self.render_data()\n return len(headers) + len(data)\n\n def render(self) -> typing.Iterator[bytes]:\n yield self.render_headers()\n yield self.render_data()\n\n\nclass FileField:\n \"\"\"\n A single file field item, within a multipart form field.\n \"\"\"\n\n CHUNK_SIZE = 64 * 1024\n\n def __init__(self, name: str, value: FileTypes) -> None:\n self.name = name\n\n fileobj: FileContent\n\n headers: typing.Dict[str, str] = {}\n content_type: typing.Optional[str] = None\n\n # This large tuple based API largely mirror's requests' API\n # It would be good to think of better APIs for this that we could include in httpx 2.0\n # since variable length tuples (especially of 4 elements) are quite unwieldly\n if isinstance(value, tuple):\n if len(value) == 2:\n # neither the 3rd parameter (content_type) nor the 4th (headers) was included\n filename, fileobj = value # type: ignore\n elif len(value) == 3:\n filename, fileobj, content_type = value # type: ignore\n else:\n # all 4 parameters included\n filename, fileobj, content_type, headers = value # type: ignore\n else:\n filename = Path(str(getattr(value, \"name\", \"upload\"))).name\n fileobj = value\n\n if content_type is None:\n content_type = guess_content_type(filename)\n\n has_content_type_header = any(\"content-type\" in key.lower() for key in headers)\n if content_type is not None and not has_content_type_header:\n # note that unlike requests, we ignore the content_type\n # provided in the 3rd tuple element if it is also included in the headers\n # requests does the opposite (it overwrites the header with the 3rd tuple element)\n 
headers[\"Content-Type\"] = content_type\n\n if isinstance(fileobj, (str, io.StringIO)):\n raise TypeError(f\"Expected bytes or bytes-like object got: {type(fileobj)}\")\n\n self.filename = filename\n self.file = fileobj\n self.headers = headers\n\n def get_length(self) -> int:\n headers = self.render_headers()\n\n if isinstance(self.file, (str, bytes)):\n return len(headers) + len(to_bytes(self.file))\n\n # Let's do our best not to read `file` into memory.\n file_length = peek_filelike_length(self.file)\n if file_length is None:\n # As a last resort, read file and cache contents for later.\n assert not hasattr(self, \"_data\")\n self._data = to_bytes(self.file.read())\n file_length = len(self._data)\n\n return len(headers) + file_length\n\n def render_headers(self) -> bytes:\n if not hasattr(self, \"_headers\"):\n parts = [\n b\"Content-Disposition: form-data; \",\n format_form_param(\"name\", self.name),\n ]\n if self.filename:\n filename = format_form_param(\"filename\", self.filename)\n parts.extend([b\"; \", filename])\n for header_name, header_value in self.headers.items():\n key, val = f\"\\r\\n{header_name}: \".encode(), header_value.encode()\n parts.extend([key, val])\n parts.append(b\"\\r\\n\\r\\n\")\n self._headers = b\"\".join(parts)\n\n return self._headers\n\n def render_data(self) -> typing.Iterator[bytes]:\n if isinstance(self.file, (str, bytes)):\n yield to_bytes(self.file)\n return\n\n if hasattr(self, \"_data\"):\n # Already rendered.\n yield self._data\n return\n\n if hasattr(self.file, \"seek\"):\n self.file.seek(0)\n\n chunk = self.file.read(self.CHUNK_SIZE)\n while chunk:\n yield to_bytes(chunk)\n chunk = self.file.read(self.CHUNK_SIZE)\n\n def render(self) -> typing.Iterator[bytes]:\n yield self.render_headers()\n yield from self.render_data()\n\n\nclass MultipartStream(SyncByteStream, AsyncByteStream):\n \"\"\"\n Request content as streaming multipart encoded form data.\n \"\"\"\n\n def __init__(\n self, data: dict, files: RequestFiles, boundary: typing.Optional[bytes] = None\n ) -> None:\n if boundary is None:\n boundary = binascii.hexlify(os.urandom(16))\n\n self.boundary = boundary\n self.content_type = \"multipart/form-data; boundary=%s\" % boundary.decode(\n \"ascii\"\n )\n self.fields = list(self._iter_fields(data, files))\n\n def _iter_fields(\n self, data: dict, files: RequestFiles\n ) -> typing.Iterator[typing.Union[FileField, DataField]]:\n for name, value in data.items():\n if isinstance(value, list):\n for item in value:\n yield DataField(name=name, value=item)\n else:\n yield DataField(name=name, value=value)\n\n file_items = files.items() if isinstance(files, typing.Mapping) else files\n for name, value in file_items:\n yield FileField(name=name, value=value)\n\n def iter_chunks(self) -> typing.Iterator[bytes]:\n for field in self.fields:\n yield b\"--%s\\r\\n\" % self.boundary\n yield from field.render()\n yield b\"\\r\\n\"\n yield b\"--%s--\\r\\n\" % self.boundary\n\n def iter_chunks_lengths(self) -> typing.Iterator[int]:\n boundary_length = len(self.boundary)\n # Follow closely what `.iter_chunks()` does.\n for field in self.fields:\n yield 2 + boundary_length + 2\n yield field.get_length()\n yield 2\n yield 2 + boundary_length + 4\n\n def get_content_length(self) -> int:\n return sum(self.iter_chunks_lengths())\n\n # Content stream interface.\n\n def get_headers(self) -> typing.Dict[str, str]:\n content_length = str(self.get_content_length())\n content_type = self.content_type\n return {\"Content-Length\": content_length, \"Content-Type\": 
content_type}\n\n def __iter__(self) -> typing.Iterator[bytes]:\n for chunk in self.iter_chunks():\n yield chunk\n\n async def __aiter__(self) -> typing.AsyncIterator[bytes]:\n for chunk in self.iter_chunks():\n yield chunk\n",
"path": "httpx/_multipart.py"
}
] | 0_1 | python | import sys
import unittest
import inspect


class TestMultipartStreamIterFields(unittest.TestCase):
    def test_iter_fields_code(self):
        from httpx._multipart import MultipartStream
        source_lines = inspect.getsourcelines(MultipartStream._iter_fields)
        found_isinstance_tuple = any("isinstance" in line and "tuple" in line for line in source_lines[0])
        self.assertTrue(found_isinstance_tuple, "The line with 'isinstance' and 'tuple' was not found in MultipartStream._iter_fields")


def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestMultipartStreamIterFields))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == "__main__":
    main()
|
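A short sketch of the behaviour this row's patch permits, assuming the patched checkout is importable: `_iter_fields` now expands tuple values into repeated form fields, just as it already did for lists.

```python
from httpx._multipart import MultipartStream

# A tuple value now yields one DataField per item, the same as a list.
stream = MultipartStream(
    data={"tags": ("a", "b")},    # tuple instead of list
    files=[],                     # no file fields needed for this demo
    boundary=b"demoboundary123",  # fixed boundary, chosen for illustration
)
body = b"".join(stream.iter_chunks())
assert body.count(b'name="tags"') == 2  # both tuple items were emitted
```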
https://github.com/teamqurrent/httpx | The `primitive_value_to_str` function inside `httpx/_utils.py` returns 'true', 'false', or '' when the value is a boolean or None, and returns str(value) otherwise. Modify the `primitive_value_to_str` function to return str(value) only if the value is of type str, float, or int, and otherwise raise a TypeError with the error message: 'Expected str, int, float, bool, or None. Got '{type}''. Update the file tests/models/test_queryparams.py to add a new test test_invalid_query_params() which expects a TypeError when bytes is passed. | 10a3b68 | sniffio
rfc3986
httpcore>=0.18.0,<0.19.0
certifi
idna | python3.9 | 4cbf13e | diff --git a/httpx/_utils.py b/httpx/_utils.py
--- a/httpx/_utils.py
+++ b/httpx/_utils.py
@@ -67,7 +67,11 @@ def primitive_value_to_str(value: "PrimitiveData") -> str:
return "false"
elif value is None:
return ""
- return str(value)
+ elif isinstance(value, (str, float, int)):
+ return str(value)
+ raise TypeError(
+ f"Expected str, int, float, bool, or None. Got {type(value).__name__!r}."
+ )
def is_known_encoding(encoding: str) -> bool:
diff --git a/tests/models/test_queryparams.py b/tests/models/test_queryparams.py
--- a/tests/models/test_queryparams.py
+++ b/tests/models/test_queryparams.py
@@ -87,6 +87,13 @@ def test_empty_query_params():
assert str(q) == "a="
+def test_invalid_query_params():
+ with pytest.raises(
+ TypeError, match=r"Expected str, int, float, bool, or None. Got 'bytes'."
+ ):
+ httpx.QueryParams({"a": b"bytes"})
+
+
def test_queryparam_update_is_hard_deprecated():
q = httpx.QueryParams("a=123")
with pytest.raises(RuntimeError):
| [
{
"content": "import codecs\nimport email.message\nimport logging\nimport mimetypes\nimport netrc\nimport os\nimport re\nimport sys\nimport time\nimport typing\nfrom pathlib import Path\nfrom urllib.request import getproxies\n\nimport sniffio\n\nfrom ._types import PrimitiveData\n\nif typing.TYPE_CHECKING: # pragma: no cover\n from ._urls import URL\n\n\n_HTML5_FORM_ENCODING_REPLACEMENTS = {'\"': \"%22\", \"\\\\\": \"\\\\\\\\\"}\n_HTML5_FORM_ENCODING_REPLACEMENTS.update(\n {chr(c): \"%{:02X}\".format(c) for c in range(0x1F + 1) if c != 0x1B}\n)\n_HTML5_FORM_ENCODING_RE = re.compile(\n r\"|\".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()])\n)\n\n\ndef normalize_header_key(\n value: typing.Union[str, bytes],\n lower: bool,\n encoding: typing.Optional[str] = None,\n) -> bytes:\n \"\"\"\n Coerce str/bytes into a strictly byte-wise HTTP header key.\n \"\"\"\n if isinstance(value, bytes):\n bytes_value = value\n else:\n bytes_value = value.encode(encoding or \"ascii\")\n\n return bytes_value.lower() if lower else bytes_value\n\n\ndef normalize_header_value(\n value: typing.Union[str, bytes], encoding: typing.Optional[str] = None\n) -> bytes:\n \"\"\"\n Coerce str/bytes into a strictly byte-wise HTTP header value.\n \"\"\"\n if isinstance(value, bytes):\n return value\n return value.encode(encoding or \"ascii\")\n\n\ndef primitive_value_to_str(value: \"PrimitiveData\") -> str:\n \"\"\"\n Coerce a primitive data type into a string value.\n\n Note that we prefer JSON-style 'true'/'false' for boolean values here.\n \"\"\"\n if value is True:\n return \"true\"\n elif value is False:\n return \"false\"\n elif value is None:\n return \"\"\n return str(value)\n\n\ndef is_known_encoding(encoding: str) -> bool:\n \"\"\"\n Return `True` if `encoding` is a known codec.\n \"\"\"\n try:\n codecs.lookup(encoding)\n except LookupError:\n return False\n return True\n\n\ndef format_form_param(name: str, value: str) -> bytes:\n \"\"\"\n Encode a name/value pair within a multipart form.\n \"\"\"\n\n def replacer(match: typing.Match[str]) -> str:\n return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)]\n\n value = _HTML5_FORM_ENCODING_RE.sub(replacer, value)\n return f'{name}=\"{value}\"'.encode()\n\n\n# Null bytes; no need to recreate these on each call to guess_json_utf\n_null = b\"\\x00\"\n_null2 = _null * 2\n_null3 = _null * 3\n\n\ndef guess_json_utf(data: bytes) -> typing.Optional[str]:\n # JSON always starts with two ASCII characters, so detection is as\n # easy as counting the nulls and from their location and count\n # determine the encoding. 
Also detect a BOM, if present.\n sample = data[:4]\n if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):\n return \"utf-32\" # BOM included\n if sample[:3] == codecs.BOM_UTF8:\n return \"utf-8-sig\" # BOM included, MS style (discouraged)\n if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):\n return \"utf-16\" # BOM included\n nullcount = sample.count(_null)\n if nullcount == 0:\n return \"utf-8\"\n if nullcount == 2:\n if sample[::2] == _null2: # 1st and 3rd are null\n return \"utf-16-be\"\n if sample[1::2] == _null2: # 2nd and 4th are null\n return \"utf-16-le\"\n # Did not detect 2 valid UTF-16 ascii-range characters\n if nullcount == 3:\n if sample[:3] == _null3:\n return \"utf-32-be\"\n if sample[1:] == _null3:\n return \"utf-32-le\"\n # Did not detect a valid UTF-32 ascii-range character\n return None\n\n\nclass NetRCInfo:\n def __init__(self, files: typing.Optional[typing.List[str]] = None) -> None:\n if files is None:\n files = [os.getenv(\"NETRC\", \"\"), \"~/.netrc\", \"~/_netrc\"]\n self.netrc_files = files\n\n @property\n def netrc_info(self) -> typing.Optional[netrc.netrc]:\n if not hasattr(self, \"_netrc_info\"):\n self._netrc_info = None\n for file_path in self.netrc_files:\n expanded_path = Path(file_path).expanduser()\n try:\n if expanded_path.is_file():\n self._netrc_info = netrc.netrc(str(expanded_path))\n break\n except (netrc.NetrcParseError, IOError): # pragma: no cover\n # Issue while reading the netrc file, ignore...\n pass\n return self._netrc_info\n\n def get_credentials(self, host: str) -> typing.Optional[typing.Tuple[str, str]]:\n if self.netrc_info is None:\n return None\n\n auth_info = self.netrc_info.authenticators(host)\n if auth_info is None or auth_info[2] is None:\n return None\n return (auth_info[0], auth_info[2])\n\n\ndef get_ca_bundle_from_env() -> typing.Optional[str]:\n if \"SSL_CERT_FILE\" in os.environ:\n ssl_file = Path(os.environ[\"SSL_CERT_FILE\"])\n if ssl_file.is_file():\n return str(ssl_file)\n if \"SSL_CERT_DIR\" in os.environ:\n ssl_path = Path(os.environ[\"SSL_CERT_DIR\"])\n if ssl_path.is_dir():\n return str(ssl_path)\n return None\n\n\ndef parse_header_links(value: str) -> typing.List[typing.Dict[str, str]]:\n \"\"\"\n Returns a list of parsed link headers, for more info see:\n https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link\n The generic syntax of those is:\n Link: < uri-reference >; param1=value1; param2=\"value2\"\n So for instance:\n Link; '<http:/.../front.jpeg>; type=\"image/jpeg\",<http://.../back.jpeg>;'\n would return\n [\n {\"url\": \"http:/.../front.jpeg\", \"type\": \"image/jpeg\"},\n {\"url\": \"http://.../back.jpeg\"},\n ]\n :param value: HTTP Link entity-header field\n :return: list of parsed link headers\n \"\"\"\n links: typing.List[typing.Dict[str, str]] = []\n replace_chars = \" '\\\"\"\n value = value.strip(replace_chars)\n if not value:\n return links\n for val in re.split(\", *<\", value):\n try:\n url, params = val.split(\";\", 1)\n except ValueError:\n url, params = val, \"\"\n link = {\"url\": url.strip(\"<> '\\\"\")}\n for param in params.split(\";\"):\n try:\n key, value = param.split(\"=\")\n except ValueError:\n break\n link[key.strip(replace_chars)] = value.strip(replace_chars)\n links.append(link)\n return links\n\n\ndef parse_content_type_charset(content_type: str) -> typing.Optional[str]:\n # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery.\n # See: https://peps.python.org/pep-0594/#cgi\n msg = email.message.Message()\n msg[\"content-type\"] = 
content_type\n return msg.get_content_charset(failobj=None)\n\n\nSENSITIVE_HEADERS = {\"authorization\", \"proxy-authorization\"}\n\n\ndef obfuscate_sensitive_headers(\n items: typing.Iterable[typing.Tuple[typing.AnyStr, typing.AnyStr]]\n) -> typing.Iterator[typing.Tuple[typing.AnyStr, typing.AnyStr]]:\n for k, v in items:\n if to_str(k.lower()) in SENSITIVE_HEADERS:\n v = to_bytes_or_str(\"[secure]\", match_type_of=v)\n yield k, v\n\n\n_LOGGER_INITIALIZED = False\nTRACE_LOG_LEVEL = 5\n\n\nclass Logger(logging.Logger):\n # Stub for type checkers.\n def trace(self, message: str, *args: typing.Any, **kwargs: typing.Any) -> None:\n ... # pragma: no cover\n\n\ndef get_logger(name: str) -> Logger:\n \"\"\"\n Get a `logging.Logger` instance, and optionally\n set up debug logging based on the HTTPX_LOG_LEVEL environment variable.\n \"\"\"\n global _LOGGER_INITIALIZED\n\n if not _LOGGER_INITIALIZED:\n _LOGGER_INITIALIZED = True\n logging.addLevelName(TRACE_LOG_LEVEL, \"TRACE\")\n\n log_level = os.environ.get(\"HTTPX_LOG_LEVEL\", \"\").upper()\n if log_level in (\"DEBUG\", \"TRACE\"):\n logger = logging.getLogger(\"httpx\")\n logger.setLevel(logging.DEBUG if log_level == \"DEBUG\" else TRACE_LOG_LEVEL)\n handler = logging.StreamHandler(sys.stderr)\n handler.setFormatter(\n logging.Formatter(\n fmt=\"%(levelname)s [%(asctime)s] %(name)s - %(message)s\",\n datefmt=\"%Y-%m-%d %H:%M:%S\",\n )\n )\n logger.addHandler(handler)\n\n logger = logging.getLogger(name)\n\n def trace(message: str, *args: typing.Any, **kwargs: typing.Any) -> None:\n logger.log(TRACE_LOG_LEVEL, message, *args, **kwargs)\n\n logger.trace = trace # type: ignore\n\n return typing.cast(Logger, logger)\n\n\ndef port_or_default(url: \"URL\") -> typing.Optional[int]:\n if url.port is not None:\n return url.port\n return {\"http\": 80, \"https\": 443}.get(url.scheme)\n\n\ndef same_origin(url: \"URL\", other: \"URL\") -> bool:\n \"\"\"\n Return 'True' if the given URLs share the same origin.\n \"\"\"\n return (\n url.scheme == other.scheme\n and url.host == other.host\n and port_or_default(url) == port_or_default(other)\n )\n\n\ndef is_https_redirect(url: \"URL\", location: \"URL\") -> bool:\n \"\"\"\n Return 'True' if 'location' is a HTTPS upgrade of 'url'\n \"\"\"\n if url.host != location.host:\n return False\n\n return (\n url.scheme == \"http\"\n and port_or_default(url) == 80\n and location.scheme == \"https\"\n and port_or_default(location) == 443\n )\n\n\ndef get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]:\n \"\"\"Gets proxy information from the environment\"\"\"\n\n # urllib.request.getproxies() falls back on System\n # Registry and Config for proxies on Windows and macOS.\n # We don't want to propagate non-HTTP proxies into\n # our configuration such as 'TRAVIS_APT_PROXY'.\n proxy_info = getproxies()\n mounts: typing.Dict[str, typing.Optional[str]] = {}\n\n for scheme in (\"http\", \"https\", \"all\"):\n if proxy_info.get(scheme):\n hostname = proxy_info[scheme]\n mounts[f\"{scheme}://\"] = (\n hostname if \"://\" in hostname else f\"http://{hostname}\"\n )\n\n no_proxy_hosts = [host.strip() for host in proxy_info.get(\"no\", \"\").split(\",\")]\n for hostname in no_proxy_hosts:\n # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details\n # on how names in `NO_PROXY` are handled.\n if hostname == \"*\":\n # If NO_PROXY=* is used or if \"*\" occurs as any one of the comma\n # separated hostnames, then we should just bypass any information\n # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always 
ignore\n # proxies.\n return {}\n elif hostname:\n # NO_PROXY=.google.com is marked as \"all://*.google.com,\n # which disables \"www.google.com\" but not \"google.com\"\n # NO_PROXY=google.com is marked as \"all://*google.com,\n # which disables \"www.google.com\" and \"google.com\".\n # (But not \"wwwgoogle.com\")\n mounts[f\"all://*{hostname}\"] = None\n\n return mounts\n\n\ndef to_bytes(value: typing.Union[str, bytes], encoding: str = \"utf-8\") -> bytes:\n return value.encode(encoding) if isinstance(value, str) else value\n\n\ndef to_str(value: typing.Union[str, bytes], encoding: str = \"utf-8\") -> str:\n return value if isinstance(value, str) else value.decode(encoding)\n\n\ndef to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr:\n return value if isinstance(match_type_of, str) else value.encode()\n\n\ndef unquote(value: str) -> str:\n return value[1:-1] if value[0] == value[-1] == '\"' else value\n\n\ndef guess_content_type(filename: typing.Optional[str]) -> typing.Optional[str]:\n if filename:\n return mimetypes.guess_type(filename)[0] or \"application/octet-stream\"\n return None\n\n\ndef peek_filelike_length(stream: typing.Any) -> typing.Optional[int]:\n \"\"\"\n Given a file-like stream object, return its length in number of bytes\n without reading it into memory.\n \"\"\"\n try:\n # Is it an actual file?\n fd = stream.fileno()\n # Yup, seems to be an actual file.\n length = os.fstat(fd).st_size\n except (AttributeError, OSError):\n # No... Maybe it's something that supports random access, like `io.BytesIO`?\n try:\n # Assuming so, go to end of stream to figure out its length,\n # then put it back in place.\n offset = stream.tell()\n length = stream.seek(0, os.SEEK_END)\n stream.seek(offset)\n except (AttributeError, OSError):\n # Not even that? 
Sorry, we're doomed...\n return None\n\n return length\n\n\nclass Timer:\n async def _get_time(self) -> float:\n library = sniffio.current_async_library()\n if library == \"trio\":\n import trio\n\n return trio.current_time()\n elif library == \"curio\": # pragma: no cover\n import curio\n\n return typing.cast(float, await curio.clock())\n\n import asyncio\n\n return asyncio.get_event_loop().time()\n\n def sync_start(self) -> None:\n self.started = time.perf_counter()\n\n async def async_start(self) -> None:\n self.started = await self._get_time()\n\n def sync_elapsed(self) -> float:\n now = time.perf_counter()\n return now - self.started\n\n async def async_elapsed(self) -> float:\n now = await self._get_time()\n return now - self.started\n\n\nclass URLPattern:\n \"\"\"\n A utility class currently used for making lookups against proxy keys...\n\n # Wildcard matching...\n >>> pattern = URLPattern(\"all\")\n >>> pattern.matches(httpx.URL(\"http://example.com\"))\n True\n\n # Witch scheme matching...\n >>> pattern = URLPattern(\"https\")\n >>> pattern.matches(httpx.URL(\"https://example.com\"))\n True\n >>> pattern.matches(httpx.URL(\"http://example.com\"))\n False\n\n # With domain matching...\n >>> pattern = URLPattern(\"https://example.com\")\n >>> pattern.matches(httpx.URL(\"https://example.com\"))\n True\n >>> pattern.matches(httpx.URL(\"http://example.com\"))\n False\n >>> pattern.matches(httpx.URL(\"https://other.com\"))\n False\n\n # Wildcard scheme, with domain matching...\n >>> pattern = URLPattern(\"all://example.com\")\n >>> pattern.matches(httpx.URL(\"https://example.com\"))\n True\n >>> pattern.matches(httpx.URL(\"http://example.com\"))\n True\n >>> pattern.matches(httpx.URL(\"https://other.com\"))\n False\n\n # With port matching...\n >>> pattern = URLPattern(\"https://example.com:1234\")\n >>> pattern.matches(httpx.URL(\"https://example.com:1234\"))\n True\n >>> pattern.matches(httpx.URL(\"https://example.com\"))\n False\n \"\"\"\n\n def __init__(self, pattern: str) -> None:\n from ._urls import URL\n\n if pattern and \":\" not in pattern:\n raise ValueError(\n f\"Proxy keys should use proper URL forms rather \"\n f\"than plain scheme strings. 
\"\n f'Instead of \"{pattern}\", use \"{pattern}://\"'\n )\n\n url = URL(pattern)\n self.pattern = pattern\n self.scheme = \"\" if url.scheme == \"all\" else url.scheme\n self.host = \"\" if url.host == \"*\" else url.host\n self.port = url.port\n if not url.host or url.host == \"*\":\n self.host_regex: typing.Optional[typing.Pattern[str]] = None\n elif url.host.startswith(\"*.\"):\n # *.example.com should match \"www.example.com\", but not \"example.com\"\n domain = re.escape(url.host[2:])\n self.host_regex = re.compile(f\"^.+\\\\.{domain}$\")\n elif url.host.startswith(\"*\"):\n # *example.com should match \"www.example.com\" and \"example.com\"\n domain = re.escape(url.host[1:])\n self.host_regex = re.compile(f\"^(.+\\\\.)?{domain}$\")\n else:\n # example.com should match \"example.com\" but not \"www.example.com\"\n domain = re.escape(url.host)\n self.host_regex = re.compile(f\"^{domain}$\")\n\n def matches(self, other: \"URL\") -> bool:\n if self.scheme and self.scheme != other.scheme:\n return False\n if (\n self.host\n and self.host_regex is not None\n and not self.host_regex.match(other.host)\n ):\n return False\n if self.port is not None and self.port != other.port:\n return False\n return True\n\n @property\n def priority(self) -> typing.Tuple[int, int, int]:\n \"\"\"\n The priority allows URLPattern instances to be sortable, so that\n we can match from most specific to least specific.\n \"\"\"\n # URLs with a port should take priority over URLs without a port.\n port_priority = 0 if self.port is not None else 1\n # Longer hostnames should match first.\n host_priority = -len(self.host)\n # Longer schemes should match first.\n scheme_priority = -len(self.scheme)\n return (port_priority, host_priority, scheme_priority)\n\n def __hash__(self) -> int:\n return hash(self.pattern)\n\n def __lt__(self, other: \"URLPattern\") -> bool:\n return self.priority < other.priority\n\n def __eq__(self, other: typing.Any) -> bool:\n return isinstance(other, URLPattern) and self.pattern == other.pattern\n",
"path": "httpx/_utils.py"
},
{
"content": "import pytest\n\nimport httpx\n\n\n@pytest.mark.parametrize(\n \"source\",\n [\n \"a=123&a=456&b=789\",\n {\"a\": [\"123\", \"456\"], \"b\": 789},\n {\"a\": (\"123\", \"456\"), \"b\": 789},\n [(\"a\", \"123\"), (\"a\", \"456\"), (\"b\", \"789\")],\n ((\"a\", \"123\"), (\"a\", \"456\"), (\"b\", \"789\")),\n ],\n)\ndef test_queryparams(source):\n q = httpx.QueryParams(source)\n assert \"a\" in q\n assert \"A\" not in q\n assert \"c\" not in q\n assert q[\"a\"] == \"123\"\n assert q.get(\"a\") == \"123\"\n assert q.get(\"nope\", default=None) is None\n assert q.get_list(\"a\") == [\"123\", \"456\"]\n\n assert list(q.keys()) == [\"a\", \"b\"]\n assert list(q.values()) == [\"123\", \"789\"]\n assert list(q.items()) == [(\"a\", \"123\"), (\"b\", \"789\")]\n assert len(q) == 2\n assert list(q) == [\"a\", \"b\"]\n assert dict(q) == {\"a\": \"123\", \"b\": \"789\"}\n assert str(q) == \"a=123&a=456&b=789\"\n assert repr(q) == \"QueryParams('a=123&a=456&b=789')\"\n assert httpx.QueryParams({\"a\": \"123\", \"b\": \"456\"}) == httpx.QueryParams(\n [(\"a\", \"123\"), (\"b\", \"456\")]\n )\n assert httpx.QueryParams({\"a\": \"123\", \"b\": \"456\"}) == httpx.QueryParams(\n \"a=123&b=456\"\n )\n assert httpx.QueryParams({\"a\": \"123\", \"b\": \"456\"}) == httpx.QueryParams(\n {\"b\": \"456\", \"a\": \"123\"}\n )\n assert httpx.QueryParams() == httpx.QueryParams({})\n assert httpx.QueryParams([(\"a\", \"123\"), (\"a\", \"456\")]) == httpx.QueryParams(\n \"a=123&a=456\"\n )\n assert httpx.QueryParams({\"a\": \"123\", \"b\": \"456\"}) != \"invalid\"\n\n q = httpx.QueryParams([(\"a\", \"123\"), (\"a\", \"456\")])\n assert httpx.QueryParams(q) == q\n\n\ndef test_queryparam_types():\n q = httpx.QueryParams(None)\n assert str(q) == \"\"\n\n q = httpx.QueryParams({\"a\": True})\n assert str(q) == \"a=true\"\n\n q = httpx.QueryParams({\"a\": False})\n assert str(q) == \"a=false\"\n\n q = httpx.QueryParams({\"a\": \"\"})\n assert str(q) == \"a=\"\n\n q = httpx.QueryParams({\"a\": None})\n assert str(q) == \"a=\"\n\n q = httpx.QueryParams({\"a\": 1.23})\n assert str(q) == \"a=1.23\"\n\n q = httpx.QueryParams({\"a\": 123})\n assert str(q) == \"a=123\"\n\n q = httpx.QueryParams({\"a\": [1, 2]})\n assert str(q) == \"a=1&a=2\"\n\n\ndef test_empty_query_params():\n q = httpx.QueryParams({\"a\": \"\"})\n assert str(q) == \"a=\"\n\n q = httpx.QueryParams(\"a=\")\n assert str(q) == \"a=\"\n\n q = httpx.QueryParams(\"a\")\n assert str(q) == \"a=\"\n\n\ndef test_queryparam_update_is_hard_deprecated():\n q = httpx.QueryParams(\"a=123\")\n with pytest.raises(RuntimeError):\n q.update({\"a\": \"456\"})\n\n\ndef test_queryparam_setter_is_hard_deprecated():\n q = httpx.QueryParams(\"a=123\")\n with pytest.raises(RuntimeError):\n q[\"a\"] = \"456\"\n\n\ndef test_queryparam_set():\n q = httpx.QueryParams(\"a=123\")\n q = q.set(\"a\", \"456\")\n assert q == httpx.QueryParams(\"a=456\")\n\n\ndef test_queryparam_add():\n q = httpx.QueryParams(\"a=123\")\n q = q.add(\"a\", \"456\")\n assert q == httpx.QueryParams(\"a=123&a=456\")\n\n\ndef test_queryparam_remove():\n q = httpx.QueryParams(\"a=123\")\n q = q.remove(\"a\")\n assert q == httpx.QueryParams(\"\")\n\n\ndef test_queryparam_merge():\n q = httpx.QueryParams(\"a=123\")\n q = q.merge({\"b\": \"456\"})\n assert q == httpx.QueryParams(\"a=123&b=456\")\n q = q.merge({\"a\": \"000\", \"c\": \"789\"})\n assert q == httpx.QueryParams(\"a=000&b=456&c=789\")\n\n\ndef test_queryparams_are_hashable():\n params = (\n httpx.QueryParams(\"a=123\"),\n httpx.QueryParams({\"a\": 
123}),\n httpx.QueryParams(\"b=456\"),\n httpx.QueryParams({\"b\": 456}),\n )\n\n assert len(set(params)) == 2\n",
"path": "tests/models/test_queryparams.py"
}
] | 0_2 | python | import sys
import unittest


class TestHttpxQueryParams(unittest.TestCase):
    def test_query_params_with_bytes(self):
        import httpx
        try:
            httpx.QueryParams({"a": b"bytes"})
            self.fail("TypeError not raised")
        except TypeError as e:
            expected_message = "Expected str, int, float, bool, or None. Got 'bytes'"
            self.assertIn(expected_message, str(e), "TypeError does not contain the expected message")


def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestHttpxQueryParams))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == "__main__":
    main()
|
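A brief sketch of the new behaviour, assuming the patched checkout is importable; the first three calls are unchanged, while the final one now raises instead of silently coercing via `str()`.

```python
from httpx._utils import primitive_value_to_str

print(primitive_value_to_str(True))  # 'true'
print(primitive_value_to_str(None))  # ''
print(primitive_value_to_str(1.23))  # '1.23'

try:
    primitive_value_to_str(b"bytes")  # bytes is no longer accepted
except TypeError as exc:
    print(exc)  # Expected str, int, float, bool, or None. Got 'bytes'.
```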
https://github.com/teamqurrent/httpx | Delete `setup.py` | e5bc1ea | python3.9 | 10a3b68 | diff --git a/setup.py b/setup.py
deleted file mode 100644
--- a/setup.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import sys
-
-from setuptools import setup
-
-sys.stderr.write(
- """
-===============================
-Unsupported installation method
-===============================
-httpx no longer supports installation with `python setup.py install`.
-Please use `python -m pip install .` instead.
-"""
-)
-sys.exit(1)
-
-
-# The below code will never execute, however GitHub is particularly
-# picky about where it finds Python packaging metadata.
-# See: https://github.com/github/feedback/discussions/6456
-#
-# To be removed once GitHub catches up.
-
-setup(
- name="httpx",
- install_requires=[
- "certifi",
- "sniffio",
- "rfc3986[idna2008]>=1.3,<2",
- "httpcore>=0.15.0,<0.17.0",
- ],
-)
| [
{
"content": "import sys\n\nfrom setuptools import setup\n\nsys.stderr.write(\n \"\"\"\n===============================\nUnsupported installation method\n===============================\nhttpx no longer supports installation with `python setup.py install`.\nPlease use `python -m pip install .` instead.\n\"\"\"\n)\nsys.exit(1)\n\n\n# The below code will never execute, however GitHub is particularly\n# picky about where it finds Python packaging metadata.\n# See: https://github.com/github/feedback/discussions/6456\n#\n# To be removed once GitHub catches up.\n\nsetup(\n name=\"httpx\",\n install_requires=[\n \"certifi\",\n \"sniffio\",\n \"rfc3986[idna2008]>=1.3,<2\",\n \"httpcore>=0.15.0,<0.17.0\",\n ],\n)\n",
"path": "setup.py"
}
] | 0_3 | python | import os
import sys
import unittest


class TestSetupPyExists(unittest.TestCase):
    def test_setup_py_existence(self):
        # Get the current directory path
        directory_path = os.getcwd()
        # List all files in the directory
        files = os.listdir(directory_path)
        # Check if setup.py exists in the list of files
        self.assertNotIn("setup.py", files, "setup.py exists in the directory")


def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSetupPyExists))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == "__main__":
    main()
|
https://github.com/teamqurrent/httpx | Modify the encoding setter method of the `Response` class to throw a ValueError if the class instance already has a `_text` attribute | e4241c6 | sniffio
rfc3986
httpcore>=0.18.0,<0.19.0
certifi
idna | python3.9 | 59df819 | diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,12 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
+## Unreleased
+
+### Fixed
+
+* Raise `ValueError` on `Response.encoding` being set after `Response.text` has been accessed. (#2852)
+
 ## 0.25.0 (11th Sep, 2023)
 
 ### Removed
diff --git a/httpx/_models.py b/httpx/_models.py
--- a/httpx/_models.py
+++ b/httpx/_models.py
@@ -603,6 +603,16 @@ class Response:
 
     @encoding.setter
     def encoding(self, value: str) -> None:
+        """
+        Set the encoding to use for decoding the byte content into text.
+
+        If the `text` attribute has been accessed, attempting to set the
+        encoding will throw a ValueError.
+        """
+        if hasattr(self, "_text"):
+            raise ValueError(
+                "Setting encoding after `text` has been accessed is not allowed."
+            )
         self._encoding = value
 
     @property
diff --git a/tests/models/test_responses.py b/tests/models/test_responses.py
--- a/tests/models/test_responses.py
+++ b/tests/models/test_responses.py
@@ -298,6 +298,23 @@ def test_response_force_encoding():
assert response.encoding == "iso-8859-1"
+def test_response_force_encoding_after_text_accessed():
+ response = httpx.Response(
+ 200,
+ content=b"Hello, world!",
+ )
+ assert response.status_code == 200
+ assert response.reason_phrase == "OK"
+ assert response.text == "Hello, world!"
+ assert response.encoding == "utf-8"
+
+ with pytest.raises(ValueError):
+ response.encoding = "UTF8"
+
+ with pytest.raises(ValueError):
+ response.encoding = "iso-8859-1"
+
+
def test_read():
response = httpx.Response(
200,
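A small sketch of the guard this row introduces, assuming the patched checkout is importable: once `.text` has been accessed (which decodes and caches the body), reassigning `.encoding` raises a `ValueError`.

```python
import httpx

response = httpx.Response(200, content=b"Hello, world!")
print(response.text)      # decodes the body and caches it as `_text`
print(response.encoding)  # 'utf-8'

try:
    response.encoding = "iso-8859-1"  # too late: the text is already decoded
except ValueError as exc:
    print(exc)  # Setting encoding after `text` has been accessed is not allowed.
```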
| [
{
"content": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n## 0.25.0 (11th Sep, 2023)\n\n### Removed\n\n* Drop support for Python 3.7. (#2813)\n\n### Added\n\n* Support HTTPS proxies. (#2845)\n* Change the type of `Extensions` from `Mapping[Str, Any]` to `MutableMapping[Str, Any]`. (#2803)\n* Add `socket_options` argument to `httpx.HTTPTransport` and `httpx.AsyncHTTPTransport` classes. (#2716)\n* The `Response.raise_for_status()` method now returns the response instance. For example: `data = httpx.get('...').raise_for_status().json()`. (#2776)\n\n### Fixed\n\n* Return `500` error response instead of exceptions when `raise_app_exceptions=False` is set on `ASGITransport`. (#2669)\n* Ensure all `WSGITransport` environs have a `SERVER_PROTOCOL`. (#2708)\n* Always encode forward slashes as `%2F` in query parameters (#2723)\n* Use Mozilla documentation instead of `httpstatuses.com` for HTTP error reference (#2768)\n\n## 0.24.1 (17th May, 2023)\n\n### Added\n\n* Provide additional context in some `InvalidURL` exceptions. (#2675)\n\n### Fixed\n\n* Fix optional percent-encoding behaviour. (#2671)\n* More robust checking for opening upload files in binary mode. (#2630)\n* Properly support IP addresses in `NO_PROXY` environment variable. (#2659)\n* Set default file for `NetRCAuth()` to `None` to use the stdlib default. (#2667)\n* Set logging request lines to INFO level for async requests, in line with sync requests. (#2656)\n* Fix which gen-delims need to be escaped for path/query/fragment components in URL. (#2701)\n\n## 0.24.0 (6th April, 2023)\n\n### Changed\n\n* The logging behaviour has been changed to be more in-line with other standard Python logging usages. We no longer have a custom `TRACE` log level, and we no longer use the `HTTPX_LOG_LEVEL` environment variable to auto-configure logging. We now have a significant amount of `DEBUG` logging available at the network level. Full documentation is available at https://www.python-httpx.org/logging/ (#2547, encode/httpcore#648)\n* The `Response.iter_lines()` method now matches the stdlib behaviour and does not include the newline characters. It also resolves a performance issue. (#2423)\n* Query parameter encoding switches from using + for spaces and %2F for forward slash, to instead using %20 for spaces and treating forward slash as a safe, unescaped character. This differs from `requests`, but is in line with browser behavior in Chrome, Safari, and Firefox. Both options are RFC valid. (#2543)\n* NetRC authentication is no longer automatically handled, but is instead supported by an explicit `httpx.NetRCAuth()` authentication class. See the documentation at https://www.python-httpx.org/advanced/#netrc-support (#2525)\n\n### Removed\n\n* The `rfc3986` dependancy has been removed. (#2252)\n\n## 0.23.3 (4th Jan, 2023)\n\n### Fixed\n\n* Version 0.23.2 accidentally included stricter type checking on query parameters. This shouldn've have been included in a minor version bump, and is now reverted. (#2523, #2539)\n\n## 0.23.2 (2nd Jan, 2023)\n\n### Added\n\n* Support digest auth nonce counting to avoid multiple auth requests. (#2463)\n\n### Fixed\n\n* Multipart file uploads where the file length cannot be determine now use chunked transfer encoding, rather than loading the entire file into memory in order to determine the `Content-Length`. (#2382)\n* Raise `TypeError` if content is passed a dict-instance. 
(#2495)\n* Partially revert the API breaking change in 0.23.1, which removed `RawURL`. We continue to expose a `url.raw` property which is now a plain named-tuple. This API is still expected to be deprecated, but we will do so with a major version bump. (#2481)\n\n## 0.23.1 (18th Nov, 2022)\n\n**Note**: The 0.23.1 release should have used a proper version bump, rather than a minor point release.\nThere are API surface area changes that may affect some users.\nSee the \"Removed\" section of these release notes for details.\n\n### Added\n\n* Support for Python 3.11. (#2420)\n* Allow setting an explicit multipart boundary in `Content-Type` header. (#2278)\n* Allow `tuple` or `list` for multipart values, not just `list`. (#2355)\n* Allow `str` content for multipart upload files. (#2400)\n* Support connection upgrades. See https://www.encode.io/httpcore/extensions/#upgrade-requests\n\n### Fixed\n\n* Don't drop empty query parameters. (#2354)\n\n### Removed\n\n* Upload files *must* always be opened in binary mode. (#2400)\n* Drop `.read`/`.aread` from `SyncByteStream`/`AsyncByteStream`. (#2407)\n* Drop `RawURL`. (#2241)\n\n## 0.23.0 (23rd May, 2022)\n\n### Changed\n\n* Drop support for Python 3.6. (#2097)\n* Use `utf-8` as the default character set, instead of falling back to `charset-normalizer` for auto-detection. To enable automatic character set detection, see [the documentation](https://www.python-httpx.org/advanced/#character-set-encodings-and-auto-detection). (#2165)\n\n### Fixed\n\n* Fix `URL.copy_with` for some oddly formed URL cases. (#2185)\n* Digest authentication should use case-insensitive comparison for determining which algorithm is being used. (#2204)\n* Fix console markup escaping in command line client. (#1866)\n* When files are used in multipart upload, ensure we always seek to the start of the file. (#2065)\n* Ensure that `iter_bytes` never yields zero-length chunks. (#2068)\n* Preserve `Authorization` header for redirects that are to the same origin, but are an `http`-to-`https` upgrade. (#2074)\n* When responses have binary output, don't print the output to the console in the command line client. Use output like `<16086 bytes of binary data>` instead. (#2076)\n* Fix display of `--proxies` argument in the command line client help. (#2125)\n* Close responses when task cancellations occur during stream reading. (#2156)\n* Fix type error on accessing `.request` on `HTTPError` exceptions. (#2158)\n\n## 0.22.0 (26th January, 2022)\n\n### Added\n\n* Support for [the SOCKS5 proxy protocol](https://www.python-httpx.org/advanced/#socks) via [the `socksio` package](https://github.com/sethmlarson/socksio). (#2034)\n* Support for custom headers in multipart/form-data requests (#1936)\n\n### Fixed\n\n* Don't perform unreliable close/warning on `__del__` with unclosed clients. (#2026)\n* Fix `Headers.update(...)` to correctly handle repeated headers (#2038)\n\n## 0.21.3 (6th January, 2022)\n\n### Fixed\n\n* Fix streaming uploads using `SyncByteStream` or `AsyncByteStream`. Regression in 0.21.2. (#2016)\n\n## 0.21.2 (5th January, 2022)\n\n### Fixed\n\n* HTTP/2 support for tunnelled proxy cases. (#2009)\n* Improved the speed of large file uploads. (#1948)\n\n## 0.21.1 (16th November, 2021)\n\n### Fixed\n\n* The `response.url` property is now correctly annotated as `URL`, instead of `Optional[URL]`. 
(#1940)\n\n## 0.21.0 (15th November, 2021)\n\nThe 0.21.0 release integrates against a newly redesigned `httpcore` backend.\n\nBoth packages ought to automatically update to the required versions, but if you are\nseeing any issues, you should ensure that you have `httpx==0.21.*` and `httpcore==0.14.*` installed.\n\n### Added\n\n* The command-line client will now display connection information when `-v/--verbose` is used.\n* The command-line client will now display server certificate information when `-v/--verbose` is used.\n* The command-line client is now able to properly detect if the outgoing request\nshould be formatted as HTTP/1.1 or HTTP/2, based on the result of the HTTP/2 negotiation.\n\n### Removed\n\n* Curio support is no longer currently included. Please get in touch if you require this, so that we can assess priorities.\n\n## 0.20.0 (13th October, 2021)\n\nThe 0.20.0 release adds an integrated command-line client, and also includes some\ndesign changes. The most notable of these is that redirect responses are no longer\nautomatically followed, unless specifically requested.\n\nThis design decision prioritises a more explicit approach to redirects, in order\nto avoid code that unintentionally issues multiple requests as a result of\nmisconfigured URLs.\n\nFor example, previously a client configured to send requests to `http://api.github.com/`\nwould end up sending every API request twice, as each request would be redirected to `https://api.github.com/`.\n\nIf you do want auto-redirect behaviour, you can enable this either by configuring\nthe client instance with `Client(follow_redirects=True)`, or on a per-request\nbasis, with `.get(..., follow_redirects=True)`.\n\nThis change is a classic trade-off between convenience and precision, with no \"right\"\nanswer. See [discussion #1785](https://github.com/encode/httpx/discussions/1785) for more\ncontext.\n\nThe other major design change is an update to the Transport API, which is the low-level\ninterface against which requests are sent. Previously this interface used only primitive\ndatastructures, like so...\n\n```python\n(status_code, headers, stream, extensions) = transport.handle_request(method, url, headers, stream, extensions)\ntry\n ...\nfinally:\n stream.close()\n```\n\nNow the interface is much simpler...\n\n```python\nresponse = transport.handle_request(request)\ntry\n ...\nfinally:\n response.close()\n```\n\n### Changed\n\n* The `allow_redirects` flag is now `follow_redirects` and defaults to `False`.\n* The `raise_for_status()` method will now raise an exception for any responses\n except those with 2xx status codes. Previously only 4xx and 5xx status codes\n would result in an exception.\n* The low-level transport API changes to the much simpler `response = transport.handle_request(request)`.\n* The `client.send()` method no longer accepts a `timeout=...` argument, but the\n `client.build_request()` does. This required by the signature change of the\n Transport API. The request timeout configuration is now stored on the request\n instance, as `request.extensions['timeout']`.\n\n### Added\n\n* Added the `httpx` command-line client.\n* Response instances now include `.is_informational`, `.is_success`, `.is_redirect`, `.is_client_error`, and `.is_server_error`\n properties for checking 1xx, 2xx, 3xx, 4xx, and 5xx response types. 
\n\n### Fixed\n\n* `response.iter_bytes()` no longer raises a ValueError when called on a response with no content. (Pull #1827)\n* The `'wsgi.error'` configuration now defaults to `sys.stderr`, and is corrected to be a `TextIO` interface, not a `BytesIO` interface. Additionally, the WSGITransport now accepts a `wsgi_error` configuration. (Pull #1828)\n* Follow the WSGI spec by properly closing the iterable returned by the application. (Pull #1830)\n\n## 0.19.0 (19th August, 2021)\n\n### Added\n\n* Add support for `Client(allow_redirects=<bool>)`. (Pull #1790)\n* Add automatic character set detection, when no `charset` is included in the response `Content-Type` header. (Pull #1791)\n\n### Changed\n\n* Event hooks are now also called for any additional redirect or auth requests/responses. (Pull #1806)\n* Strictly enforce that upload files must be opened in binary mode. (Pull #1736)\n* Strictly enforce that client instances can only be opened and closed once, and cannot be re-opened. (Pull #1800)\n* Drop `mode` argument from `httpx.Proxy(..., mode=...)`. (Pull #1795)\n\n## 0.18.2 (17th June, 2021)\n\n### Added\n\n* Support for Python 3.10. (Pull #1687)\n* Expose `httpx.USE_CLIENT_DEFAULT`, used as the default for `auth` and `timeout` parameters in request methods. (Pull #1634)\n* Support [HTTP/2 \"prior knowledge\"](https://python-hyper.org/projects/hyper-h2/en/v2.3.1/negotiating-http2.html#prior-knowledge), using `httpx.Client(http1=False, http2=True)`. (Pull #1624)\n\n### Fixed\n\n* Clean up some cases where warnings were being issued. (Pull #1687)\n* Prefer Content-Length over Transfer-Encoding: chunked for content=<file-like> cases. (Pull #1619)\n\n## 0.18.1 (29th April, 2021)\n\n### Changed\n\n* Update brotli support to use the `brotlicffi` package. (Pull #1605)\n* Ensure that `Request(..., stream=...)` does not auto-generate any headers on the request instance. (Pull #1607)\n\n### Fixed\n\n* Pass through `timeout=...` in top-level httpx.stream() function. (Pull #1613)\n* Map httpcore transport close exceptions to httpx exceptions. (Pull #1606)\n\n## 0.18.0 (27th April, 2021)\n\nThe 0.18.x release series formalises our low-level Transport API, introducing the base classes `httpx.BaseTransport` and `httpx.AsyncBaseTransport`.\n\nSee the \"[Writing custom transports](https://www.python-httpx.org/advanced/#writing-custom-transports)\" documentation and the [`httpx.BaseTransport.handle_request()`](https://github.com/encode/httpx/blob/397aad98fdc8b7580a5fc3e88f1578b4302c6382/httpx/_transports/base.py#L77-L147) docstring for more complete details on implementing custom transports.\n\nPull request #1522 includes a checklist of differences from the previous `httpcore` transport API, for developers implementing custom transports.\n\nThe following API changes have been issuing deprecation warnings since 0.17.0, and are now fully deprecated...\n\n* You should now use httpx.codes consistently instead of httpx.StatusCodes.\n* Use limits=... instead of pool_limits=....\n* Use proxies={\"http://\": ...} instead of proxies={\"http\": ...} for scheme-specific mounting.
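\n\nFor illustration, a minimal sketch of the `limits=...` style that replaces `pool_limits=...` (the pool sizes here are arbitrary):\n\n```python\nimport httpx\n\nlimits = httpx.Limits(max_connections=100, max_keepalive_connections=20)\nclient = httpx.Client(limits=limits)\n```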
\n\n### Changed\n\n* Transport instances now inherit from `httpx.BaseTransport` or `httpx.AsyncBaseTransport`,\n  and should implement either the `handle_request` method or `handle_async_request` method. (Pull #1522, #1550)\n* The `response.ext` property and `Response(ext=...)` argument are now named `extensions`. (Pull #1522)\n* The recommendation to not use `data=<bytes|str|bytes (a)iterator>` in favour of `content=<bytes|str|bytes (a)iterator>` has now been escalated to a deprecation warning. (Pull #1573)\n* Drop `Response(on_close=...)` from API, since it was a bit of leaking implementation detail. (Pull #1572)\n* When using a client instance, cookies should always be set on the client, rather than on a per-request basis. We prefer enforcing a stricter API here because it provides clearer expectations around cookie persistence, particularly when redirects occur. (Pull #1574)\n* The runtime exception `httpx.ResponseClosed` is now named `httpx.StreamClosed`. (#1584)\n* The `httpx.QueryParams` model now presents an immutable interface. There is a discussion on [the design and motivation here](https://github.com/encode/httpx/discussions/1599). Use `client.params = client.params.merge(...)` instead of `client.params.update(...)`. The basic query manipulation methods are `query.set(...)`, `query.add(...)`, and `query.remove()`. (#1600)\n\n### Added\n\n* The `Request` and `Response` classes can now be serialized using pickle. (#1579)\n* Handle `data={\"key\": [None|int|float|bool]}` cases. (Pull #1539)\n* Support `httpx.URL(**kwargs)`, for example `httpx.URL(scheme=\"https\", host=\"www.example.com\", path=\"/\")`, or `httpx.URL(\"https://www.example.com/\", username=\"tom@gmail.com\", password=\"123 456\")`. (Pull #1601)\n* Support `url.copy_with(params=...)`. (Pull #1601)\n* Add `url.params` property, returning an immutable `QueryParams` instance. (Pull #1601)\n* Support query manipulation methods on the URL class. These are `url.copy_set_param()`, `url.copy_add_param()`, `url.copy_remove_param()`, `url.copy_merge_params()`. (Pull #1601)\n* The `httpx.URL` class now performs port normalization, so `:80` ports are stripped from `http` URLs and `:443` ports are stripped from `https` URLs. (Pull #1603)\n* The `URL.host` property returns unicode strings for internationalized domain names. The `URL.raw_host` property returns byte strings with IDNA escaping applied. (Pull #1590)\n\n### Fixed\n\n* Fix Content-Length for cases of `files=...` where unicode string is used as the file content. (Pull #1537)\n* Fix some cases of merging relative URLs against `Client(base_url=...)`. (Pull #1532)\n* The `request.content` attribute is now always available except for streaming content, which requires an explicit `.read()`. (Pull #1583)\n\n## 0.17.1 (March 15th, 2021)\n\n### Fixed\n\n* Type annotation on `CertTypes` allows `keyfile` and `password` to be optional. (Pull #1503)\n* Fix httpcore pinned version. (Pull #1495)\n\n## 0.17.0 (February 28th, 2021)\n\n### Added\n\n* Add `httpx.MockTransport()`, allowing a transport to be mocked out using pre-determined responses (a sketch follows below this list). (Pull #1401, Pull #1449)\n* Add `httpx.HTTPTransport()` and `httpx.AsyncHTTPTransport()` default transports. (Pull #1399)\n* Add mount API support, using `httpx.Client(mounts=...)`. (Pull #1362)\n* Add `chunk_size` parameter to `iter_raw()`, `iter_bytes()`, `iter_text()`. (Pull #1277)\n* Add `keepalive_expiry` parameter to `httpx.Limits()` configuration. (Pull #1398)\n* Add repr to `httpx.Cookies` to display available cookies. (Pull #1411)\n* Add support for `params=<tuple>` (previously only `params=<list>` was supported). (Pull #1426)
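\n\nA minimal sketch of `httpx.MockTransport()` usage (the handler and canned response are invented for the example):\n\n```python\nimport httpx\n\ndef handler(request: httpx.Request) -> httpx.Response:\n    # Return a pre-determined response, regardless of the incoming request.\n    return httpx.Response(200, json={\"hello\": \"world\"})\n\nclient = httpx.Client(transport=httpx.MockTransport(handler))\nresponse = client.get(\"https://example.org/\")\nassert response.json() == {\"hello\": \"world\"}\n```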
\n\n### Fixed\n\n* Add missing `raw_path` to ASGI scope. (Pull #1357)\n* Tweak `create_ssl_context` defaults to use `trust_env=True`. (Pull #1447)\n* Properly URL-escape WSGI `PATH_INFO`. (Pull #1391)\n* Properly set default ports in WSGI transport. (Pull #1469)\n* Properly encode slashes when using `base_url`. (Pull #1407)\n* Properly map exceptions in `request.aclose()`. (Pull #1465)\n\n## 0.16.1 (October 8th, 2020)\n\n### Fixed\n\n* Support literal IPv6 addresses in URLs. (Pull #1349)\n* Force lowercase headers in ASGI scope dictionaries. (Pull #1351)\n\n## 0.16.0 (October 6th, 2020)\n\n### Changed\n\n* Preserve HTTP header casing. (Pull #1338, encode/httpcore#216, python-hyper/h11#104)\n* Drop `response.next()` and `response.anext()` methods in favour of `response.next_request` attribute. (Pull #1339)\n* Closed clients now raise a runtime error if attempting to send a request. (Pull #1346)\n\n### Added\n\n* Add Python 3.9 to officially supported versions.\n* Type annotate `__enter__`/`__exit__`/`__aenter__`/`__aexit__` in a way that supports subclasses of `Client` and `AsyncClient`. (Pull #1336)\n\n## 0.15.5 (October 1st, 2020)\n\n### Added\n\n* Add `response.next_request` (Pull #1334)\n\n## 0.15.4 (September 25th, 2020)\n\n### Added\n\n* Support direct comparisons between `Headers` and dicts or lists of two-tuples. Eg. `assert response.headers == {\"Content-Length\": 24}` (Pull #1326)\n\n### Fixed\n\n* Fix automatic `.read()` when `Response` instances are created with `content=<str>` (Pull #1324)\n\n## 0.15.3 (September 24th, 2020)\n\n### Fixed\n\n* Fixed connection leak in async client due to improper closing of response streams. (Pull #1316)\n\n## 0.15.2 (September 23rd, 2020)\n\n### Fixed\n\n* Fixed `response.elapsed` property. (Pull #1313)\n* Fixed client authentication interaction with `.stream()`. (Pull #1312)\n\n## 0.15.1 (September 23rd, 2020)\n\n### Fixed\n\n* ASGITransport now properly applies URL decoding to the `path` component, as-per the ASGI spec. (Pull #1307)\n\n## 0.15.0 (September 22nd, 2020)\n\n### Added\n\n* Added support for curio. (Pull https://github.com/encode/httpcore/pull/168)\n* Added support for event hooks. (Pull #1246)\n* Added support for authentication flows which require either sync or async I/O. (Pull #1217)\n* Added support for monitoring download progress with `response.num_bytes_downloaded`. (Pull #1268)\n* Added `Request(content=...)` for byte content, instead of overloading `Request(data=...)` (Pull #1266)\n* Added support for all URL components as parameter names when using `url.copy_with(...)`. (Pull #1285)\n* Neater split between automatically populated headers on `Request` instances, vs default `client.headers`. (Pull #1248)\n* Unclosed `AsyncClient` instances will now raise warnings if garbage collected. (Pull #1197)\n* Support `Response(content=..., text=..., html=..., json=...)` for creating usable response instances in code. (Pull #1265, #1297)\n* Support instantiating requests from the low-level transport API. (Pull #1293)\n* Raise errors on invalid URL types. (Pull #1259)\n\n### Changed\n\n* Cleaned up expected behaviour for URL escaping. `url.path` is now URL escaped. (Pull #1285)\n* Cleaned up expected behaviour for bytes vs str in URL components.
 `url.userinfo` and `url.query` are not URL escaped, and so return bytes. (Pull #1285)\n* Drop `url.authority` property in favour of `url.netloc`, since \"authority\" was semantically incorrect. (Pull #1285)\n* Drop `url.full_path` property in favour of `url.raw_path`, for better consistency with other parts of the API. (Pull #1285)\n* No longer use the `chardet` library for auto-detecting charsets, instead defaulting to a simpler approach when no charset is specified. (#1269)\n\n### Fixed\n\n* Swapped ordering of redirects and authentication flow. (Pull #1267)\n* `.netrc` lookups should use host, not host+port. (Pull #1298)\n\n### Removed\n\n* The `URLLib3Transport` class no longer exists. We've published it instead as an example of [a custom transport class](https://gist.github.com/florimondmanca/d56764d78d748eb9f73165da388e546e). (Pull #1182)\n* Drop `request.timer` attribute, which was being used internally to set `response.elapsed`. (Pull #1249)\n* Drop `response.decoder` attribute, which was being used internally. (Pull #1276)\n* `Request.prepare()` is now a private method. (Pull #1284)\n* The `Headers.getlist()` method had previously been deprecated in favour of `Headers.get_list()`. It is now fully removed.\n* The `QueryParams.getlist()` method had previously been deprecated in favour of `QueryParams.get_list()`. It is now fully removed.\n* The `URL.is_ssl` property had previously been deprecated in favour of `URL.scheme == \"https\"`. It is now fully removed.\n* The `httpx.PoolLimits` class had previously been deprecated in favour of `httpx.Limits`. It is now fully removed.\n* The `max_keepalive` setting had previously been deprecated in favour of the more explicit `max_keepalive_connections`. It is now fully removed.\n* The verbose `httpx.Timeout(5.0, connect_timeout=60.0)` style had previously been deprecated in favour of `httpx.Timeout(5.0, connect=60.0)`. It is now fully removed.\n* Support for instantiating a timeout config missing some defaults, such as `httpx.Timeout(connect=60.0)`, had previously been deprecated in favour of enforcing a more explicit style, such as `httpx.Timeout(5.0, connect=60.0)`. This is now strictly enforced.\n\n## 0.14.3 (September 2nd, 2020)\n\n### Added\n\n* `httpx.Response()` may now be instantiated without a `request=...` parameter. Useful for some unit testing cases. (Pull #1238)\n* Add `103 Early Hints` and `425 Too Early` status codes. (Pull #1244)\n\n### Fixed\n\n* `DigestAuth` now handles responses that include multiple 'WWW-Authenticate' headers. (Pull #1240)\n* Call into transport `__enter__`/`__exit__` or `__aenter__`/`__aexit__` when client is used in a context manager style. (Pull #1218)\n\n## 0.14.2 (August 24th, 2020)\n\n### Added\n\n* Support `client.get(..., auth=None)` to bypass the default authentication on a client (sketched below). (Pull #1115)\n* Support `client.auth = ...` property setter. (Pull #1185)\n* Support `httpx.get(..., proxies=...)` on top-level request functions. (Pull #1198)\n* Display instances with nicer import styles. (Eg. <httpx.ReadTimeout ...>) (Pull #1155)\n* Support `cookies=[(key, value)]` list-of-two-tuples style usage. (Pull #1211)
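\n\nA minimal sketch of the per-request `auth=None` bypass and the list-of-two-tuples cookie style (the URL and credentials are placeholders):\n\n```python\nimport httpx\n\nclient = httpx.Client(\n    auth=(\"username\", \"password\"),\n    cookies=[(\"session\", \"abc123\"), (\"theme\", \"dark\")],\n)\n# Passing auth=None bypasses the client-level default for this one request.\nresponse = client.get(\"https://example.org/public\", auth=None)\n```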
\n\n### Fixed\n\n* Ensure that automatically included headers on a request may be modified. (Pull #1205)\n* Allow explicit `Content-Length` header on streaming requests. (Pull #1170)\n* Handle URL quoted usernames and passwords properly. (Pull #1159)\n* Use more consistent default for `HEAD` requests, setting `allow_redirects=True`. (Pull #1183)\n* If a transport error occurs while streaming the response, raise an `httpx` exception, not the underlying `httpcore` exception. (Pull #1190)\n* Include the underlying `httpcore` traceback, when transport exceptions occur. (Pull #1199)\n\n## 0.14.1 (August 11th, 2020)\n\n### Added\n\n* The `httpx.URL(...)` class now raises `httpx.InvalidURL` on invalid URLs, rather than exposing the underlying `rfc3986` exception. If a redirect response includes an invalid 'Location' header, then a `RemoteProtocolError` exception is raised, which will be associated with the request that caused it. (Pull #1163)\n\n### Fixed\n\n* Handling multiple `Set-Cookie` headers became broken in the 0.14.0 release, and is now resolved. (Pull #1156)\n\n## 0.14.0 (August 7th, 2020)\n\nThe 0.14 release includes a range of improvements to the public API, in preparation for our upcoming 1.0 release.\n\n* Our HTTP/2 support is now fully optional. **You now need to use `pip install httpx[http2]` if you want to include the HTTP/2 dependencies.**\n* Our HSTS support has now been removed. Rewriting URLs from `http` to `https` if the host is on the HSTS list can be beneficial in avoiding roundtrips to incorrectly formed URLs, but on balance we've decided to remove this feature, on the principle of least surprise. Most programmatic clients do not include HSTS support, and for now we're opting to remove our support for it.\n* Our exception hierarchy has been overhauled. Most users will want to stick with their existing `httpx.HTTPError` usage, but we've got a clearer overall structure now. See https://www.python-httpx.org/exceptions/ for more details.\n\nWhen upgrading you should be aware of the following public API changes. Note that deprecated usages will currently continue to function, but will issue warnings.\n\n* You should now use `httpx.codes` consistently instead of `httpx.StatusCodes`.\n* Usage of `httpx.Timeout()` should now always include an explicit default. Eg. `httpx.Timeout(None, pool=5.0)`.\n* When using `httpx.Timeout()`, we now have more concisely named keyword arguments. Eg. `read=5.0`, instead of `read_timeout=5.0` (a sketch follows at the end of this section).\n* Use `httpx.Limits()` instead of `httpx.PoolLimits()`, and `limits=...` instead of `pool_limits=...`.\n* The `httpx.Limits(max_keepalive=...)` argument is now deprecated in favour of a more explicit `httpx.Limits(max_keepalive_connections=...)`.\n* Keys used with `Client(proxies={...})` should now be in the style of `{\"http://\": ...}`, rather than `{\"http\": ...}`.\n* The multidict methods `Headers.getlist()` and `QueryParams.getlist()` are deprecated in favour of more consistent `.get_list()` variants.\n* The `URL.is_ssl` property is deprecated in favour of `URL.scheme == \"https\"`.\n* The `URL.join(relative_url=...)` method is now `URL.join(url=...)`. This change does not support warnings for the deprecated usage style.\n\nOne notable aspect of the 0.14.0 release is that it tightens up the public API for `httpx`, by ensuring that several internal attributes and methods have now become strictly private.\n\nThe following previously had nominally public names on the client, but were all undocumented and intended solely for internal usage.
 They are all now replaced with underscored names, and should not be relied on or accessed.\n\nThese changes should not affect users who have been working from the `httpx` documentation.\n\n* `.merge_url()`, `.merge_headers()`, `.merge_cookies()`, `.merge_queryparams()`\n* `.build_auth()`, `.build_redirect_request()`\n* `.redirect_method()`, `.redirect_url()`, `.redirect_headers()`, `.redirect_stream()`\n* `.send_handling_redirects()`, `.send_handling_auth()`, `.send_single_request()`\n* `.init_transport()`, `.init_proxy_transport()`\n* `.proxies`, `.transport`, `.netrc`, `.get_proxy_map()`\n\nSee pull requests #997, #1065, #1071.\n\nSome areas of API which were already on the deprecation path, and were raising warnings or errors in 0.13.x, have now been escalated to being fully removed.\n\n* Drop `ASGIDispatch`, `WSGIDispatch`, which have been replaced by `ASGITransport`, `WSGITransport`.\n* Drop `dispatch=...` on client, which has been replaced by `transport=...`.\n* Drop `soft_limit`, `hard_limit`, which have been replaced by `max_keepalive` and `max_connections`.\n* Drop `Response.stream` and `Response.raw`, which have been replaced by `.aiter_bytes` and `.aiter_raw`.\n* Drop `proxies=<transport instance>` in favor of `proxies=httpx.Proxy(...)`.\n\nSee pull requests #1057, #1058.\n\n### Added\n\n* Added dedicated exception class `httpx.HTTPStatusError` for `.raise_for_status()` exceptions. (Pull #1072)\n* Added `httpx.create_ssl_context()` helper function. (Pull #996)\n* Support for proxy exclusions like `proxies={\"https://www.example.com\": None}`. (Pull #1099)\n* Support `QueryParams(None)` and `client.params = None`. (Pull #1060)\n\n### Changed\n\n* Use `httpx.codes` consistently in favour of `httpx.StatusCodes`, which is placed into deprecation. (Pull #1088)\n* Usage of `httpx.Timeout()` should now always include an explicit default. Eg. `httpx.Timeout(None, pool=5.0)`. (Pull #1085)\n* Switch to more concise `httpx.Timeout()` keyword arguments. Eg. `read=5.0`, instead of `read_timeout=5.0`. (Pull #1111)\n* Use `httpx.Limits()` instead of `httpx.PoolLimits()`, and `limits=...` instead of `pool_limits=...`. (Pull #1113)\n* Keys used with `Client(proxies={...})` should now be in the style of `{\"http://\": ...}`, rather than `{\"http\": ...}`. (Pull #1127)\n* The multidict methods `Headers.getlist` and `QueryParams.getlist` are deprecated in favour of more consistent `.get_list()` variants. (Pull #1089)\n* `URL.port` becomes `Optional[int]`. Now only returns a port if one is explicitly included in the URL string. (Pull #1080)\n* The `URL(..., allow_relative=[bool])` parameter no longer exists. All URL instances may be relative. (Pull #1073)\n* Drop unnecessary `url.full_path = ...` property setter. (Pull #1069)\n* The `URL.join(relative_url=...)` method is now `URL.join(url=...)`. (Pull #1129)\n* The `URL.is_ssl` property is deprecated in favour of `URL.scheme == \"https\"`. (Pull #1128)\n\n### Fixed\n\n* Add missing `Response.next()` method. (Pull #1055)\n* Ensure all exception classes are exposed as public API. (Pull #1045)\n* Support multiple items with an identical field name in multipart encodings. (Pull #777)\n* Skip HSTS preloading on single-label domains. (Pull #1074)\n* Fixes for `Response.iter_lines()`. (Pull #1033, #1075)\n* Ignore permission errors when accessing `.netrc` files. (Pull #1104)\n* Allow bare hostnames in `HTTP_PROXY` etc... environment variables. (Pull #1120)\n* Setting `app=...` or `transport=...` bypasses any environment-based proxy defaults. (Pull #1122)\n* Fix handling of `.base_url` when a path component is included in the base URL. (Pull #1130)
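\n\nA minimal sketch of the timeout style referenced above (the values are arbitrary):\n\n```python\nimport httpx\n\n# An explicit default is now always required, with concisely named keyword arguments.\ntimeout = httpx.Timeout(5.0, connect=60.0)\nclient = httpx.Client(timeout=timeout)\n```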
\n\n---\n\n## 0.13.3 (May 29th, 2020)\n\n### Fixed\n\n* Include missing keepalive expiry configuration. (Pull #1005)\n* Improved error message when URL redirect has a custom scheme. (Pull #1002)\n\n## 0.13.2 (May 27th, 2020)\n\n### Fixed\n\n* Include explicit \"Content-Length: 0\" on POST, PUT, PATCH if no request body is used. (Pull #995)\n* Add `http2` option to `httpx.Client`. (Pull #982)\n* Tighten up API typing in places. (Pull #992, #999)\n\n## 0.13.1 (May 22nd, 2020)\n\n### Fixed\n\n* Fix pool options deprecation warning. (Pull #980)\n* Include `httpx.URLLib3ProxyTransport` in top-level API. (Pull #979)\n\n## 0.13.0 (May 22nd, 2020)\n\nThis release switches to `httpcore` for all the internal networking, which means:\n\n* We're using the same codebase for both our sync and async clients.\n* HTTP/2 support is now available with the sync client.\n* We no longer have a `urllib3` dependency for our sync client, although there is still an *optional* `URLLib3Transport` class.\n\nIt also means we've had to remove our UDS support, since maintaining that would have meant having to push back our work towards a 1.0 release, which isn't a trade-off we wanted to make.\n\nWe also now have [a public \"Transport API\"](https://www.python-httpx.org/advanced/#custom-transports), which you can use to implement custom transport implementations against. This formalises and replaces our previously private \"Dispatch API\".\n\n### Changed\n\n* Use `httpcore` for underlying HTTP transport. Drop `urllib3` requirement. (Pull #804, #967)\n* Rename pool limit options from `soft_limit`/`hard_limit` to `max_keepalive`/`max_connections`. (Pull #968)\n* The previous private \"Dispatch API\" has now been promoted to a public \"Transport API\". When customizing the transport use `transport=...`. The `ASGIDispatch` and `WSGIDispatch` class naming is deprecated in favour of `ASGITransport` and `WSGITransport`. (Pull #963)\n\n### Added\n\n* Added `URLLib3Transport` class for optional `urllib3` transport support. (Pull #804, #963)\n* Streaming multipart uploads. (Pull #857)\n* Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables\nand TRACE level logging. (Pull encode/httpcore#79)\n\n### Fixed\n\n* Performance improvement in brotli decoder. (Pull #906)\n* Proper warning level of deprecation notice in `Response.stream` and `Response.raw`. (Pull #908)\n* Fix support for generator based WSGI apps. (Pull #887)\n* Reuse of connections on HTTP/2 in close concurrency situations. (Pull encode/httpcore#81)\n* Honor HTTP/2 max concurrent streams settings (Pull encode/httpcore#89, encode/httpcore#90)\n* Fix bytes support in multipart uploads. (Pull #974)\n* Improve typing support for `files=...`. (Pull #976)\n\n### Removed\n\n* Dropped support for `Client(uds=...)` (Pull #804)\n\n## 0.13.0.dev2 (May 12th, 2020)\n\nThe 0.13.0.dev2 is a *pre-release* version. To install it, use `pip install httpx --pre`.\n\n### Added\n\n* Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables\nand TRACE level logging. (HTTPCore Pull #79)\n\n### Fixed\n\n* Reuse of connections on HTTP/2 in close concurrency situations. (HTTPCore Pull #81)\n* When using an `app=<ASGI app>` observe neater disconnect behaviour instead of sending empty body messages. (Pull #919)\n\n## 0.13.0.dev1 (May 6th, 2020)\n\nThe 0.13.0.dev1 is a *pre-release* version.
 To install it, use `pip install httpx --pre`.\n\n### Fixed\n\n* Passing `http2` flag to proxy dispatchers. (Pull #934)\n* Use [`httpcore` v0.8.3](https://github.com/encode/httpcore/releases/tag/0.8.3)\nwhich addresses problems in handling of headers when using proxies.\n\n## 0.13.0.dev0 (April 30th, 2020)\n\nThe 0.13.0.dev0 is a *pre-release* version. To install it, use `pip install httpx --pre`.\n\nThis release switches to `httpcore` for all the internal networking, which means:\n\n* We're using the same codebase for both our sync and async clients.\n* HTTP/2 support is now available with the sync client.\n* We no longer have a `urllib3` dependency for our sync client, although there is still an *optional* `URLLib3Dispatcher` class.\n\nIt also means we've had to remove our UDS support, since maintaining that would have meant having to push back our work towards a 1.0 release, which isn't a trade-off we wanted to make.\n\n### Changed\n\n* Use `httpcore` for underlying HTTP transport. Drop `urllib3` requirement. (Pull #804)\n\n### Added\n\n* Added `URLLib3Dispatcher` class for optional `urllib3` transport support. (Pull #804)\n* Streaming multipart uploads. (Pull #857)\n\n### Fixed\n\n* Performance improvement in brotli decoder. (Pull #906)\n* Proper warning level of deprecation notice in `Response.stream` and `Response.raw`. (Pull #908)\n* Fix support for generator based WSGI apps. (Pull #887)\n\n### Removed\n\n* Dropped support for `Client(uds=...)` (Pull #804)\n\n---\n\n## 0.12.1 (March 19th, 2020)\n\n### Fixed\n\n* Resolved packaging issue, where additional files were being included.\n\n## 0.12.0 (March 9th, 2020)\n\nThe 0.12 release tightens up the API expectations for `httpx` by switching to private module names to enforce better clarity around public API.\n\nAll imports of `httpx` should import from the top-level package only, such as `from httpx import Request`, rather than importing from privately namespaced modules such as `from httpx._models import Request`.\n\n### Added\n\n* Support making response body available to auth classes with `.requires_response_body`. (Pull #803)\n* Export `NetworkError` exception. (Pull #814)\n* Add support for `NO_PROXY` environment variable. (Pull #835)\n\n### Changed\n\n* Switched to private module names. (Pull #785)\n* Drop redirect looping detection and the `RedirectLoop` exception, instead using `TooManyRedirects`. (Pull #819)\n* Drop `backend=...` parameter on `AsyncClient`, in favour of always autodetecting `trio`/`asyncio`. (Pull #791)\n\n### Fixed\n\n* Support basic auth credentials in proxy URLs. (Pull #780)\n* Fix `httpx.Proxy(url, mode=\"FORWARD_ONLY\")` configuration. (Pull #788)\n* Fallback to setting headers as UTF-8 if no encoding is specified. (Pull #820)\n* Close proxy dispatch classes on client close. (Pull #826)\n* Support custom `cert` parameters even if `verify=False`. (Pull #796)\n* Don't support invalid dict-of-dicts form data in `data=...`. (Pull #811)\n\n---\n\n## 0.11.1 (January 17th, 2020)\n\n### Fixed\n\n* Fixed usage of `proxies=...` on `Client()`. (Pull #763)\n* Support both `zlib` and `deflate` style encodings on `Content-Encoding: deflate`. (Pull #758)\n* Fix for streaming a redirect response body with `allow_redirects=False`. (Pull #766)\n* Handle redirect with malformed Location headers missing host. (Pull #774)\n\n## 0.11.0 (January 9th, 2020)\n\nThe 0.11 release reintroduces our sync support, so that `httpx` now supports both a standard thread-concurrency API, and an async API.\n\nExisting async `httpx` users that are upgrading to 0.11 should ensure that:\n\n* Async codebases should always use a client instance to make requests, instead of the top-level API.\n* The async client is named as `httpx.AsyncClient()`, instead of `httpx.Client()`.\n* When instantiating proxy configurations use the `httpx.Proxy()` class, instead of the previous `httpx.HTTPProxy()`. This new configuration class works for configuring both sync and async clients.
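\n\nA minimal sketch of the sync/async split described above (the URL is a placeholder):\n\n```python\nimport asyncio\n\nimport httpx\n\nasync def main() -> None:\n    # httpx.AsyncClient() is the async client; httpx.Client() is now the sync client.\n    async with httpx.AsyncClient() as client:\n        response = await client.get(\"https://example.org/\")\n        print(response.status_code)\n\nasyncio.run(main())\n```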
\n\nWe believe the API is now pretty much stable, and are aiming for a 1.0 release sometime on or before April 2020.\n\n### Changed\n\n- Top level API such as `httpx.get(url, ...)`, `httpx.post(url, ...)`, `httpx.request(method, url, ...)` becomes synchronous.\n- Added `httpx.Client()` for synchronous clients, with `httpx.AsyncClient` being used for async clients.\n- Switched to `proxies=httpx.Proxy(...)` for proxy configuration.\n- Network connection errors are wrapped in `httpx.NetworkError`, rather than exposing lower-level exception types directly.\n\n### Removed\n\n- The `request.url.origin` property and `httpx.Origin` class are no longer available.\n- The per-request `cert`, `verify`, and `trust_env` arguments are escalated from raising errors if used, to no longer being available. These arguments should be used on a per-client instance instead, or in the top-level API.\n- The `stream` argument has escalated from raising an error when used, to no longer being available. Use the `client.stream(...)` or `httpx.stream()` streaming API instead.\n\n### Fixed\n\n- Redirect loop detection matches against `(method, url)` rather than `url`. (Pull #734)\n\n---\n\n## 0.10.1 (December 31st, 2019)\n\n### Fixed\n\n- Fix issue with concurrent connection acquisition. (Pull #700)\n- Fix write error on closing HTTP/2 connections. (Pull #699)\n\n## 0.10.0 (December 29th, 2019)\n\nThe 0.10.0 release makes some changes that will allow us to support both sync and async interfaces.\n\nIn particular with streaming responses the `response.read()` method becomes `response.aread()`, and the `response.close()` method becomes `response.aclose()`.\n\nIf following redirects explicitly the `response.next()` method becomes `response.anext()`.\n\n### Fixed\n\n- End HTTP/2 streams immediately on no-body requests, rather than sending an empty body message. (Pull #682)\n- Improve typing for `Response.request`: switch from `Optional[Request]` to `Request`. (Pull #666)\n- `Response.elapsed` now reflects the entire download time. (Pull #687, #692)\n\n### Changed\n\n- Added `AsyncClient` as a synonym for `Client`. (Pull #680)\n- Switch to `response.aread()` for conditionally reading streaming responses. (Pull #674)\n- Switch to `response.aclose()` and `client.aclose()` for explicit closing. (Pull #674, #675)\n- Switch to `response.anext()` for resolving the next redirect response. (Pull #676)\n\n### Removed\n\n- When using a client instance, the per-request usage of `verify`, `cert`, and `trust_env` has now escalated from raising a warning to raising an error. You should set these arguments on the client instead. (Pull #617)\n- Removed the undocumented `request.read()`, since end users should not require it.\n\n---\n\n## 0.9.5 (December 20th, 2019)\n\n### Fixed\n\n- Fix Host header and HSTS rewrites when an explicit `:80` port is included in URL.
 (Pull #649)\n- Query Params on the URL string are merged with any `params=...` argument. (Pull #653)\n- More robust behavior when closing connections. (Pull #640)\n- More robust behavior when handling HTTP/2 headers with trailing whitespace. (Pull #637)\n- Allow any explicit `Content-Type` header to take precedence over the encoding default. (Pull #633)\n\n## 0.9.4 (December 12th, 2019)\n\n### Fixed\n\n- Added expiry to Keep-Alive connections, resolving issues with acquiring connections. (Pull #627)\n- Increased flow control windows on HTTP/2, resolving download speed issues. (Pull #629)\n\n## 0.9.3 (December 7th, 2019)\n\n### Fixed\n\n- Fixed HTTP/2 with autodetection backend. (Pull #614)\n\n## 0.9.2 (December 7th, 2019)\n\n* Released due to packaging build artifact.\n\n## 0.9.1 (December 6th, 2019)\n\n* Released due to packaging build artifact.\n\n## 0.9.0 (December 6th, 2019)\n\nThe 0.9 release brings some major new features, including:\n\n* A new streaming API.\n* Autodetection of either asyncio or trio.\n* Nicer timeout configuration.\n* HTTP/2 support off by default, but can be enabled.\n\nWe've also removed all private types from the top-level package export.\n\nIn order to ensure you are only ever working with public API you should make\nsure to only import the top-level package eg. `import httpx`, rather than\nimporting modules within the package.\n\n### Added\n\n- Added concurrency backend autodetection. (Pull #585)\n- Added `Client(backend='trio')` and `Client(backend='asyncio')` API. (Pull #585)\n- Added `response.stream_lines()` API. (Pull #575)\n- Added `response.is_error` API. (Pull #574)\n- Added support for `timeout=Timeout(5.0, connect_timeout=60.0)` styles. (Pull #593)\n\n### Fixed\n\n- Requests or Clients with `timeout=None` now correctly always disable timeouts. (Pull #592)\n- Request 'Authorization' headers now have priority over `.netrc` authentication info. (Commit 095b691)\n- Files without a filename no longer set a Content-Type in multipart data. (Commit ed94950)\n\n### Changed\n\n- Added `httpx.stream()` API. Using `stream=True` now results in a warning. (Pull #600, #610)\n- HTTP/2 support is switched to \"off by default\", but can be enabled explicitly. (Pull #584)\n- Switched to `Client(http2=True)` API from `Client(http_versions=[\"HTTP/1.1\", \"HTTP/2\"])`. (Pull #586)\n- Removed all private types from the top-level package export. (Pull #608)\n- The SSL configuration settings of `verify`, `cert`, and `trust_env` now raise warnings if used per-request when using a Client instance. They should always be set on the Client instance itself. (Pull #597)\n- Use plain strings \"TUNNEL_ONLY\" or \"FORWARD_ONLY\" on the HTTPProxy `proxy_mode` argument. The `HTTPProxyMode` enum still exists, but its usage will raise warnings. (#610)\n- Pool timeouts are now on the timeout configuration, not the pool limits configuration. (Pull #563)\n- The timeout configuration is now named `httpx.Timeout(...)`, not `httpx.TimeoutConfig(...)`. The old version currently remains as a synonym for backwards compatibility. (Pull #591)\n\n---\n\n## 0.8.0 (November 27, 2019)\n\n### Removed\n\n- The synchronous API has been removed, in order to allow us to fundamentally change how we approach supporting both sync and async variants. (See #588 for more details.)\n\n---\n\n## 0.7.8 (November 17, 2019)\n\n### Added\n\n- Add support for proxy tunnels for Python 3.6 + asyncio. (Pull #521)\n\n## 0.7.7 (November 15, 2019)\n\n### Fixed\n\n- Resolve an issue with cookies behavior on redirect requests. 
(Pull #529)\n\n### Added\n\n- Add request/response DEBUG logs. (Pull #502)\n- Use TRACE log level for low level info. (Pull #500)\n\n## 0.7.6 (November 2, 2019)\n\n### Removed\n\n- Drop `proxies` parameter from the high-level API. (Pull #485)\n\n### Fixed\n\n- Tweak multipart files: omit null filenames, add support for `str` file contents. (Pull #482)\n- Cache NETRC authentication per-client. (Pull #400)\n- Rely on `getproxies` for all proxy environment variables. (Pull #470)\n- Wait for the `asyncio` stream to close when closing a connection. (Pull #494)\n\n## 0.7.5 (October 10, 2019)\n\n### Added\n\n- Allow lists of values to be passed to `params`. (Pull #386)\n- `ASGIDispatch`, `WSGIDispatch` are now available in the `httpx.dispatch` namespace. (Pull #407)\n- `HTTPError` is now available in the `httpx` namespace. (Pull #421)\n- Add support for `start_tls()` to the Trio concurrency backend. (Pull #467)\n\n### Fixed\n\n- Username and password are no longer included in the `Host` header when basic authentication\n credentials are supplied via the URL. (Pull #417)\n\n### Removed\n\n- The `.delete()` function no longer has `json`, `data`, or `files` parameters\n to match the expected semantics of the `DELETE` method. (Pull #408)\n- Removed the `trio` extra. Trio support is detected automatically. (Pull #390)\n\n## 0.7.4 (September 25, 2019)\n\n### Added\n\n- Add Trio concurrency backend. (Pull #276)\n- Add `params` parameter to `Client` for setting default query parameters. (Pull #372)\n- Add support for `SSL_CERT_FILE` and `SSL_CERT_DIR` environment variables. (Pull #307)\n- Add debug logging to calls into ASGI apps. (Pull #371)\n- Add debug logging to SSL configuration. (Pull #378)\n\n### Fixed\n\n- Fix a bug when using `Client` without timeouts in Python 3.6. (Pull #383)\n- Propagate `Client` configuration to HTTP proxies. (Pull #377)\n\n## 0.7.3 (September 20, 2019)\n\n### Added\n\n- HTTP Proxy support. (Pulls #259, #353)\n- Add Digest authentication. (Pull #332)\n- Add `.build_request()` method to `Client` and `AsyncClient`. (Pull #319)\n- Add `.elapsed` property on responses. (Pull #351)\n- Add support for `SSLKEYLOGFILE` in Python 3.8b4+. (Pull #301)\n\n### Removed\n\n- Drop NPN support for HTTP version negotiation. (Pull #314)\n\n### Fixed\n\n- Fix distribution of type annotations for mypy (Pull #361).\n- Set `Host` header when redirecting cross-origin. (Pull #321)\n- Drop `Content-Length` headers on `GET` redirects. (Pull #310)\n- Raise `KeyError` if header isn't found in `Headers`. (Pull #324)\n- Raise `NotRedirectResponse` in `response.next()` if there is no redirection to perform. (Pull #297)\n- Fix bug in calculating the HTTP/2 maximum frame size. (Pull #153)\n\n## 0.7.2 (August 28, 2019)\n\n- Enforce using `httpx.AsyncioBackend` for the synchronous client. (Pull #232)\n- `httpx.ConnectionPool` will properly release a dropped connection. (Pull #230)\n- Remove the `raise_app_exceptions` argument from `Client`. (Pull #238)\n- `DecodeError` will no longer be raised for an empty body encoded with Brotli. (Pull #237)\n- Added `http_versions` parameter to `Client`. (Pull #250)\n- Only use HTTP/1.1 on short-lived connections like `httpx.get()`. (Pull #284)\n- Convert `Client.cookies` and `Client.headers` when set as a property. (Pull #274)\n- Setting `HTTPX_DEBUG=1` enables debug logging on all requests. (Pull #277)\n\n## 0.7.1 (August 18, 2019)\n\n- Include files with source distribution to be installable. 
(Pull #233)\n\n## 0.7.0 (August 17, 2019)\n\n- Add the `trust_env` property to `BaseClient`. (Pull #187)\n- Add the `links` property to `BaseResponse`. (Pull #211)\n- Accept `ssl.SSLContext` instances into `SSLConfig(verify=...)`. (Pull #215)\n- Add `Response.stream_text()` with incremental encoding detection. (Pull #183)\n- Properly updated the `Host` header when a redirect changes the origin. (Pull #199)\n- Ignore invalid `Content-Encoding` headers. (Pull #196)\n- Use `~/.netrc` and `~/_netrc` files by default when `trust_env=True`. (Pull #189)\n- Create exception base class `HTTPError` with `request` and `response` properties. (Pull #162)\n- Add HSTS preload list checking within `BaseClient` to upgrade HTTP URLs to HTTPS. (Pull #184)\n- Switch IDNA encoding from IDNA 2003 to IDNA 2008. (Pull #161)\n- Expose base classes for alternate concurrency backends. (Pull #178)\n- Improve Multipart parameter encoding. (Pull #167)\n- Add the `headers` property to `BaseClient`. (Pull #159)\n- Add support for Google's `brotli` library. (Pull #156)\n- Remove deprecated TLS versions (TLSv1 and TLSv1.1) from default `SSLConfig`. (Pull #155)\n- Fix `URL.join(...)` to work similarly to RFC 3986 URL joining. (Pull #144)\n\n---\n\n## 0.6.8 (July 25, 2019)\n\n- Check for disconnections when searching for an available\n connection in `ConnectionPool.keepalive_connections` (Pull #145)\n- Allow string comparison for `URL` objects (Pull #139)\n- Add HTTP status codes 418 and 451 (Pull #135)\n- Add support for client certificate passwords (Pull #118)\n- Enable post-handshake client cert authentication for TLSv1.3 (Pull #118)\n- Disable using `commonName` for hostname checking for OpenSSL 1.1.0+ (Pull #118)\n- Detect encoding for `Response.json()` (Pull #116)\n\n## 0.6.7 (July 8, 2019)\n\n- Check for connection aliveness on re-acquiry (Pull #111)\n\n## 0.6.6 (July 3, 2019)\n\n- Improve `USER_AGENT` (Pull #110)\n- Add `Connection: keep-alive` by default to HTTP/1.1 connections. (Pull #110)\n\n## 0.6.5 (June 27, 2019)\n\n- Include `Host` header by default. (Pull #109)\n- Improve HTTP protocol detection. (Pull #107)\n\n## 0.6.4 (June 25, 2019)\n\n- Implement read and write timeouts (Pull #104)\n\n## 0.6.3 (June 24, 2019)\n\n- Handle early connection closes (Pull #103)\n\n## 0.6.2 (June 23, 2019)\n\n- Use urllib3's `DEFAULT_CIPHERS` for the `SSLConfig` object. (Pull #100)\n\n## 0.6.1 (June 21, 2019)\n\n- Add support for setting a `base_url` on the `Client`.\n\n## 0.6.0 (June 21, 2019)\n\n- Honor `local_flow_control_window` for HTTP/2 connections (Pull #98)\n",
"path": "CHANGELOG.md"
},
{
"content": "import datetime\nimport email.message\nimport json as jsonlib\nimport typing\nimport urllib.request\nfrom collections.abc import Mapping\nfrom http.cookiejar import Cookie, CookieJar\n\nfrom ._content import ByteStream, UnattachedStream, encode_request, encode_response\nfrom ._decoders import (\n SUPPORTED_DECODERS,\n ByteChunker,\n ContentDecoder,\n IdentityDecoder,\n LineDecoder,\n MultiDecoder,\n TextChunker,\n TextDecoder,\n)\nfrom ._exceptions import (\n CookieConflict,\n HTTPStatusError,\n RequestNotRead,\n ResponseNotRead,\n StreamClosed,\n StreamConsumed,\n request_context,\n)\nfrom ._multipart import get_multipart_boundary_from_content_type\nfrom ._status_codes import codes\nfrom ._types import (\n AsyncByteStream,\n CookieTypes,\n HeaderTypes,\n QueryParamTypes,\n RequestContent,\n RequestData,\n RequestExtensions,\n RequestFiles,\n ResponseContent,\n ResponseExtensions,\n SyncByteStream,\n)\nfrom ._urls import URL\nfrom ._utils import (\n guess_json_utf,\n is_known_encoding,\n normalize_header_key,\n normalize_header_value,\n obfuscate_sensitive_headers,\n parse_content_type_charset,\n parse_header_links,\n)\n\n\nclass Headers(typing.MutableMapping[str, str]):\n \"\"\"\n HTTP headers, as a case-insensitive multi-dict.\n \"\"\"\n\n def __init__(\n self,\n headers: typing.Optional[HeaderTypes] = None,\n encoding: typing.Optional[str] = None,\n ) -> None:\n if headers is None:\n self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]]\n elif isinstance(headers, Headers):\n self._list = list(headers._list)\n elif isinstance(headers, Mapping):\n self._list = [\n (\n normalize_header_key(k, lower=False, encoding=encoding),\n normalize_header_key(k, lower=True, encoding=encoding),\n normalize_header_value(v, encoding),\n )\n for k, v in headers.items()\n ]\n else:\n self._list = [\n (\n normalize_header_key(k, lower=False, encoding=encoding),\n normalize_header_key(k, lower=True, encoding=encoding),\n normalize_header_value(v, encoding),\n )\n for k, v in headers\n ]\n\n self._encoding = encoding\n\n @property\n def encoding(self) -> str:\n \"\"\"\n Header encoding is mandated as ascii, but we allow fallbacks to utf-8\n or iso-8859-1.\n \"\"\"\n if self._encoding is None:\n for encoding in [\"ascii\", \"utf-8\"]:\n for key, value in self.raw:\n try:\n key.decode(encoding)\n value.decode(encoding)\n except UnicodeDecodeError:\n break\n else:\n # The else block runs if 'break' did not occur, meaning\n # all values fitted the encoding.\n self._encoding = encoding\n break\n else:\n # The ISO-8859-1 encoding covers all 256 code points in a byte,\n # so will never raise decode errors.\n self._encoding = \"iso-8859-1\"\n return self._encoding\n\n @encoding.setter\n def encoding(self, value: str) -> None:\n self._encoding = value\n\n @property\n def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]:\n \"\"\"\n Returns a list of the raw header items, as byte pairs.\n \"\"\"\n return [(raw_key, value) for raw_key, _, value in self._list]\n\n def keys(self) -> typing.KeysView[str]:\n return {key.decode(self.encoding): None for _, key, value in self._list}.keys()\n\n def values(self) -> typing.ValuesView[str]:\n values_dict: typing.Dict[str, str] = {}\n for _, key, value in self._list:\n str_key = key.decode(self.encoding)\n str_value = value.decode(self.encoding)\n if str_key in values_dict:\n values_dict[str_key] += f\", {str_value}\"\n else:\n values_dict[str_key] = str_value\n return values_dict.values()\n\n def items(self) -> typing.ItemsView[str, str]:\n \"\"\"\n 
Return `(key, value)` items of headers. Concatenate headers\n into a single comma separated value when a key occurs multiple times.\n \"\"\"\n values_dict: typing.Dict[str, str] = {}\n for _, key, value in self._list:\n str_key = key.decode(self.encoding)\n str_value = value.decode(self.encoding)\n if str_key in values_dict:\n values_dict[str_key] += f\", {str_value}\"\n else:\n values_dict[str_key] = str_value\n return values_dict.items()\n\n def multi_items(self) -> typing.List[typing.Tuple[str, str]]:\n \"\"\"\n Return a list of `(key, value)` pairs of headers. Allow multiple\n occurrences of the same key without concatenating into a single\n comma separated value.\n \"\"\"\n return [\n (key.decode(self.encoding), value.decode(self.encoding))\n for _, key, value in self._list\n ]\n\n def get(self, key: str, default: typing.Any = None) -> typing.Any:\n \"\"\"\n Return a header value. If multiple occurrences of the header occur\n then concatenate them together with commas.\n \"\"\"\n try:\n return self[key]\n except KeyError:\n return default\n\n def get_list(self, key: str, split_commas: bool = False) -> typing.List[str]:\n \"\"\"\n Return a list of all header values for a given key.\n If `split_commas=True` is passed, then any comma separated header\n values are split into multiple return strings.\n \"\"\"\n get_header_key = key.lower().encode(self.encoding)\n\n values = [\n item_value.decode(self.encoding)\n for _, item_key, item_value in self._list\n if item_key.lower() == get_header_key\n ]\n\n if not split_commas:\n return values\n\n split_values = []\n for value in values:\n split_values.extend([item.strip() for item in value.split(\",\")])\n return split_values\n\n def update(self, headers: typing.Optional[HeaderTypes] = None) -> None: # type: ignore\n headers = Headers(headers)\n for key in headers.keys():\n if key in self:\n self.pop(key)\n self._list.extend(headers._list)\n\n def copy(self) -> \"Headers\":\n return Headers(self, encoding=self.encoding)\n\n def __getitem__(self, key: str) -> str:\n \"\"\"\n Return a single header value.\n\n If there are multiple headers with the same key, then we concatenate\n them with commas. 
See: https://tools.ietf.org/html/rfc7230#section-3.2.2\n \"\"\"\n normalized_key = key.lower().encode(self.encoding)\n\n items = [\n header_value.decode(self.encoding)\n for _, header_key, header_value in self._list\n if header_key == normalized_key\n ]\n\n if items:\n return \", \".join(items)\n\n raise KeyError(key)\n\n def __setitem__(self, key: str, value: str) -> None:\n \"\"\"\n Set the header `key` to `value`, removing any duplicate entries.\n Retains insertion order.\n \"\"\"\n set_key = key.encode(self._encoding or \"utf-8\")\n set_value = value.encode(self._encoding or \"utf-8\")\n lookup_key = set_key.lower()\n\n found_indexes = [\n idx\n for idx, (_, item_key, _) in enumerate(self._list)\n if item_key == lookup_key\n ]\n\n for idx in reversed(found_indexes[1:]):\n del self._list[idx]\n\n if found_indexes:\n idx = found_indexes[0]\n self._list[idx] = (set_key, lookup_key, set_value)\n else:\n self._list.append((set_key, lookup_key, set_value))\n\n def __delitem__(self, key: str) -> None:\n \"\"\"\n Remove the header `key`.\n \"\"\"\n del_key = key.lower().encode(self.encoding)\n\n pop_indexes = [\n idx\n for idx, (_, item_key, _) in enumerate(self._list)\n if item_key.lower() == del_key\n ]\n\n if not pop_indexes:\n raise KeyError(key)\n\n for idx in reversed(pop_indexes):\n del self._list[idx]\n\n def __contains__(self, key: typing.Any) -> bool:\n header_key = key.lower().encode(self.encoding)\n return header_key in [key for _, key, _ in self._list]\n\n def __iter__(self) -> typing.Iterator[typing.Any]:\n return iter(self.keys())\n\n def __len__(self) -> int:\n return len(self._list)\n\n def __eq__(self, other: typing.Any) -> bool:\n try:\n other_headers = Headers(other)\n except ValueError:\n return False\n\n self_list = [(key, value) for _, key, value in self._list]\n other_list = [(key, value) for _, key, value in other_headers._list]\n return sorted(self_list) == sorted(other_list)\n\n def __repr__(self) -> str:\n class_name = self.__class__.__name__\n\n encoding_str = \"\"\n if self.encoding != \"ascii\":\n encoding_str = f\", encoding={self.encoding!r}\"\n\n as_list = list(obfuscate_sensitive_headers(self.multi_items()))\n as_dict = dict(as_list)\n\n no_duplicate_keys = len(as_dict) == len(as_list)\n if no_duplicate_keys:\n return f\"{class_name}({as_dict!r}{encoding_str})\"\n return f\"{class_name}({as_list!r}{encoding_str})\"\n\n\nclass Request:\n def __init__(\n self,\n method: typing.Union[str, bytes],\n url: typing.Union[\"URL\", str],\n *,\n params: typing.Optional[QueryParamTypes] = None,\n headers: typing.Optional[HeaderTypes] = None,\n cookies: typing.Optional[CookieTypes] = None,\n content: typing.Optional[RequestContent] = None,\n data: typing.Optional[RequestData] = None,\n files: typing.Optional[RequestFiles] = None,\n json: typing.Optional[typing.Any] = None,\n stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None,\n extensions: typing.Optional[RequestExtensions] = None,\n ):\n self.method = (\n method.decode(\"ascii\").upper()\n if isinstance(method, bytes)\n else method.upper()\n )\n self.url = URL(url)\n if params is not None:\n self.url = self.url.copy_merge_params(params=params)\n self.headers = Headers(headers)\n self.extensions = {} if extensions is None else extensions\n\n if cookies:\n Cookies(cookies).set_cookie_header(self)\n\n if stream is None:\n content_type: typing.Optional[str] = self.headers.get(\"content-type\")\n headers, stream = encode_request(\n content=content,\n data=data,\n files=files,\n json=json,\n 
boundary=get_multipart_boundary_from_content_type(\n content_type=content_type.encode(self.headers.encoding)\n if content_type\n else None\n ),\n )\n self._prepare(headers)\n self.stream = stream\n # Load the request body, except for streaming content.\n if isinstance(stream, ByteStream):\n self.read()\n else:\n # There's an important distinction between `Request(content=...)`,\n # and `Request(stream=...)`.\n #\n # Using `content=...` implies automatically populated `Host` and content\n # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`.\n #\n # Using `stream=...` will not automatically include *any* auto-populated headers.\n #\n # As an end-user you don't really need `stream=...`. It's only\n # useful when:\n #\n # * Preserving the request stream when copying requests, eg for redirects.\n # * Creating request instances on the *server-side* of the transport API.\n self.stream = stream\n\n def _prepare(self, default_headers: typing.Dict[str, str]) -> None:\n for key, value in default_headers.items():\n # Ignore Transfer-Encoding if the Content-Length has been set explicitly.\n if key.lower() == \"transfer-encoding\" and \"Content-Length\" in self.headers:\n continue\n self.headers.setdefault(key, value)\n\n auto_headers: typing.List[typing.Tuple[bytes, bytes]] = []\n\n has_host = \"Host\" in self.headers\n has_content_length = (\n \"Content-Length\" in self.headers or \"Transfer-Encoding\" in self.headers\n )\n\n if not has_host and self.url.host:\n auto_headers.append((b\"Host\", self.url.netloc))\n if not has_content_length and self.method in (\"POST\", \"PUT\", \"PATCH\"):\n auto_headers.append((b\"Content-Length\", b\"0\"))\n\n self.headers = Headers(auto_headers + self.headers.raw)\n\n @property\n def content(self) -> bytes:\n if not hasattr(self, \"_content\"):\n raise RequestNotRead()\n return self._content\n\n def read(self) -> bytes:\n \"\"\"\n Read and return the request content.\n \"\"\"\n if not hasattr(self, \"_content\"):\n assert isinstance(self.stream, typing.Iterable)\n self._content = b\"\".join(self.stream)\n if not isinstance(self.stream, ByteStream):\n # If a streaming request has been read entirely into memory, then\n # we can replace the stream with a raw bytes implementation,\n # to ensure that any non-replayable streams can still be used.\n self.stream = ByteStream(self._content)\n return self._content\n\n async def aread(self) -> bytes:\n \"\"\"\n Read and return the request content.\n \"\"\"\n if not hasattr(self, \"_content\"):\n assert isinstance(self.stream, typing.AsyncIterable)\n self._content = b\"\".join([part async for part in self.stream])\n if not isinstance(self.stream, ByteStream):\n # If a streaming request has been read entirely into memory, then\n # we can replace the stream with a raw bytes implementation,\n # to ensure that any non-replayable streams can still be used.\n self.stream = ByteStream(self._content)\n return self._content\n\n def __repr__(self) -> str:\n class_name = self.__class__.__name__\n url = str(self.url)\n return f\"<{class_name}({self.method!r}, {url!r})>\"\n\n def __getstate__(self) -> typing.Dict[str, typing.Any]:\n return {\n name: value\n for name, value in self.__dict__.items()\n if name not in [\"extensions\", \"stream\"]\n }\n\n def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None:\n for name, value in state.items():\n setattr(self, name, value)\n self.extensions = {}\n self.stream = UnattachedStream()\n\n\nclass Response:\n def __init__(\n self,\n status_code: int,\n *,\n headers: 
typing.Optional[HeaderTypes] = None,\n content: typing.Optional[ResponseContent] = None,\n text: typing.Optional[str] = None,\n html: typing.Optional[str] = None,\n json: typing.Any = None,\n stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None,\n request: typing.Optional[Request] = None,\n extensions: typing.Optional[ResponseExtensions] = None,\n history: typing.Optional[typing.List[\"Response\"]] = None,\n default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = \"utf-8\",\n ):\n self.status_code = status_code\n self.headers = Headers(headers)\n\n self._request: typing.Optional[Request] = request\n\n # When follow_redirects=False and a redirect is received,\n # the client will set `response.next_request`.\n self.next_request: typing.Optional[Request] = None\n\n self.extensions: ResponseExtensions = {} if extensions is None else extensions\n self.history = [] if history is None else list(history)\n\n self.is_closed = False\n self.is_stream_consumed = False\n\n self.default_encoding = default_encoding\n\n if stream is None:\n headers, stream = encode_response(content, text, html, json)\n self._prepare(headers)\n self.stream = stream\n if isinstance(stream, ByteStream):\n # Load the response body, except for streaming content.\n self.read()\n else:\n # There's an important distinction between `Response(content=...)`,\n # and `Response(stream=...)`.\n #\n # Using `content=...` implies automatically populated content headers,\n # of either `Content-Length: ...` or `Transfer-Encoding: chunked`.\n #\n # Using `stream=...` will not automatically include any content headers.\n #\n # As an end-user you don't really need `stream=...`. It's only\n # useful when creating response instances having received a stream\n # from the transport API.\n self.stream = stream\n\n self._num_bytes_downloaded = 0\n\n def _prepare(self, default_headers: typing.Dict[str, str]) -> None:\n for key, value in default_headers.items():\n # Ignore Transfer-Encoding if the Content-Length has been set explicitly.\n if key.lower() == \"transfer-encoding\" and \"content-length\" in self.headers:\n continue\n self.headers.setdefault(key, value)\n\n @property\n def elapsed(self) -> datetime.timedelta:\n \"\"\"\n Returns the time taken for the complete request/response\n cycle to complete.\n \"\"\"\n if not hasattr(self, \"_elapsed\"):\n raise RuntimeError(\n \"'.elapsed' may only be accessed after the response \"\n \"has been read or closed.\"\n )\n return self._elapsed\n\n @elapsed.setter\n def elapsed(self, elapsed: datetime.timedelta) -> None:\n self._elapsed = elapsed\n\n @property\n def request(self) -> Request:\n \"\"\"\n Returns the request instance associated to the current response.\n \"\"\"\n if self._request is None:\n raise RuntimeError(\n \"The request instance has not been set on this response.\"\n )\n return self._request\n\n @request.setter\n def request(self, value: Request) -> None:\n self._request = value\n\n @property\n def http_version(self) -> str:\n try:\n http_version: bytes = self.extensions[\"http_version\"]\n except KeyError:\n return \"HTTP/1.1\"\n else:\n return http_version.decode(\"ascii\", errors=\"ignore\")\n\n @property\n def reason_phrase(self) -> str:\n try:\n reason_phrase: bytes = self.extensions[\"reason_phrase\"]\n except KeyError:\n return codes.get_reason_phrase(self.status_code)\n else:\n return reason_phrase.decode(\"ascii\", errors=\"ignore\")\n\n @property\n def url(self) -> URL:\n \"\"\"\n Returns the URL for which the request was made.\n \"\"\"\n return 
self.request.url\n\n @property\n def content(self) -> bytes:\n if not hasattr(self, \"_content\"):\n raise ResponseNotRead()\n return self._content\n\n @property\n def text(self) -> str:\n if not hasattr(self, \"_text\"):\n content = self.content\n if not content:\n self._text = \"\"\n else:\n decoder = TextDecoder(encoding=self.encoding or \"utf-8\")\n self._text = \"\".join([decoder.decode(self.content), decoder.flush()])\n return self._text\n\n @property\n def encoding(self) -> typing.Optional[str]:\n \"\"\"\n Return an encoding to use for decoding the byte content into text.\n The priority for determining this is given by...\n\n * `.encoding = <>` has been set explicitly.\n * The encoding as specified by the charset parameter in the Content-Type header.\n * The encoding as determined by `default_encoding`, which may either be\n a string like \"utf-8\" indicating the encoding to use, or may be a callable\n which enables charset autodetection.\n \"\"\"\n if not hasattr(self, \"_encoding\"):\n encoding = self.charset_encoding\n if encoding is None or not is_known_encoding(encoding):\n if isinstance(self.default_encoding, str):\n encoding = self.default_encoding\n elif hasattr(self, \"_content\"):\n encoding = self.default_encoding(self._content)\n self._encoding = encoding or \"utf-8\"\n return self._encoding\n\n @encoding.setter\n def encoding(self, value: str) -> None:\n self._encoding = value\n\n @property\n def charset_encoding(self) -> typing.Optional[str]:\n \"\"\"\n Return the encoding, as specified by the Content-Type header.\n \"\"\"\n content_type = self.headers.get(\"Content-Type\")\n if content_type is None:\n return None\n\n return parse_content_type_charset(content_type)\n\n def _get_content_decoder(self) -> ContentDecoder:\n \"\"\"\n Returns a decoder instance which can be used to decode the raw byte\n content, depending on the Content-Encoding used in the response.\n \"\"\"\n if not hasattr(self, \"_decoder\"):\n decoders: typing.List[ContentDecoder] = []\n values = self.headers.get_list(\"content-encoding\", split_commas=True)\n for value in values:\n value = value.strip().lower()\n try:\n decoder_cls = SUPPORTED_DECODERS[value]\n decoders.append(decoder_cls())\n except KeyError:\n continue\n\n if len(decoders) == 1:\n self._decoder = decoders[0]\n elif len(decoders) > 1:\n self._decoder = MultiDecoder(children=decoders)\n else:\n self._decoder = IdentityDecoder()\n\n return self._decoder\n\n @property\n def is_informational(self) -> bool:\n \"\"\"\n A property which is `True` for 1xx status codes, `False` otherwise.\n \"\"\"\n return codes.is_informational(self.status_code)\n\n @property\n def is_success(self) -> bool:\n \"\"\"\n A property which is `True` for 2xx status codes, `False` otherwise.\n \"\"\"\n return codes.is_success(self.status_code)\n\n @property\n def is_redirect(self) -> bool:\n \"\"\"\n A property which is `True` for 3xx status codes, `False` otherwise.\n\n Note that not all responses with a 3xx status code indicate a URL redirect.\n\n Use `response.has_redirect_location` to determine responses with a properly\n formed URL redirection.\n \"\"\"\n return codes.is_redirect(self.status_code)\n\n @property\n def is_client_error(self) -> bool:\n \"\"\"\n A property which is `True` for 4xx status codes, `False` otherwise.\n \"\"\"\n return codes.is_client_error(self.status_code)\n\n @property\n def is_server_error(self) -> bool:\n \"\"\"\n A property which is `True` for 5xx status codes, `False` otherwise.\n \"\"\"\n return 
codes.is_server_error(self.status_code)\n\n @property\n def is_error(self) -> bool:\n \"\"\"\n A property which is `True` for 4xx and 5xx status codes, `False` otherwise.\n \"\"\"\n return codes.is_error(self.status_code)\n\n @property\n def has_redirect_location(self) -> bool:\n \"\"\"\n Returns True for 3xx responses with a properly formed URL redirection,\n `False` otherwise.\n \"\"\"\n return (\n self.status_code\n in (\n # 301 (Cacheable redirect. Method may change to GET.)\n codes.MOVED_PERMANENTLY,\n # 302 (Uncacheable redirect. Method may change to GET.)\n codes.FOUND,\n # 303 (Client should make a GET or HEAD request.)\n codes.SEE_OTHER,\n # 307 (Equiv. 302, but retain method)\n codes.TEMPORARY_REDIRECT,\n # 308 (Equiv. 301, but retain method)\n codes.PERMANENT_REDIRECT,\n )\n and \"Location\" in self.headers\n )\n\n def raise_for_status(self) -> \"Response\":\n \"\"\"\n Raise the `HTTPStatusError` if one occurred.\n \"\"\"\n request = self._request\n if request is None:\n raise RuntimeError(\n \"Cannot call `raise_for_status` as the request \"\n \"instance has not been set on this response.\"\n )\n\n if self.is_success:\n return self\n\n if self.has_redirect_location:\n message = (\n \"{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\\n\"\n \"Redirect location: '{0.headers[location]}'\\n\"\n \"For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}\"\n )\n else:\n message = (\n \"{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\\n\"\n \"For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}\"\n )\n\n status_class = self.status_code // 100\n error_types = {\n 1: \"Informational response\",\n 3: \"Redirect response\",\n 4: \"Client error\",\n 5: \"Server error\",\n }\n error_type = error_types.get(status_class, \"Invalid status code\")\n message = message.format(self, error_type=error_type)\n raise HTTPStatusError(message, request=request, response=self)\n\n def json(self, **kwargs: typing.Any) -> typing.Any:\n if self.charset_encoding is None and self.content and len(self.content) > 3:\n encoding = guess_json_utf(self.content)\n if encoding is not None:\n return jsonlib.loads(self.content.decode(encoding), **kwargs)\n return jsonlib.loads(self.text, **kwargs)\n\n @property\n def cookies(self) -> \"Cookies\":\n if not hasattr(self, \"_cookies\"):\n self._cookies = Cookies()\n self._cookies.extract_cookies(self)\n return self._cookies\n\n @property\n def links(self) -> typing.Dict[typing.Optional[str], typing.Dict[str, str]]:\n \"\"\"\n Returns the parsed header links of the response, if any\n \"\"\"\n header = self.headers.get(\"link\")\n ldict = {}\n if header:\n links = parse_header_links(header)\n for link in links:\n key = link.get(\"rel\") or link.get(\"url\")\n ldict[key] = link\n return ldict\n\n @property\n def num_bytes_downloaded(self) -> int:\n return self._num_bytes_downloaded\n\n def __repr__(self) -> str:\n return f\"<Response [{self.status_code} {self.reason_phrase}]>\"\n\n def __getstate__(self) -> typing.Dict[str, typing.Any]:\n return {\n name: value\n for name, value in self.__dict__.items()\n if name not in [\"extensions\", \"stream\", \"is_closed\", \"_decoder\"]\n }\n\n def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None:\n for name, value in state.items():\n setattr(self, name, value)\n self.is_closed = True\n self.extensions = {}\n self.stream = UnattachedStream()\n\n def read(self) -> bytes:\n \"\"\"\n Read and 
return the response content.\n \"\"\"\n if not hasattr(self, \"_content\"):\n self._content = b\"\".join(self.iter_bytes())\n return self._content\n\n def iter_bytes(\n self, chunk_size: typing.Optional[int] = None\n ) -> typing.Iterator[bytes]:\n \"\"\"\n A byte-iterator over the decoded response content.\n This allows us to handle gzip, deflate, and brotli encoded responses.\n \"\"\"\n if hasattr(self, \"_content\"):\n chunk_size = len(self._content) if chunk_size is None else chunk_size\n for i in range(0, len(self._content), max(chunk_size, 1)):\n yield self._content[i : i + chunk_size]\n else:\n decoder = self._get_content_decoder()\n chunker = ByteChunker(chunk_size=chunk_size)\n with request_context(request=self._request):\n for raw_bytes in self.iter_raw():\n decoded = decoder.decode(raw_bytes)\n for chunk in chunker.decode(decoded):\n yield chunk\n decoded = decoder.flush()\n for chunk in chunker.decode(decoded):\n yield chunk # pragma: no cover\n for chunk in chunker.flush():\n yield chunk\n\n def iter_text(\n self, chunk_size: typing.Optional[int] = None\n ) -> typing.Iterator[str]:\n \"\"\"\n A str-iterator over the decoded response content\n that handles both gzip, deflate, etc but also detects the content's\n string encoding.\n \"\"\"\n decoder = TextDecoder(encoding=self.encoding or \"utf-8\")\n chunker = TextChunker(chunk_size=chunk_size)\n with request_context(request=self._request):\n for byte_content in self.iter_bytes():\n text_content = decoder.decode(byte_content)\n for chunk in chunker.decode(text_content):\n yield chunk\n text_content = decoder.flush()\n for chunk in chunker.decode(text_content):\n yield chunk\n for chunk in chunker.flush():\n yield chunk\n\n def iter_lines(self) -> typing.Iterator[str]:\n decoder = LineDecoder()\n with request_context(request=self._request):\n for text in self.iter_text():\n for line in decoder.decode(text):\n yield line\n for line in decoder.flush():\n yield line\n\n def iter_raw(\n self, chunk_size: typing.Optional[int] = None\n ) -> typing.Iterator[bytes]:\n \"\"\"\n A byte-iterator over the raw response content.\n \"\"\"\n if self.is_stream_consumed:\n raise StreamConsumed()\n if self.is_closed:\n raise StreamClosed()\n if not isinstance(self.stream, SyncByteStream):\n raise RuntimeError(\"Attempted to call a sync iterator on an async stream.\")\n\n self.is_stream_consumed = True\n self._num_bytes_downloaded = 0\n chunker = ByteChunker(chunk_size=chunk_size)\n\n with request_context(request=self._request):\n for raw_stream_bytes in self.stream:\n self._num_bytes_downloaded += len(raw_stream_bytes)\n for chunk in chunker.decode(raw_stream_bytes):\n yield chunk\n\n for chunk in chunker.flush():\n yield chunk\n\n self.close()\n\n def close(self) -> None:\n \"\"\"\n Close the response and release the connection.\n Automatically called if the response body is read to completion.\n \"\"\"\n if not isinstance(self.stream, SyncByteStream):\n raise RuntimeError(\"Attempted to call an sync close on an async stream.\")\n\n if not self.is_closed:\n self.is_closed = True\n with request_context(request=self._request):\n self.stream.close()\n\n async def aread(self) -> bytes:\n \"\"\"\n Read and return the response content.\n \"\"\"\n if not hasattr(self, \"_content\"):\n self._content = b\"\".join([part async for part in self.aiter_bytes()])\n return self._content\n\n async def aiter_bytes(\n self, chunk_size: typing.Optional[int] = None\n ) -> typing.AsyncIterator[bytes]:\n \"\"\"\n A byte-iterator over the decoded response content.\n This 
allows us to handle gzip, deflate, and brotli encoded responses.\n \"\"\"\n if hasattr(self, \"_content\"):\n chunk_size = len(self._content) if chunk_size is None else chunk_size\n for i in range(0, len(self._content), max(chunk_size, 1)):\n yield self._content[i : i + chunk_size]\n else:\n decoder = self._get_content_decoder()\n chunker = ByteChunker(chunk_size=chunk_size)\n with request_context(request=self._request):\n async for raw_bytes in self.aiter_raw():\n decoded = decoder.decode(raw_bytes)\n for chunk in chunker.decode(decoded):\n yield chunk\n decoded = decoder.flush()\n for chunk in chunker.decode(decoded):\n yield chunk # pragma: no cover\n for chunk in chunker.flush():\n yield chunk\n\n async def aiter_text(\n self, chunk_size: typing.Optional[int] = None\n ) -> typing.AsyncIterator[str]:\n \"\"\"\n A str-iterator over the decoded response content\n that handles both gzip, deflate, etc but also detects the content's\n string encoding.\n \"\"\"\n decoder = TextDecoder(encoding=self.encoding or \"utf-8\")\n chunker = TextChunker(chunk_size=chunk_size)\n with request_context(request=self._request):\n async for byte_content in self.aiter_bytes():\n text_content = decoder.decode(byte_content)\n for chunk in chunker.decode(text_content):\n yield chunk\n text_content = decoder.flush()\n for chunk in chunker.decode(text_content):\n yield chunk\n for chunk in chunker.flush():\n yield chunk\n\n async def aiter_lines(self) -> typing.AsyncIterator[str]:\n decoder = LineDecoder()\n with request_context(request=self._request):\n async for text in self.aiter_text():\n for line in decoder.decode(text):\n yield line\n for line in decoder.flush():\n yield line\n\n async def aiter_raw(\n self, chunk_size: typing.Optional[int] = None\n ) -> typing.AsyncIterator[bytes]:\n \"\"\"\n A byte-iterator over the raw response content.\n \"\"\"\n if self.is_stream_consumed:\n raise StreamConsumed()\n if self.is_closed:\n raise StreamClosed()\n if not isinstance(self.stream, AsyncByteStream):\n raise RuntimeError(\"Attempted to call an async iterator on an sync stream.\")\n\n self.is_stream_consumed = True\n self._num_bytes_downloaded = 0\n chunker = ByteChunker(chunk_size=chunk_size)\n\n with request_context(request=self._request):\n async for raw_stream_bytes in self.stream:\n self._num_bytes_downloaded += len(raw_stream_bytes)\n for chunk in chunker.decode(raw_stream_bytes):\n yield chunk\n\n for chunk in chunker.flush():\n yield chunk\n\n await self.aclose()\n\n async def aclose(self) -> None:\n \"\"\"\n Close the response and release the connection.\n Automatically called if the response body is read to completion.\n \"\"\"\n if not isinstance(self.stream, AsyncByteStream):\n raise RuntimeError(\"Attempted to call an async close on an sync stream.\")\n\n if not self.is_closed:\n self.is_closed = True\n with request_context(request=self._request):\n await self.stream.aclose()\n\n\nclass Cookies(typing.MutableMapping[str, str]):\n \"\"\"\n HTTP Cookies, as a mutable mapping.\n \"\"\"\n\n def __init__(self, cookies: typing.Optional[CookieTypes] = None) -> None:\n if cookies is None or isinstance(cookies, dict):\n self.jar = CookieJar()\n if isinstance(cookies, dict):\n for key, value in cookies.items():\n self.set(key, value)\n elif isinstance(cookies, list):\n self.jar = CookieJar()\n for key, value in cookies:\n self.set(key, value)\n elif isinstance(cookies, Cookies):\n self.jar = CookieJar()\n for cookie in cookies.jar:\n self.jar.set_cookie(cookie)\n else:\n self.jar = cookies\n\n def 
extract_cookies(self, response: Response) -> None:\n \"\"\"\n Loads any cookies based on the response `Set-Cookie` headers.\n \"\"\"\n urllib_response = self._CookieCompatResponse(response)\n urllib_request = self._CookieCompatRequest(response.request)\n\n self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore\n\n def set_cookie_header(self, request: Request) -> None:\n \"\"\"\n Sets an appropriate 'Cookie:' HTTP header on the `Request`.\n \"\"\"\n urllib_request = self._CookieCompatRequest(request)\n self.jar.add_cookie_header(urllib_request)\n\n def set(self, name: str, value: str, domain: str = \"\", path: str = \"/\") -> None:\n \"\"\"\n Set a cookie value by name. May optionally include domain and path.\n \"\"\"\n kwargs = {\n \"version\": 0,\n \"name\": name,\n \"value\": value,\n \"port\": None,\n \"port_specified\": False,\n \"domain\": domain,\n \"domain_specified\": bool(domain),\n \"domain_initial_dot\": domain.startswith(\".\"),\n \"path\": path,\n \"path_specified\": bool(path),\n \"secure\": False,\n \"expires\": None,\n \"discard\": True,\n \"comment\": None,\n \"comment_url\": None,\n \"rest\": {\"HttpOnly\": None},\n \"rfc2109\": False,\n }\n cookie = Cookie(**kwargs) # type: ignore\n self.jar.set_cookie(cookie)\n\n def get( # type: ignore\n self,\n name: str,\n default: typing.Optional[str] = None,\n domain: typing.Optional[str] = None,\n path: typing.Optional[str] = None,\n ) -> typing.Optional[str]:\n \"\"\"\n Get a cookie by name. May optionally include domain and path\n in order to specify exactly which cookie to retrieve.\n \"\"\"\n value = None\n for cookie in self.jar:\n if cookie.name == name:\n if domain is None or cookie.domain == domain:\n if path is None or cookie.path == path:\n if value is not None:\n message = f\"Multiple cookies exist with name={name}\"\n raise CookieConflict(message)\n value = cookie.value\n\n if value is None:\n return default\n return value\n\n def delete(\n self,\n name: str,\n domain: typing.Optional[str] = None,\n path: typing.Optional[str] = None,\n ) -> None:\n \"\"\"\n Delete a cookie by name. May optionally include domain and path\n in order to specify exactly which cookie to delete.\n \"\"\"\n if domain is not None and path is not None:\n return self.jar.clear(domain, path, name)\n\n remove = [\n cookie\n for cookie in self.jar\n if cookie.name == name\n and (domain is None or cookie.domain == domain)\n and (path is None or cookie.path == path)\n ]\n\n for cookie in remove:\n self.jar.clear(cookie.domain, cookie.path, cookie.name)\n\n def clear(\n self, domain: typing.Optional[str] = None, path: typing.Optional[str] = None\n ) -> None:\n \"\"\"\n Delete all cookies. 
Optionally include a domain and path in\n order to only delete a subset of all the cookies.\n \"\"\"\n args = []\n if domain is not None:\n args.append(domain)\n if path is not None:\n assert domain is not None\n args.append(path)\n self.jar.clear(*args)\n\n def update(self, cookies: typing.Optional[CookieTypes] = None) -> None: # type: ignore\n cookies = Cookies(cookies)\n for cookie in cookies.jar:\n self.jar.set_cookie(cookie)\n\n def __setitem__(self, name: str, value: str) -> None:\n return self.set(name, value)\n\n def __getitem__(self, name: str) -> str:\n value = self.get(name)\n if value is None:\n raise KeyError(name)\n return value\n\n def __delitem__(self, name: str) -> None:\n return self.delete(name)\n\n def __len__(self) -> int:\n return len(self.jar)\n\n def __iter__(self) -> typing.Iterator[str]:\n return (cookie.name for cookie in self.jar)\n\n def __bool__(self) -> bool:\n for _ in self.jar:\n return True\n return False\n\n def __repr__(self) -> str:\n cookies_repr = \", \".join(\n [\n f\"<Cookie {cookie.name}={cookie.value} for {cookie.domain} />\"\n for cookie in self.jar\n ]\n )\n\n return f\"<Cookies[{cookies_repr}]>\"\n\n class _CookieCompatRequest(urllib.request.Request):\n \"\"\"\n Wraps a `Request` instance up in a compatibility interface suitable\n for use with `CookieJar` operations.\n \"\"\"\n\n def __init__(self, request: Request) -> None:\n super().__init__(\n url=str(request.url),\n headers=dict(request.headers),\n method=request.method,\n )\n self.request = request\n\n def add_unredirected_header(self, key: str, value: str) -> None:\n super().add_unredirected_header(key, value)\n self.request.headers[key] = value\n\n class _CookieCompatResponse:\n \"\"\"\n Wraps a `Request` instance up in a compatibility interface suitable\n for use with `CookieJar` operations.\n \"\"\"\n\n def __init__(self, response: Response):\n self.response = response\n\n def info(self) -> email.message.Message:\n info = email.message.Message()\n for key, value in self.response.headers.multi_items():\n # Note that setting `info[key]` here is an \"append\" operation,\n # not a \"replace\" operation.\n # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__\n info[key] = value\n return info\n",
"path": "httpx/_models.py"
},
{
"content": "import json\nimport pickle\nimport typing\n\nimport chardet\nimport pytest\n\nimport httpx\n\n\nclass StreamingBody:\n def __iter__(self):\n yield b\"Hello, \"\n yield b\"world!\"\n\n\ndef streaming_body() -> typing.Iterator[bytes]:\n yield b\"Hello, \"\n yield b\"world!\"\n\n\nasync def async_streaming_body() -> typing.AsyncIterator[bytes]:\n yield b\"Hello, \"\n yield b\"world!\"\n\n\ndef autodetect(content):\n return chardet.detect(content).get(\"encoding\")\n\n\ndef test_response():\n response = httpx.Response(\n 200,\n content=b\"Hello, world!\",\n request=httpx.Request(\"GET\", \"https://example.org\"),\n )\n\n assert response.status_code == 200\n assert response.reason_phrase == \"OK\"\n assert response.text == \"Hello, world!\"\n assert response.request.method == \"GET\"\n assert response.request.url == \"https://example.org\"\n assert not response.is_error\n\n\ndef test_response_content():\n response = httpx.Response(200, content=\"Hello, world!\")\n\n assert response.status_code == 200\n assert response.reason_phrase == \"OK\"\n assert response.text == \"Hello, world!\"\n assert response.headers == {\"Content-Length\": \"13\"}\n\n\ndef test_response_text():\n response = httpx.Response(200, text=\"Hello, world!\")\n\n assert response.status_code == 200\n assert response.reason_phrase == \"OK\"\n assert response.text == \"Hello, world!\"\n assert response.headers == {\n \"Content-Length\": \"13\",\n \"Content-Type\": \"text/plain; charset=utf-8\",\n }\n\n\ndef test_response_html():\n response = httpx.Response(200, html=\"<html><body>Hello, world!</html></body>\")\n\n assert response.status_code == 200\n assert response.reason_phrase == \"OK\"\n assert response.text == \"<html><body>Hello, world!</html></body>\"\n assert response.headers == {\n \"Content-Length\": \"39\",\n \"Content-Type\": \"text/html; charset=utf-8\",\n }\n\n\ndef test_response_json():\n response = httpx.Response(200, json={\"hello\": \"world\"})\n\n assert response.status_code == 200\n assert response.reason_phrase == \"OK\"\n assert response.json() == {\"hello\": \"world\"}\n assert response.headers == {\n \"Content-Length\": \"18\",\n \"Content-Type\": \"application/json\",\n }\n\n\ndef test_raise_for_status():\n request = httpx.Request(\"GET\", \"https://example.org\")\n\n # 2xx status codes are not an error.\n response = httpx.Response(200, request=request)\n response.raise_for_status()\n\n # 1xx status codes are informational responses.\n response = httpx.Response(101, request=request)\n assert response.is_informational\n with pytest.raises(httpx.HTTPStatusError) as exc_info:\n response.raise_for_status()\n assert str(exc_info.value) == (\n \"Informational response '101 Switching Protocols' for url 'https://example.org'\\n\"\n \"For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/101\"\n )\n\n # 3xx status codes are redirections.\n headers = {\"location\": \"https://other.org\"}\n response = httpx.Response(303, headers=headers, request=request)\n assert response.is_redirect\n with pytest.raises(httpx.HTTPStatusError) as exc_info:\n response.raise_for_status()\n assert str(exc_info.value) == (\n \"Redirect response '303 See Other' for url 'https://example.org'\\n\"\n \"Redirect location: 'https://other.org'\\n\"\n \"For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/303\"\n )\n\n # 4xx status codes are a client error.\n response = httpx.Response(403, request=request)\n assert response.is_client_error\n assert response.is_error\n 
with pytest.raises(httpx.HTTPStatusError) as exc_info:\n response.raise_for_status()\n assert str(exc_info.value) == (\n \"Client error '403 Forbidden' for url 'https://example.org'\\n\"\n \"For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/403\"\n )\n\n # 5xx status codes are a server error.\n response = httpx.Response(500, request=request)\n assert response.is_server_error\n assert response.is_error\n with pytest.raises(httpx.HTTPStatusError) as exc_info:\n response.raise_for_status()\n assert str(exc_info.value) == (\n \"Server error '500 Internal Server Error' for url 'https://example.org'\\n\"\n \"For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500\"\n )\n\n # Calling .raise_for_status without setting a request instance is\n # not valid. Should raise a runtime error.\n response = httpx.Response(200)\n with pytest.raises(RuntimeError):\n response.raise_for_status()\n\n\ndef test_response_repr():\n response = httpx.Response(\n 200,\n content=b\"Hello, world!\",\n )\n assert repr(response) == \"<Response [200 OK]>\"\n\n\ndef test_response_content_type_encoding():\n \"\"\"\n Use the charset encoding in the Content-Type header if possible.\n \"\"\"\n headers = {\"Content-Type\": \"text-plain; charset=latin-1\"}\n content = \"Latin 1: ÿ\".encode(\"latin-1\")\n response = httpx.Response(\n 200,\n content=content,\n headers=headers,\n )\n assert response.text == \"Latin 1: ÿ\"\n assert response.encoding == \"latin-1\"\n\n\ndef test_response_default_to_utf8_encoding():\n \"\"\"\n Default to utf-8 encoding if there is no Content-Type header.\n \"\"\"\n content = \"おはようございます。\".encode(\"utf-8\")\n response = httpx.Response(\n 200,\n content=content,\n )\n assert response.text == \"おはようございます。\"\n assert response.encoding == \"utf-8\"\n\n\ndef test_response_fallback_to_utf8_encoding():\n \"\"\"\n Fallback to utf-8 if we get an invalid charset in the Content-Type header.\n \"\"\"\n headers = {\"Content-Type\": \"text-plain; charset=invalid-codec-name\"}\n content = \"おはようございます。\".encode(\"utf-8\")\n response = httpx.Response(\n 200,\n content=content,\n headers=headers,\n )\n assert response.text == \"おはようございます。\"\n assert response.encoding == \"utf-8\"\n\n\ndef test_response_no_charset_with_ascii_content():\n \"\"\"\n A response with ascii encoded content should decode correctly,\n even with no charset specified.\n \"\"\"\n content = b\"Hello, world!\"\n headers = {\"Content-Type\": \"text/plain\"}\n response = httpx.Response(\n 200,\n content=content,\n headers=headers,\n )\n assert response.status_code == 200\n assert response.encoding == \"utf-8\"\n assert response.text == \"Hello, world!\"\n\n\ndef test_response_no_charset_with_utf8_content():\n \"\"\"\n A response with UTF-8 encoded content should decode correctly,\n even with no charset specified.\n \"\"\"\n content = \"Unicode Snowman: ☃\".encode(\"utf-8\")\n headers = {\"Content-Type\": \"text/plain\"}\n response = httpx.Response(\n 200,\n content=content,\n headers=headers,\n )\n assert response.text == \"Unicode Snowman: ☃\"\n assert response.encoding == \"utf-8\"\n\n\ndef test_response_no_charset_with_iso_8859_1_content():\n \"\"\"\n A response with ISO 8859-1 encoded content should decode correctly,\n even with no charset specified, if autodetect is enabled.\n \"\"\"\n content = \"Accented: Österreich abcdefghijklmnopqrstuzwxyz\".encode(\"iso-8859-1\")\n headers = {\"Content-Type\": \"text/plain\"}\n response = httpx.Response(\n 200, content=content, 
headers=headers, default_encoding=autodetect\n )\n assert response.text == \"Accented: Österreich abcdefghijklmnopqrstuzwxyz\"\n assert response.charset_encoding is None\n\n\ndef test_response_no_charset_with_cp_1252_content():\n \"\"\"\n A response with Windows 1252 encoded content should decode correctly,\n even with no charset specified, if autodetect is enabled.\n \"\"\"\n content = \"Euro Currency: € abcdefghijklmnopqrstuzwxyz\".encode(\"cp1252\")\n headers = {\"Content-Type\": \"text/plain\"}\n response = httpx.Response(\n 200, content=content, headers=headers, default_encoding=autodetect\n )\n assert response.text == \"Euro Currency: € abcdefghijklmnopqrstuzwxyz\"\n assert response.charset_encoding is None\n\n\ndef test_response_non_text_encoding():\n \"\"\"\n Default to attempting utf-8 encoding for non-text content-type headers.\n \"\"\"\n headers = {\"Content-Type\": \"image/png\"}\n response = httpx.Response(\n 200,\n content=b\"xyz\",\n headers=headers,\n )\n assert response.text == \"xyz\"\n assert response.encoding == \"utf-8\"\n\n\ndef test_response_set_explicit_encoding():\n headers = {\n \"Content-Type\": \"text-plain; charset=utf-8\"\n } # Deliberately incorrect charset\n response = httpx.Response(\n 200,\n content=\"Latin 1: ÿ\".encode(\"latin-1\"),\n headers=headers,\n )\n response.encoding = \"latin-1\"\n assert response.text == \"Latin 1: ÿ\"\n assert response.encoding == \"latin-1\"\n\n\ndef test_response_force_encoding():\n response = httpx.Response(\n 200,\n content=\"Snowman: ☃\".encode(\"utf-8\"),\n )\n response.encoding = \"iso-8859-1\"\n assert response.status_code == 200\n assert response.reason_phrase == \"OK\"\n assert response.text == \"Snowman: â\\x98\\x83\"\n assert response.encoding == \"iso-8859-1\"\n\n\ndef test_read():\n response = httpx.Response(\n 200,\n content=b\"Hello, world!\",\n )\n\n assert response.status_code == 200\n assert response.text == \"Hello, world!\"\n assert response.encoding == \"utf-8\"\n assert response.is_closed\n\n content = response.read()\n\n assert content == b\"Hello, world!\"\n assert response.content == b\"Hello, world!\"\n assert response.is_closed\n\n\ndef test_empty_read():\n response = httpx.Response(200)\n\n assert response.status_code == 200\n assert response.text == \"\"\n assert response.encoding == \"utf-8\"\n assert response.is_closed\n\n content = response.read()\n\n assert content == b\"\"\n assert response.content == b\"\"\n assert response.is_closed\n\n\n@pytest.mark.anyio\nasync def test_aread():\n response = httpx.Response(\n 200,\n content=b\"Hello, world!\",\n )\n\n assert response.status_code == 200\n assert response.text == \"Hello, world!\"\n assert response.encoding == \"utf-8\"\n assert response.is_closed\n\n content = await response.aread()\n\n assert content == b\"Hello, world!\"\n assert response.content == b\"Hello, world!\"\n assert response.is_closed\n\n\n@pytest.mark.anyio\nasync def test_empty_aread():\n response = httpx.Response(200)\n\n assert response.status_code == 200\n assert response.text == \"\"\n assert response.encoding == \"utf-8\"\n assert response.is_closed\n\n content = await response.aread()\n\n assert content == b\"\"\n assert response.content == b\"\"\n assert response.is_closed\n\n\ndef test_iter_raw():\n response = httpx.Response(\n 200,\n content=streaming_body(),\n )\n\n raw = b\"\"\n for part in response.iter_raw():\n raw += part\n assert raw == b\"Hello, world!\"\n\n\ndef test_iter_raw_with_chunksize():\n response = httpx.Response(200, content=streaming_body())\n 
parts = [part for part in response.iter_raw(chunk_size=5)]\n assert parts == [b\"Hello\", b\", wor\", b\"ld!\"]\n\n response = httpx.Response(200, content=streaming_body())\n parts = [part for part in response.iter_raw(chunk_size=7)]\n assert parts == [b\"Hello, \", b\"world!\"]\n\n response = httpx.Response(200, content=streaming_body())\n parts = [part for part in response.iter_raw(chunk_size=13)]\n assert parts == [b\"Hello, world!\"]\n\n response = httpx.Response(200, content=streaming_body())\n parts = [part for part in response.iter_raw(chunk_size=20)]\n assert parts == [b\"Hello, world!\"]\n\n\ndef test_iter_raw_doesnt_return_empty_chunks():\n def streaming_body_with_empty_chunks() -> typing.Iterator[bytes]:\n yield b\"Hello, \"\n yield b\"\"\n yield b\"world!\"\n yield b\"\"\n\n response = httpx.Response(200, content=streaming_body_with_empty_chunks())\n\n parts = [part for part in response.iter_raw()]\n assert parts == [b\"Hello, \", b\"world!\"]\n\n\ndef test_iter_raw_on_iterable():\n response = httpx.Response(\n 200,\n content=StreamingBody(),\n )\n\n raw = b\"\"\n for part in response.iter_raw():\n raw += part\n assert raw == b\"Hello, world!\"\n\n\ndef test_iter_raw_on_async():\n response = httpx.Response(\n 200,\n content=async_streaming_body(),\n )\n\n with pytest.raises(RuntimeError):\n [part for part in response.iter_raw()]\n\n\ndef test_close_on_async():\n response = httpx.Response(\n 200,\n content=async_streaming_body(),\n )\n\n with pytest.raises(RuntimeError):\n response.close()\n\n\ndef test_iter_raw_increments_updates_counter():\n response = httpx.Response(200, content=streaming_body())\n\n num_downloaded = response.num_bytes_downloaded\n for part in response.iter_raw():\n assert len(part) == (response.num_bytes_downloaded - num_downloaded)\n num_downloaded = response.num_bytes_downloaded\n\n\n@pytest.mark.anyio\nasync def test_aiter_raw():\n response = httpx.Response(200, content=async_streaming_body())\n\n raw = b\"\"\n async for part in response.aiter_raw():\n raw += part\n assert raw == b\"Hello, world!\"\n\n\n@pytest.mark.anyio\nasync def test_aiter_raw_with_chunksize():\n response = httpx.Response(200, content=async_streaming_body())\n\n parts = [part async for part in response.aiter_raw(chunk_size=5)]\n assert parts == [b\"Hello\", b\", wor\", b\"ld!\"]\n\n response = httpx.Response(200, content=async_streaming_body())\n\n parts = [part async for part in response.aiter_raw(chunk_size=13)]\n assert parts == [b\"Hello, world!\"]\n\n response = httpx.Response(200, content=async_streaming_body())\n\n parts = [part async for part in response.aiter_raw(chunk_size=20)]\n assert parts == [b\"Hello, world!\"]\n\n\n@pytest.mark.anyio\nasync def test_aiter_raw_on_sync():\n response = httpx.Response(\n 200,\n content=streaming_body(),\n )\n\n with pytest.raises(RuntimeError):\n [part async for part in response.aiter_raw()]\n\n\n@pytest.mark.anyio\nasync def test_aclose_on_sync():\n response = httpx.Response(\n 200,\n content=streaming_body(),\n )\n\n with pytest.raises(RuntimeError):\n await response.aclose()\n\n\n@pytest.mark.anyio\nasync def test_aiter_raw_increments_updates_counter():\n response = httpx.Response(200, content=async_streaming_body())\n\n num_downloaded = response.num_bytes_downloaded\n async for part in response.aiter_raw():\n assert len(part) == (response.num_bytes_downloaded - num_downloaded)\n num_downloaded = response.num_bytes_downloaded\n\n\ndef test_iter_bytes():\n response = httpx.Response(200, content=b\"Hello, world!\")\n\n content = b\"\"\n 
for part in response.iter_bytes():\n content += part\n assert content == b\"Hello, world!\"\n\n\ndef test_iter_bytes_with_chunk_size():\n response = httpx.Response(200, content=streaming_body())\n parts = [part for part in response.iter_bytes(chunk_size=5)]\n assert parts == [b\"Hello\", b\", wor\", b\"ld!\"]\n\n response = httpx.Response(200, content=streaming_body())\n parts = [part for part in response.iter_bytes(chunk_size=13)]\n assert parts == [b\"Hello, world!\"]\n\n response = httpx.Response(200, content=streaming_body())\n parts = [part for part in response.iter_bytes(chunk_size=20)]\n assert parts == [b\"Hello, world!\"]\n\n\ndef test_iter_bytes_with_empty_response():\n response = httpx.Response(200, content=b\"\")\n parts = [part for part in response.iter_bytes()]\n assert parts == []\n\n\ndef test_iter_bytes_doesnt_return_empty_chunks():\n def streaming_body_with_empty_chunks() -> typing.Iterator[bytes]:\n yield b\"Hello, \"\n yield b\"\"\n yield b\"world!\"\n yield b\"\"\n\n response = httpx.Response(200, content=streaming_body_with_empty_chunks())\n\n parts = [part for part in response.iter_bytes()]\n assert parts == [b\"Hello, \", b\"world!\"]\n\n\n@pytest.mark.anyio\nasync def test_aiter_bytes():\n response = httpx.Response(\n 200,\n content=b\"Hello, world!\",\n )\n\n content = b\"\"\n async for part in response.aiter_bytes():\n content += part\n assert content == b\"Hello, world!\"\n\n\n@pytest.mark.anyio\nasync def test_aiter_bytes_with_chunk_size():\n response = httpx.Response(200, content=async_streaming_body())\n parts = [part async for part in response.aiter_bytes(chunk_size=5)]\n assert parts == [b\"Hello\", b\", wor\", b\"ld!\"]\n\n response = httpx.Response(200, content=async_streaming_body())\n parts = [part async for part in response.aiter_bytes(chunk_size=13)]\n assert parts == [b\"Hello, world!\"]\n\n response = httpx.Response(200, content=async_streaming_body())\n parts = [part async for part in response.aiter_bytes(chunk_size=20)]\n assert parts == [b\"Hello, world!\"]\n\n\ndef test_iter_text():\n response = httpx.Response(\n 200,\n content=b\"Hello, world!\",\n )\n\n content = \"\"\n for part in response.iter_text():\n content += part\n assert content == \"Hello, world!\"\n\n\ndef test_iter_text_with_chunk_size():\n response = httpx.Response(200, content=b\"Hello, world!\")\n parts = [part for part in response.iter_text(chunk_size=5)]\n assert parts == [\"Hello\", \", wor\", \"ld!\"]\n\n response = httpx.Response(200, content=b\"Hello, world!!\")\n parts = [part for part in response.iter_text(chunk_size=7)]\n assert parts == [\"Hello, \", \"world!!\"]\n\n response = httpx.Response(200, content=b\"Hello, world!\")\n parts = [part for part in response.iter_text(chunk_size=7)]\n assert parts == [\"Hello, \", \"world!\"]\n\n response = httpx.Response(200, content=b\"Hello, world!\")\n parts = [part for part in response.iter_text(chunk_size=13)]\n assert parts == [\"Hello, world!\"]\n\n response = httpx.Response(200, content=b\"Hello, world!\")\n parts = [part for part in response.iter_text(chunk_size=20)]\n assert parts == [\"Hello, world!\"]\n\n\n@pytest.mark.anyio\nasync def test_aiter_text():\n response = httpx.Response(\n 200,\n content=b\"Hello, world!\",\n )\n\n content = \"\"\n async for part in response.aiter_text():\n content += part\n assert content == \"Hello, world!\"\n\n\n@pytest.mark.anyio\nasync def test_aiter_text_with_chunk_size():\n response = httpx.Response(200, content=b\"Hello, world!\")\n parts = [part async for part in 
response.aiter_text(chunk_size=5)]\n assert parts == [\"Hello\", \", wor\", \"ld!\"]\n\n response = httpx.Response(200, content=b\"Hello, world!\")\n parts = [part async for part in response.aiter_text(chunk_size=13)]\n assert parts == [\"Hello, world!\"]\n\n response = httpx.Response(200, content=b\"Hello, world!\")\n parts = [part async for part in response.aiter_text(chunk_size=20)]\n assert parts == [\"Hello, world!\"]\n\n\ndef test_iter_lines():\n response = httpx.Response(\n 200,\n content=b\"Hello,\\nworld!\",\n )\n content = [line for line in response.iter_lines()]\n assert content == [\"Hello,\", \"world!\"]\n\n\n@pytest.mark.anyio\nasync def test_aiter_lines():\n response = httpx.Response(\n 200,\n content=b\"Hello,\\nworld!\",\n )\n\n content = []\n async for line in response.aiter_lines():\n content.append(line)\n assert content == [\"Hello,\", \"world!\"]\n\n\ndef test_sync_streaming_response():\n response = httpx.Response(\n 200,\n content=streaming_body(),\n )\n\n assert response.status_code == 200\n assert not response.is_closed\n\n content = response.read()\n\n assert content == b\"Hello, world!\"\n assert response.content == b\"Hello, world!\"\n assert response.is_closed\n\n\n@pytest.mark.anyio\nasync def test_async_streaming_response():\n response = httpx.Response(\n 200,\n content=async_streaming_body(),\n )\n\n assert response.status_code == 200\n assert not response.is_closed\n\n content = await response.aread()\n\n assert content == b\"Hello, world!\"\n assert response.content == b\"Hello, world!\"\n assert response.is_closed\n\n\ndef test_cannot_read_after_stream_consumed():\n response = httpx.Response(\n 200,\n content=streaming_body(),\n )\n\n content = b\"\"\n for part in response.iter_bytes():\n content += part\n\n with pytest.raises(httpx.StreamConsumed):\n response.read()\n\n\n@pytest.mark.anyio\nasync def test_cannot_aread_after_stream_consumed():\n response = httpx.Response(\n 200,\n content=async_streaming_body(),\n )\n\n content = b\"\"\n async for part in response.aiter_bytes():\n content += part\n\n with pytest.raises(httpx.StreamConsumed):\n await response.aread()\n\n\ndef test_cannot_read_after_response_closed():\n response = httpx.Response(\n 200,\n content=streaming_body(),\n )\n\n response.close()\n with pytest.raises(httpx.StreamClosed):\n response.read()\n\n\n@pytest.mark.anyio\nasync def test_cannot_aread_after_response_closed():\n response = httpx.Response(\n 200,\n content=async_streaming_body(),\n )\n\n await response.aclose()\n with pytest.raises(httpx.StreamClosed):\n await response.aread()\n\n\n@pytest.mark.anyio\nasync def test_elapsed_not_available_until_closed():\n response = httpx.Response(\n 200,\n content=async_streaming_body(),\n )\n\n with pytest.raises(RuntimeError):\n response.elapsed # noqa: B018\n\n\ndef test_unknown_status_code():\n response = httpx.Response(\n 600,\n )\n assert response.status_code == 600\n assert response.reason_phrase == \"\"\n assert response.text == \"\"\n\n\ndef test_json_with_specified_encoding():\n data = {\"greeting\": \"hello\", \"recipient\": \"world\"}\n content = json.dumps(data).encode(\"utf-16\")\n headers = {\"Content-Type\": \"application/json, charset=utf-16\"}\n response = httpx.Response(\n 200,\n content=content,\n headers=headers,\n )\n assert response.json() == data\n\n\ndef test_json_with_options():\n data = {\"greeting\": \"hello\", \"recipient\": \"world\", \"amount\": 1}\n content = json.dumps(data).encode(\"utf-16\")\n headers = {\"Content-Type\": \"application/json, 
charset=utf-16\"}\n response = httpx.Response(\n 200,\n content=content,\n headers=headers,\n )\n assert response.json(parse_int=str)[\"amount\"] == \"1\"\n\n\n@pytest.mark.parametrize(\n \"encoding\",\n [\n \"utf-8\",\n \"utf-8-sig\",\n \"utf-16\",\n \"utf-16-be\",\n \"utf-16-le\",\n \"utf-32\",\n \"utf-32-be\",\n \"utf-32-le\",\n ],\n)\ndef test_json_without_specified_charset(encoding):\n data = {\"greeting\": \"hello\", \"recipient\": \"world\"}\n content = json.dumps(data).encode(encoding)\n headers = {\"Content-Type\": \"application/json\"}\n response = httpx.Response(\n 200,\n content=content,\n headers=headers,\n )\n assert response.json() == data\n\n\n@pytest.mark.parametrize(\n \"encoding\",\n [\n \"utf-8\",\n \"utf-8-sig\",\n \"utf-16\",\n \"utf-16-be\",\n \"utf-16-le\",\n \"utf-32\",\n \"utf-32-be\",\n \"utf-32-le\",\n ],\n)\ndef test_json_with_specified_charset(encoding):\n data = {\"greeting\": \"hello\", \"recipient\": \"world\"}\n content = json.dumps(data).encode(encoding)\n headers = {\"Content-Type\": f\"application/json; charset={encoding}\"}\n response = httpx.Response(\n 200,\n content=content,\n headers=headers,\n )\n assert response.json() == data\n\n\n@pytest.mark.parametrize(\n \"headers, expected\",\n [\n (\n {\"Link\": \"<https://example.com>; rel='preload'\"},\n {\"preload\": {\"rel\": \"preload\", \"url\": \"https://example.com\"}},\n ),\n (\n {\"Link\": '</hub>; rel=\"hub\", </resource>; rel=\"self\"'},\n {\n \"hub\": {\"url\": \"/hub\", \"rel\": \"hub\"},\n \"self\": {\"url\": \"/resource\", \"rel\": \"self\"},\n },\n ),\n ],\n)\ndef test_link_headers(headers, expected):\n response = httpx.Response(\n 200,\n content=None,\n headers=headers,\n )\n assert response.links == expected\n\n\n@pytest.mark.parametrize(\"header_value\", (b\"deflate\", b\"gzip\", b\"br\"))\ndef test_decode_error_with_request(header_value):\n headers = [(b\"Content-Encoding\", header_value)]\n broken_compressed_body = b\"xxxxxxxxxxxxxx\"\n with pytest.raises(httpx.DecodingError):\n httpx.Response(\n 200,\n headers=headers,\n content=broken_compressed_body,\n )\n\n with pytest.raises(httpx.DecodingError):\n httpx.Response(\n 200,\n headers=headers,\n content=broken_compressed_body,\n request=httpx.Request(\"GET\", \"https://www.example.org/\"),\n )\n\n\n@pytest.mark.parametrize(\"header_value\", (b\"deflate\", b\"gzip\", b\"br\"))\ndef test_value_error_without_request(header_value):\n headers = [(b\"Content-Encoding\", header_value)]\n broken_compressed_body = b\"xxxxxxxxxxxxxx\"\n with pytest.raises(httpx.DecodingError):\n httpx.Response(200, headers=headers, content=broken_compressed_body)\n\n\ndef test_response_with_unset_request():\n response = httpx.Response(200, content=b\"Hello, world!\")\n\n assert response.status_code == 200\n assert response.reason_phrase == \"OK\"\n assert response.text == \"Hello, world!\"\n assert not response.is_error\n\n\ndef test_set_request_after_init():\n response = httpx.Response(200, content=b\"Hello, world!\")\n\n response.request = httpx.Request(\"GET\", \"https://www.example.org\")\n\n assert response.request.method == \"GET\"\n assert response.request.url == \"https://www.example.org\"\n\n\ndef test_cannot_access_unset_request():\n response = httpx.Response(200, content=b\"Hello, world!\")\n\n with pytest.raises(RuntimeError):\n response.request # noqa: B018\n\n\ndef test_generator_with_transfer_encoding_header():\n def content() -> typing.Iterator[bytes]:\n yield b\"test 123\" # pragma: no cover\n\n response = httpx.Response(200, 
content=content())\n assert response.headers == {\"Transfer-Encoding\": \"chunked\"}\n\n\ndef test_generator_with_content_length_header():\n def content() -> typing.Iterator[bytes]:\n yield b\"test 123\" # pragma: no cover\n\n headers = {\"Content-Length\": \"8\"}\n response = httpx.Response(200, content=content(), headers=headers)\n assert response.headers == {\"Content-Length\": \"8\"}\n\n\ndef test_response_picklable():\n response = httpx.Response(\n 200,\n content=b\"Hello, world!\",\n request=httpx.Request(\"GET\", \"https://example.org\"),\n )\n pickle_response = pickle.loads(pickle.dumps(response))\n assert pickle_response.is_closed is True\n assert pickle_response.is_stream_consumed is True\n assert pickle_response.next_request is None\n assert pickle_response.stream is not None\n assert pickle_response.content == b\"Hello, world!\"\n assert pickle_response.status_code == 200\n assert pickle_response.request.url == response.request.url\n assert pickle_response.extensions == {}\n assert pickle_response.history == []\n\n\n@pytest.mark.anyio\nasync def test_response_async_streaming_picklable():\n response = httpx.Response(200, content=async_streaming_body())\n pickle_response = pickle.loads(pickle.dumps(response))\n with pytest.raises(httpx.ResponseNotRead):\n pickle_response.content # noqa: B018\n with pytest.raises(httpx.StreamClosed):\n await pickle_response.aread()\n assert pickle_response.is_stream_consumed is False\n assert pickle_response.num_bytes_downloaded == 0\n assert pickle_response.headers == {\"Transfer-Encoding\": \"chunked\"}\n\n response = httpx.Response(200, content=async_streaming_body())\n await response.aread()\n pickle_response = pickle.loads(pickle.dumps(response))\n assert pickle_response.is_stream_consumed is True\n assert pickle_response.content == b\"Hello, world!\"\n assert pickle_response.num_bytes_downloaded == 13\n\n\ndef test_response_decode_text_using_autodetect():\n # Ensure that a 'default_encoding=\"autodetect\"' on the response allows for\n # encoding autodetection to be used when no \"Content-Type: text/plain; charset=...\"\n # info is present.\n #\n # Here we have some french text encoded with ISO-8859-1, rather than UTF-8.\n text = (\n \"Non-seulement Despréaux ne se trompait pas, mais de tous les écrivains \"\n \"que la France a produits, sans excepter Voltaire lui-même, imprégné de \"\n \"l'esprit anglais par son séjour à Londres, c'est incontestablement \"\n \"Molière ou Poquelin qui reproduit avec l'exactitude la plus vive et la \"\n \"plus complète le fond du génie français.\"\n )\n content = text.encode(\"ISO-8859-1\")\n response = httpx.Response(200, content=content, default_encoding=autodetect)\n\n assert response.status_code == 200\n assert response.reason_phrase == \"OK\"\n assert response.encoding == \"ISO-8859-1\"\n assert response.text == text\n\n\ndef test_response_decode_text_using_explicit_encoding():\n # Ensure that a 'default_encoding=\"...\"' on the response is used for text decoding\n # when no \"Content-Type: text/plain; charset=...\"\" info is present.\n #\n # Here we have some french text encoded with Windows-1252, rather than UTF-8.\n # https://en.wikipedia.org/wiki/Windows-1252\n text = (\n \"Non-seulement Despréaux ne se trompait pas, mais de tous les écrivains \"\n \"que la France a produits, sans excepter Voltaire lui-même, imprégné de \"\n \"l'esprit anglais par son séjour à Londres, c'est incontestablement \"\n \"Molière ou Poquelin qui reproduit avec l'exactitude la plus vive et la \"\n \"plus complète le fond du 
génie français.\"\n )\n content = text.encode(\"cp1252\")\n response = httpx.Response(200, content=content, default_encoding=\"cp1252\")\n\n assert response.status_code == 200\n assert response.reason_phrase == \"OK\"\n assert response.encoding == \"cp1252\"\n assert response.text == text\n",
"path": "tests/models/test_responses.py"
}
] | 0_4 | python | import sys
import unittest
import inspect
class TestResponseForceEncoding(unittest.TestCase):
def test_response_force_encoding_after_text_accessed(self):
import httpx
response = httpx.Response(
200,
content=b"Hello, world!",
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.reason_phrase, "OK")
self.assertEqual(response.text, "Hello, world!")
self.assertEqual(response.encoding, "utf-8")
with self.assertRaises(ValueError):
response.encoding = "UTF8"
with self.assertRaises(ValueError):
response.encoding = "iso-8859-1"
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestResponseForceEncoding))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
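Between rows it is worth noting how these test_script fields are meant to be consumed: each one is a standalone Python program that imports the library from the checkout and reports pass/fail purely through its process exit code, as the sys.exit(0) / sys.exit(1) calls above show. Below is a minimal harness sketch under stated assumptions: run_row_test is a hypothetical helper name (not part of the dataset), and it presumes the repository has already been cloned at the row's base commit with its requirements installed.

import subprocess
import tempfile

def run_row_test(repo_dir: str, test_script: str, python: str = "python3.9") -> bool:
    """Run one row's test_script against a checkout of the repo under test.

    The scripts in this dataset signal their outcome only through the
    process exit code (sys.exit(0) on success, sys.exit(1) on failure),
    so the returncode is all we need to inspect.
    """
    # Materialize the script on disk; delete=False lets the subprocess open it.
    with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as f:
        f.write(test_script)
        script_path = f.name
    # Run from inside the checkout so `import httpx` / `import discord`
    # resolves to the repository under test rather than an installed copy.
    result = subprocess.run([python, script_path], cwd=repo_dir)
    return result.returncode == 0

A natural way to use such a harness is to run it twice per row: once at the base commit, where the script is expected to fail, and once after applying the solution patch, where it is expected to pass.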
https://github.com/teamqurrent/discord.py | Your task is to add support for the Latin American Spanish locale to the discord.py library. This involves updating the `Locale` enumeration in the `enums.py` file to include the new locale. The locale code is `es-419`. | 08ef42f | discord | python3.9 | 2a59e028 | diff --git a/discord/enums.py b/discord/enums.py
--- a/discord/enums.py
+++ b/discord/enums.py
@@ -690,6 +690,7 @@ class Locale(Enum):
     italian = 'it'
     japanese = 'ja'
     korean = 'ko'
+    latin_american_spanish = 'es-419'
     lithuanian = 'lt'
     norwegian = 'no'
     polish = 'pl'
diff --git a/docs/api.rst b/docs/api.rst
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -3236,6 +3236,12 @@ of :class:`enum.Enum`.
 
         The ``ko`` locale.
 
+    .. attribute:: latin_american_spanish
+
+        The ``es-419`` locale.
+
+        .. versionadded:: 2.4
+
     .. attribute:: lithuanian
 
         The ``lt`` locale.
| [
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\nfrom __future__ import annotations\n\nimport types\nfrom collections import namedtuple\nfrom typing import Any, ClassVar, Dict, List, Optional, TYPE_CHECKING, Tuple, Type, TypeVar, Iterator, Mapping\n\n__all__ = (\n 'Enum',\n 'ChannelType',\n 'MessageType',\n 'SpeakingState',\n 'VerificationLevel',\n 'ContentFilter',\n 'Status',\n 'DefaultAvatar',\n 'AuditLogAction',\n 'AuditLogActionCategory',\n 'UserFlags',\n 'ActivityType',\n 'NotificationLevel',\n 'TeamMembershipState',\n 'TeamMemberRole',\n 'WebhookType',\n 'ExpireBehaviour',\n 'ExpireBehavior',\n 'StickerType',\n 'StickerFormatType',\n 'InviteTarget',\n 'VideoQualityMode',\n 'ComponentType',\n 'ButtonStyle',\n 'TextStyle',\n 'PrivacyLevel',\n 'InteractionType',\n 'InteractionResponseType',\n 'NSFWLevel',\n 'MFALevel',\n 'Locale',\n 'EntityType',\n 'EventStatus',\n 'AppCommandType',\n 'AppCommandOptionType',\n 'AppCommandPermissionType',\n 'AutoModRuleTriggerType',\n 'AutoModRuleEventType',\n 'AutoModRuleActionType',\n 'ForumLayoutType',\n 'ForumOrderType',\n 'SelectDefaultValueType',\n 'SKUType',\n 'EntitlementType',\n 'EntitlementOwnerType',\n)\n\nif TYPE_CHECKING:\n from typing_extensions import Self\n\n\ndef _create_value_cls(name: str, comparable: bool):\n # All the type ignores here are due to the type checker being unable to recognise\n # Runtime type creation without exploding.\n cls = namedtuple('_EnumValue_' + name, 'name value')\n cls.__repr__ = lambda self: f'<{name}.{self.name}: {self.value!r}>' # type: ignore\n cls.__str__ = lambda self: f'{name}.{self.name}' # type: ignore\n if comparable:\n cls.__le__ = lambda self, other: isinstance(other, self.__class__) and self.value <= other.value # type: ignore\n cls.__ge__ = lambda self, other: isinstance(other, self.__class__) and self.value >= other.value # type: ignore\n cls.__lt__ = lambda self, other: isinstance(other, self.__class__) and self.value < other.value # type: ignore\n cls.__gt__ = lambda self, other: isinstance(other, self.__class__) and self.value > other.value # type: ignore\n return cls\n\n\ndef _is_descriptor(obj):\n return hasattr(obj, '__get__') or hasattr(obj, '__set__') or hasattr(obj, '__delete__')\n\n\nclass EnumMeta(type):\n if TYPE_CHECKING:\n __name__: ClassVar[str]\n _enum_member_names_: ClassVar[List[str]]\n _enum_member_map_: ClassVar[Dict[str, Any]]\n _enum_value_map_: ClassVar[Dict[Any, Any]]\n\n def __new__(cls, name: str, bases: 
Tuple[type, ...], attrs: Dict[str, Any], *, comparable: bool = False) -> Self:\n value_mapping = {}\n member_mapping = {}\n member_names = []\n\n value_cls = _create_value_cls(name, comparable)\n for key, value in list(attrs.items()):\n is_descriptor = _is_descriptor(value)\n if key[0] == '_' and not is_descriptor:\n continue\n\n # Special case classmethod to just pass through\n if isinstance(value, classmethod):\n continue\n\n if is_descriptor:\n setattr(value_cls, key, value)\n del attrs[key]\n continue\n\n try:\n new_value = value_mapping[value]\n except KeyError:\n new_value = value_cls(name=key, value=value)\n value_mapping[value] = new_value\n member_names.append(key)\n\n member_mapping[key] = new_value\n attrs[key] = new_value\n\n attrs['_enum_value_map_'] = value_mapping\n attrs['_enum_member_map_'] = member_mapping\n attrs['_enum_member_names_'] = member_names\n attrs['_enum_value_cls_'] = value_cls\n actual_cls = super().__new__(cls, name, bases, attrs)\n value_cls._actual_enum_cls_ = actual_cls # type: ignore # Runtime attribute isn't understood\n return actual_cls\n\n def __iter__(cls) -> Iterator[Any]:\n return (cls._enum_member_map_[name] for name in cls._enum_member_names_)\n\n def __reversed__(cls) -> Iterator[Any]:\n return (cls._enum_member_map_[name] for name in reversed(cls._enum_member_names_))\n\n def __len__(cls) -> int:\n return len(cls._enum_member_names_)\n\n def __repr__(cls) -> str:\n return f'<enum {cls.__name__}>'\n\n @property\n def __members__(cls) -> Mapping[str, Any]:\n return types.MappingProxyType(cls._enum_member_map_)\n\n def __call__(cls, value: str) -> Any:\n try:\n return cls._enum_value_map_[value]\n except (KeyError, TypeError):\n raise ValueError(f\"{value!r} is not a valid {cls.__name__}\")\n\n def __getitem__(cls, key: str) -> Any:\n return cls._enum_member_map_[key]\n\n def __setattr__(cls, name: str, value: Any) -> None:\n raise TypeError('Enums are immutable.')\n\n def __delattr__(cls, attr: str) -> None:\n raise TypeError('Enums are immutable')\n\n def __instancecheck__(self, instance: Any) -> bool:\n # isinstance(x, Y)\n # -> __instancecheck__(Y, x)\n try:\n return instance._actual_enum_cls_ is self\n except AttributeError:\n return False\n\n\nif TYPE_CHECKING:\n from enum import Enum\nelse:\n\n class Enum(metaclass=EnumMeta):\n @classmethod\n def try_value(cls, value):\n try:\n return cls._enum_value_map_[value]\n except (KeyError, TypeError):\n return value\n\n\nclass ChannelType(Enum):\n text = 0\n private = 1\n voice = 2\n group = 3\n category = 4\n news = 5\n news_thread = 10\n public_thread = 11\n private_thread = 12\n stage_voice = 13\n forum = 15\n media = 16\n\n def __str__(self) -> str:\n return self.name\n\n\nclass MessageType(Enum):\n default = 0\n recipient_add = 1\n recipient_remove = 2\n call = 3\n channel_name_change = 4\n channel_icon_change = 5\n pins_add = 6\n new_member = 7\n premium_guild_subscription = 8\n premium_guild_tier_1 = 9\n premium_guild_tier_2 = 10\n premium_guild_tier_3 = 11\n channel_follow_add = 12\n guild_stream = 13\n guild_discovery_disqualified = 14\n guild_discovery_requalified = 15\n guild_discovery_grace_period_initial_warning = 16\n guild_discovery_grace_period_final_warning = 17\n thread_created = 18\n reply = 19\n chat_input_command = 20\n thread_starter_message = 21\n guild_invite_reminder = 22\n context_menu_command = 23\n auto_moderation_action = 24\n role_subscription_purchase = 25\n interaction_premium_upsell = 26\n stage_start = 27\n stage_end = 28\n stage_speaker = 29\n stage_raise_hand = 
30\n stage_topic = 31\n guild_application_premium_subscription = 32\n guild_incident_alert_mode_enabled = 36\n guild_incident_alert_mode_disabled = 37\n guild_incident_report_raid = 38\n guild_incident_report_false_alarm = 39\n\n\nclass SpeakingState(Enum):\n none = 0\n voice = 1\n soundshare = 2\n priority = 4\n\n def __str__(self) -> str:\n return self.name\n\n def __int__(self) -> int:\n return self.value\n\n\nclass VerificationLevel(Enum, comparable=True):\n none = 0\n low = 1\n medium = 2\n high = 3\n highest = 4\n\n def __str__(self) -> str:\n return self.name\n\n\nclass ContentFilter(Enum, comparable=True):\n disabled = 0\n no_role = 1\n all_members = 2\n\n def __str__(self) -> str:\n return self.name\n\n\nclass Status(Enum):\n online = 'online'\n offline = 'offline'\n idle = 'idle'\n dnd = 'dnd'\n do_not_disturb = 'dnd'\n invisible = 'invisible'\n\n def __str__(self) -> str:\n return self.value\n\n\nclass DefaultAvatar(Enum):\n blurple = 0\n grey = 1\n gray = 1\n green = 2\n orange = 3\n red = 4\n pink = 5\n\n def __str__(self) -> str:\n return self.name\n\n\nclass NotificationLevel(Enum, comparable=True):\n all_messages = 0\n only_mentions = 1\n\n\nclass AuditLogActionCategory(Enum):\n create = 1\n delete = 2\n update = 3\n\n\nclass AuditLogAction(Enum):\n # fmt: off\n guild_update = 1\n channel_create = 10\n channel_update = 11\n channel_delete = 12\n overwrite_create = 13\n overwrite_update = 14\n overwrite_delete = 15\n kick = 20\n member_prune = 21\n ban = 22\n unban = 23\n member_update = 24\n member_role_update = 25\n member_move = 26\n member_disconnect = 27\n bot_add = 28\n role_create = 30\n role_update = 31\n role_delete = 32\n invite_create = 40\n invite_update = 41\n invite_delete = 42\n webhook_create = 50\n webhook_update = 51\n webhook_delete = 52\n emoji_create = 60\n emoji_update = 61\n emoji_delete = 62\n message_delete = 72\n message_bulk_delete = 73\n message_pin = 74\n message_unpin = 75\n integration_create = 80\n integration_update = 81\n integration_delete = 82\n stage_instance_create = 83\n stage_instance_update = 84\n stage_instance_delete = 85\n sticker_create = 90\n sticker_update = 91\n sticker_delete = 92\n scheduled_event_create = 100\n scheduled_event_update = 101\n scheduled_event_delete = 102\n thread_create = 110\n thread_update = 111\n thread_delete = 112\n app_command_permission_update = 121\n automod_rule_create = 140\n automod_rule_update = 141\n automod_rule_delete = 142\n automod_block_message = 143\n automod_flag_message = 144\n automod_timeout_member = 145\n creator_monetization_request_created = 150\n creator_monetization_terms_accepted = 151\n # fmt: on\n\n @property\n def category(self) -> Optional[AuditLogActionCategory]:\n # fmt: off\n lookup: Dict[AuditLogAction, Optional[AuditLogActionCategory]] = {\n AuditLogAction.guild_update: AuditLogActionCategory.update,\n AuditLogAction.channel_create: AuditLogActionCategory.create,\n AuditLogAction.channel_update: AuditLogActionCategory.update,\n AuditLogAction.channel_delete: AuditLogActionCategory.delete,\n AuditLogAction.overwrite_create: AuditLogActionCategory.create,\n AuditLogAction.overwrite_update: AuditLogActionCategory.update,\n AuditLogAction.overwrite_delete: AuditLogActionCategory.delete,\n AuditLogAction.kick: None,\n AuditLogAction.member_prune: None,\n AuditLogAction.ban: None,\n AuditLogAction.unban: None,\n AuditLogAction.member_update: AuditLogActionCategory.update,\n AuditLogAction.member_role_update: AuditLogActionCategory.update,\n AuditLogAction.member_move: None,\n 
AuditLogAction.member_disconnect: None,\n AuditLogAction.bot_add: None,\n AuditLogAction.role_create: AuditLogActionCategory.create,\n AuditLogAction.role_update: AuditLogActionCategory.update,\n AuditLogAction.role_delete: AuditLogActionCategory.delete,\n AuditLogAction.invite_create: AuditLogActionCategory.create,\n AuditLogAction.invite_update: AuditLogActionCategory.update,\n AuditLogAction.invite_delete: AuditLogActionCategory.delete,\n AuditLogAction.webhook_create: AuditLogActionCategory.create,\n AuditLogAction.webhook_update: AuditLogActionCategory.update,\n AuditLogAction.webhook_delete: AuditLogActionCategory.delete,\n AuditLogAction.emoji_create: AuditLogActionCategory.create,\n AuditLogAction.emoji_update: AuditLogActionCategory.update,\n AuditLogAction.emoji_delete: AuditLogActionCategory.delete,\n AuditLogAction.message_delete: AuditLogActionCategory.delete,\n AuditLogAction.message_bulk_delete: AuditLogActionCategory.delete,\n AuditLogAction.message_pin: None,\n AuditLogAction.message_unpin: None,\n AuditLogAction.integration_create: AuditLogActionCategory.create,\n AuditLogAction.integration_update: AuditLogActionCategory.update,\n AuditLogAction.integration_delete: AuditLogActionCategory.delete,\n AuditLogAction.stage_instance_create: AuditLogActionCategory.create,\n AuditLogAction.stage_instance_update: AuditLogActionCategory.update,\n AuditLogAction.stage_instance_delete: AuditLogActionCategory.delete,\n AuditLogAction.sticker_create: AuditLogActionCategory.create,\n AuditLogAction.sticker_update: AuditLogActionCategory.update,\n AuditLogAction.sticker_delete: AuditLogActionCategory.delete,\n AuditLogAction.scheduled_event_create: AuditLogActionCategory.create,\n AuditLogAction.scheduled_event_update: AuditLogActionCategory.update,\n AuditLogAction.scheduled_event_delete: AuditLogActionCategory.delete,\n AuditLogAction.thread_create: AuditLogActionCategory.create,\n AuditLogAction.thread_delete: AuditLogActionCategory.delete,\n AuditLogAction.thread_update: AuditLogActionCategory.update,\n AuditLogAction.app_command_permission_update: AuditLogActionCategory.update,\n AuditLogAction.automod_rule_create: AuditLogActionCategory.create,\n AuditLogAction.automod_rule_update: AuditLogActionCategory.update,\n AuditLogAction.automod_rule_delete: AuditLogActionCategory.delete,\n AuditLogAction.automod_block_message: None,\n AuditLogAction.automod_flag_message: None,\n AuditLogAction.automod_timeout_member: None,\n AuditLogAction.creator_monetization_request_created: None,\n AuditLogAction.creator_monetization_terms_accepted: None,\n }\n # fmt: on\n return lookup[self]\n\n @property\n def target_type(self) -> Optional[str]:\n v = self.value\n if v == -1:\n return 'all'\n elif v < 10:\n return 'guild'\n elif v < 20:\n return 'channel'\n elif v < 30:\n return 'user'\n elif v < 40:\n return 'role'\n elif v < 50:\n return 'invite'\n elif v < 60:\n return 'webhook'\n elif v < 70:\n return 'emoji'\n elif v == 73:\n return 'channel'\n elif v < 80:\n return 'message'\n elif v < 83:\n return 'integration'\n elif v < 90:\n return 'stage_instance'\n elif v < 93:\n return 'sticker'\n elif v < 103:\n return 'guild_scheduled_event'\n elif v < 113:\n return 'thread'\n elif v < 122:\n return 'integration_or_app_command'\n elif v < 143:\n return 'auto_moderation'\n elif v < 146:\n return 'user'\n elif v < 152:\n return 'creator_monetization'\n\n\nclass UserFlags(Enum):\n staff = 1\n partner = 2\n hypesquad = 4\n bug_hunter = 8\n mfa_sms = 16\n premium_promo_dismissed = 32\n hypesquad_bravery = 
64\n hypesquad_brilliance = 128\n hypesquad_balance = 256\n early_supporter = 512\n team_user = 1024\n system = 4096\n has_unread_urgent_messages = 8192\n bug_hunter_level_2 = 16384\n verified_bot = 65536\n verified_bot_developer = 131072\n discord_certified_moderator = 262144\n bot_http_interactions = 524288\n spammer = 1048576\n active_developer = 4194304\n\n\nclass ActivityType(Enum):\n unknown = -1\n playing = 0\n streaming = 1\n listening = 2\n watching = 3\n custom = 4\n competing = 5\n\n def __int__(self) -> int:\n return self.value\n\n\nclass TeamMembershipState(Enum):\n invited = 1\n accepted = 2\n\n\nclass TeamMemberRole(Enum):\n admin = 'admin'\n developer = 'developer'\n read_only = 'read_only'\n\n\nclass WebhookType(Enum):\n incoming = 1\n channel_follower = 2\n application = 3\n\n\nclass ExpireBehaviour(Enum):\n remove_role = 0\n kick = 1\n\n\nExpireBehavior = ExpireBehaviour\n\n\nclass StickerType(Enum):\n standard = 1\n guild = 2\n\n\nclass StickerFormatType(Enum):\n png = 1\n apng = 2\n lottie = 3\n gif = 4\n\n @property\n def file_extension(self) -> str:\n # fmt: off\n lookup: Dict[StickerFormatType, str] = {\n StickerFormatType.png: 'png',\n StickerFormatType.apng: 'png',\n StickerFormatType.lottie: 'json',\n StickerFormatType.gif: 'gif',\n }\n # fmt: on\n return lookup.get(self, 'png')\n\n\nclass InviteTarget(Enum):\n unknown = 0\n stream = 1\n embedded_application = 2\n\n\nclass InteractionType(Enum):\n ping = 1\n application_command = 2\n component = 3\n autocomplete = 4\n modal_submit = 5\n\n\nclass InteractionResponseType(Enum):\n pong = 1\n # ack = 2 (deprecated)\n # channel_message = 3 (deprecated)\n channel_message = 4 # (with source)\n deferred_channel_message = 5 # (with source)\n deferred_message_update = 6 # for components\n message_update = 7 # for components\n autocomplete_result = 8\n modal = 9 # for modals\n premium_required = 10\n\n\nclass VideoQualityMode(Enum):\n auto = 1\n full = 2\n\n def __int__(self) -> int:\n return self.value\n\n\nclass ComponentType(Enum):\n action_row = 1\n button = 2\n select = 3\n string_select = 3\n text_input = 4\n user_select = 5\n role_select = 6\n mentionable_select = 7\n channel_select = 8\n\n def __int__(self) -> int:\n return self.value\n\n\nclass ButtonStyle(Enum):\n primary = 1\n secondary = 2\n success = 3\n danger = 4\n link = 5\n\n # Aliases\n blurple = 1\n grey = 2\n gray = 2\n green = 3\n red = 4\n url = 5\n\n def __int__(self) -> int:\n return self.value\n\n\nclass TextStyle(Enum):\n short = 1\n paragraph = 2\n\n # Aliases\n long = 2\n\n def __int__(self) -> int:\n return self.value\n\n\nclass PrivacyLevel(Enum):\n guild_only = 2\n\n\nclass NSFWLevel(Enum, comparable=True):\n default = 0\n explicit = 1\n safe = 2\n age_restricted = 3\n\n\nclass MFALevel(Enum, comparable=True):\n disabled = 0\n require_2fa = 1\n\n\nclass Locale(Enum):\n american_english = 'en-US'\n british_english = 'en-GB'\n bulgarian = 'bg'\n chinese = 'zh-CN'\n taiwan_chinese = 'zh-TW'\n croatian = 'hr'\n czech = 'cs'\n indonesian = 'id'\n danish = 'da'\n dutch = 'nl'\n finnish = 'fi'\n french = 'fr'\n german = 'de'\n greek = 'el'\n hindi = 'hi'\n hungarian = 'hu'\n italian = 'it'\n japanese = 'ja'\n korean = 'ko'\n lithuanian = 'lt'\n norwegian = 'no'\n polish = 'pl'\n brazil_portuguese = 'pt-BR'\n romanian = 'ro'\n russian = 'ru'\n spain_spanish = 'es-ES'\n swedish = 'sv-SE'\n thai = 'th'\n turkish = 'tr'\n ukrainian = 'uk'\n vietnamese = 'vi'\n\n def __str__(self) -> str:\n return self.value\n\n\nE = TypeVar('E', bound='Enum')\n\n\nclass 
EntityType(Enum):\n stage_instance = 1\n voice = 2\n external = 3\n\n\nclass EventStatus(Enum):\n scheduled = 1\n active = 2\n completed = 3\n canceled = 4\n\n ended = 3\n cancelled = 4\n\n\nclass AppCommandOptionType(Enum):\n subcommand = 1\n subcommand_group = 2\n string = 3\n integer = 4\n boolean = 5\n user = 6\n channel = 7\n role = 8\n mentionable = 9\n number = 10\n attachment = 11\n\n\nclass AppCommandType(Enum):\n chat_input = 1\n user = 2\n message = 3\n\n\nclass AppCommandPermissionType(Enum):\n role = 1\n user = 2\n channel = 3\n\n\nclass AutoModRuleTriggerType(Enum):\n keyword = 1\n harmful_link = 2\n spam = 3\n keyword_preset = 4\n mention_spam = 5\n member_profile = 6\n\n\nclass AutoModRuleEventType(Enum):\n message_send = 1\n member_update = 2\n\n\nclass AutoModRuleActionType(Enum):\n block_message = 1\n send_alert_message = 2\n timeout = 3\n block_member_interactions = 4\n\n\nclass ForumLayoutType(Enum):\n not_set = 0\n list_view = 1\n gallery_view = 2\n\n\nclass ForumOrderType(Enum):\n latest_activity = 0\n creation_date = 1\n\n\nclass SelectDefaultValueType(Enum):\n user = 'user'\n role = 'role'\n channel = 'channel'\n\n\nclass SKUType(Enum):\n subscription = 5\n subscription_group = 6\n\n\nclass EntitlementType(Enum):\n application_subscription = 8\n\n\nclass EntitlementOwnerType(Enum):\n guild = 1\n user = 2\n\n\ndef create_unknown_value(cls: Type[E], val: Any) -> E:\n value_cls = cls._enum_value_cls_ # type: ignore # This is narrowed below\n name = f'unknown_{val}'\n return value_cls(name=name, value=val)\n\n\ndef try_enum(cls: Type[E], val: Any) -> E:\n \"\"\"A function that tries to turn the value into enum ``cls``.\n\n If it fails it returns a proxy invalid value instead.\n \"\"\"\n\n try:\n return cls._enum_value_map_[val] # type: ignore # All errors are caught below\n except (KeyError, TypeError, AttributeError):\n return create_unknown_value(cls, val)\n",
"path": "discord/enums.py"
},
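The ``discord/enums.py`` file above rolls its own ``Enum`` replacement instead of the stdlib ``enum`` module, chiefly so that ``try_enum`` can degrade gracefully when Discord ships values the library does not know yet. A minimal, illustrative sketch of that behaviour (``try_enum`` and the proxy machinery are internal API, so this is a demonstration under that assumption, not a supported interface): ::

    from discord.enums import ChannelType, try_enum

    # A known raw value resolves to the real member (same object identity),
    # and __str__ on the value class returns the member name.
    known = try_enum(ChannelType, 0)
    print(known, known is ChannelType.text)   # -> text True

    # An unrecognized value (e.g. a channel type newer than the library)
    # yields an 'unknown_<val>' proxy instead of raising ValueError.
    unknown = try_enum(ChannelType, 99)
    print(unknown.name, unknown.value)        # -> unknown_99 99

    # The metaclass's __instancecheck__ lets the proxy still pass
    # isinstance checks against the real enum class.
    print(isinstance(unknown, ChannelType))   # -> True

The design choice is forward compatibility: gateway payloads carrying a brand-new constant parse into a usable proxy member rather than crashing event handling, which is exactly what ``create_unknown_value`` and the ``try_enum`` docstring describe.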
{
"content": ".. currentmodule:: discord\n\nAPI Reference\n===============\n\nThe following section outlines the API of discord.py.\n\n.. note::\n\n This module uses the Python logging module to log diagnostic and errors\n in an output independent way. If the logging module is not configured,\n these logs will not be output anywhere. See :ref:`logging_setup` for\n more information on how to set up and use the logging module with\n discord.py.\n\nVersion Related Info\n---------------------\n\nThere are two main ways to query version information about the library. For guarantees, check :ref:`version_guarantees`.\n\n.. data:: version_info\n\n A named tuple that is similar to :obj:`py:sys.version_info`.\n\n Just like :obj:`py:sys.version_info` the valid values for ``releaselevel`` are\n 'alpha', 'beta', 'candidate' and 'final'.\n\n.. data:: __version__\n\n A string representation of the version. e.g. ``'1.0.0rc1'``. This is based\n off of :pep:`440`.\n\nClients\n--------\n\nClient\n~~~~~~~\n\n.. attributetable:: Client\n\n.. autoclass:: Client\n :members:\n :exclude-members: event\n\n .. automethod:: Client.event()\n :decorator:\n\nAutoShardedClient\n~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: AutoShardedClient\n\n.. autoclass:: AutoShardedClient\n :members:\n\nApplication Info\n------------------\n\nAppInfo\n~~~~~~~~\n\n.. attributetable:: AppInfo\n\n.. autoclass:: AppInfo()\n :members:\n\nPartialAppInfo\n~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialAppInfo\n\n.. autoclass:: PartialAppInfo()\n :members:\n\nAppInstallParams\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: AppInstallParams\n\n.. autoclass:: AppInstallParams()\n :members:\n\nTeam\n~~~~~\n\n.. attributetable:: Team\n\n.. autoclass:: Team()\n :members:\n\nTeamMember\n~~~~~~~~~~~\n\n.. attributetable:: TeamMember\n\n.. autoclass:: TeamMember()\n :members:\n :inherited-members:\n\nVoice Related\n---------------\n\nVoiceClient\n~~~~~~~~~~~~\n\n.. attributetable:: VoiceClient\n\n.. autoclass:: VoiceClient()\n :members:\n :exclude-members: connect, on_voice_state_update, on_voice_server_update\n\nVoiceProtocol\n~~~~~~~~~~~~~~~\n\n.. attributetable:: VoiceProtocol\n\n.. autoclass:: VoiceProtocol\n :members:\n\nAudioSource\n~~~~~~~~~~~~\n\n.. attributetable:: AudioSource\n\n.. autoclass:: AudioSource\n :members:\n\nPCMAudio\n~~~~~~~~~\n\n.. attributetable:: PCMAudio\n\n.. autoclass:: PCMAudio\n :members:\n\nFFmpegAudio\n~~~~~~~~~~~~\n\n.. attributetable:: FFmpegAudio\n\n.. autoclass:: FFmpegAudio\n :members:\n\nFFmpegPCMAudio\n~~~~~~~~~~~~~~~\n\n.. attributetable:: FFmpegPCMAudio\n\n.. autoclass:: FFmpegPCMAudio\n :members:\n\nFFmpegOpusAudio\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: FFmpegOpusAudio\n\n.. autoclass:: FFmpegOpusAudio\n :members:\n\nPCMVolumeTransformer\n~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PCMVolumeTransformer\n\n.. autoclass:: PCMVolumeTransformer\n :members:\n\nOpus Library\n~~~~~~~~~~~~~\n\n.. autofunction:: discord.opus.load_opus\n\n.. autofunction:: discord.opus.is_loaded\n\n.. _discord-api-events:\n\nEvent Reference\n---------------\n\nThis section outlines the different types of events listened by :class:`Client`.\n\nThere are two ways to register an event, the first way is through the use of\n:meth:`Client.event`. The second way is through subclassing :class:`Client` and\noverriding the specific events. 
For example: ::\n\n import discord\n\n class MyClient(discord.Client):\n async def on_message(self, message):\n if message.author == self.user:\n return\n\n if message.content.startswith('$hello'):\n await message.channel.send('Hello World!')\n\n\nIf an event handler raises an exception, :func:`on_error` will be called\nto handle it, which defaults to logging the traceback and ignoring the exception.\n\n.. warning::\n\n All the events must be a |coroutine_link|_. If they aren't, then you might get unexpected\n errors. In order to turn a function into a coroutine they must be ``async def``\n functions.\n\nApp Commands\n~~~~~~~~~~~~~\n\n.. function:: on_raw_app_command_permissions_update(payload)\n\n Called when application command permissions are updated.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawAppCommandPermissionsUpdateEvent`\n\n.. function:: on_app_command_completion(interaction, command)\n\n Called when a :class:`app_commands.Command` or :class:`app_commands.ContextMenu` has\n successfully completed without error.\n\n .. versionadded:: 2.0\n\n :param interaction: The interaction of the command.\n :type interaction: :class:`Interaction`\n :param command: The command that completed successfully\n :type command: Union[:class:`app_commands.Command`, :class:`app_commands.ContextMenu`]\n\nAutoMod\n~~~~~~~~\n\n.. function:: on_automod_rule_create(rule)\n\n Called when a :class:`AutoModRule` is created.\n You must have :attr:`~Permissions.manage_guild` to receive this.\n\n This requires :attr:`Intents.auto_moderation_configuration` to be enabled.\n\n .. versionadded:: 2.0\n\n :param rule: The rule that was created.\n :type rule: :class:`AutoModRule`\n\n.. function:: on_automod_rule_update(rule)\n\n Called when a :class:`AutoModRule` is updated.\n You must have :attr:`~Permissions.manage_guild` to receive this.\n\n This requires :attr:`Intents.auto_moderation_configuration` to be enabled.\n\n .. versionadded:: 2.0\n\n :param rule: The rule that was updated.\n :type rule: :class:`AutoModRule`\n\n.. function:: on_automod_rule_delete(rule)\n\n Called when a :class:`AutoModRule` is deleted.\n You must have :attr:`~Permissions.manage_guild` to receive this.\n\n This requires :attr:`Intents.auto_moderation_configuration` to be enabled.\n\n .. versionadded:: 2.0\n\n :param rule: The rule that was deleted.\n :type rule: :class:`AutoModRule`\n\n.. function:: on_automod_action(execution)\n\n Called when a :class:`AutoModAction` is created/performed.\n You must have :attr:`~Permissions.manage_guild` to receive this.\n\n This requires :attr:`Intents.auto_moderation_execution` to be enabled.\n\n .. versionadded:: 2.0\n\n :param execution: The rule execution that was performed.\n :type execution: :class:`AutoModAction`\n\nChannels\n~~~~~~~~~\n\n.. function:: on_guild_channel_delete(channel)\n on_guild_channel_create(channel)\n\n Called whenever a guild channel is deleted or created.\n\n Note that you can get the guild from :attr:`~abc.GuildChannel.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param channel: The guild channel that got created or deleted.\n :type channel: :class:`abc.GuildChannel`\n\n.. function:: on_guild_channel_update(before, after)\n\n Called whenever a guild channel is updated. e.g. 
changed name, topic, permissions.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param before: The updated guild channel's old info.\n :type before: :class:`abc.GuildChannel`\n :param after: The updated guild channel's new info.\n :type after: :class:`abc.GuildChannel`\n\n.. function:: on_guild_channel_pins_update(channel, last_pin)\n\n Called whenever a message is pinned or unpinned from a guild channel.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param channel: The guild channel that had its pins updated.\n :type channel: Union[:class:`abc.GuildChannel`, :class:`Thread`]\n :param last_pin: The latest message that was pinned as an aware datetime in UTC. Could be ``None``.\n :type last_pin: Optional[:class:`datetime.datetime`]\n\n.. function:: on_private_channel_update(before, after)\n\n Called whenever a private group DM is updated. e.g. changed name or topic.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param before: The updated group channel's old info.\n :type before: :class:`GroupChannel`\n :param after: The updated group channel's new info.\n :type after: :class:`GroupChannel`\n\n.. function:: on_private_channel_pins_update(channel, last_pin)\n\n Called whenever a message is pinned or unpinned from a private channel.\n\n :param channel: The private channel that had its pins updated.\n :type channel: :class:`abc.PrivateChannel`\n :param last_pin: The latest message that was pinned as an aware datetime in UTC. Could be ``None``.\n :type last_pin: Optional[:class:`datetime.datetime`]\n\n.. function:: on_typing(channel, user, when)\n\n Called when someone begins typing a message.\n\n The ``channel`` parameter can be a :class:`abc.Messageable` instance.\n Which could either be :class:`TextChannel`, :class:`GroupChannel`, or\n :class:`DMChannel`.\n\n If the ``channel`` is a :class:`TextChannel` then the ``user`` parameter\n is a :class:`Member`, otherwise it is a :class:`User`.\n\n If the channel or user could not be found in the internal cache this event\n will not be called, you may use :func:`on_raw_typing` instead.\n\n This requires :attr:`Intents.typing` to be enabled.\n\n :param channel: The location where the typing originated from.\n :type channel: :class:`abc.Messageable`\n :param user: The user that started typing.\n :type user: Union[:class:`User`, :class:`Member`]\n :param when: When the typing started as an aware datetime in UTC.\n :type when: :class:`datetime.datetime`\n\n.. function:: on_raw_typing(payload)\n\n Called when someone begins typing a message. Unlike :func:`on_typing` this\n is called regardless of the channel and user being in the internal cache.\n\n This requires :attr:`Intents.typing` to be enabled.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawTypingEvent`\n\nConnection\n~~~~~~~~~~~\n\n.. function:: on_connect()\n\n Called when the client has successfully connected to Discord. This is not\n the same as the client being fully prepared, see :func:`on_ready` for that.\n\n The warnings on :func:`on_ready` also apply.\n\n.. function:: on_disconnect()\n\n Called when the client has disconnected from Discord, or a connection attempt to Discord has failed.\n This could happen either through the internet being disconnected, explicit calls to close,\n or Discord terminating the connection one way or the other.\n\n This function can be called many times without a corresponding :func:`on_connect` call.\n\n.. 
function:: on_shard_connect(shard_id)\n\n Similar to :func:`on_connect` except used by :class:`AutoShardedClient`\n to denote when a particular shard ID has connected to Discord.\n\n .. versionadded:: 1.4\n\n :param shard_id: The shard ID that has connected.\n :type shard_id: :class:`int`\n\n\n.. function:: on_shard_disconnect(shard_id)\n\n Similar to :func:`on_disconnect` except used by :class:`AutoShardedClient`\n to denote when a particular shard ID has disconnected from Discord.\n\n .. versionadded:: 1.4\n\n :param shard_id: The shard ID that has disconnected.\n :type shard_id: :class:`int`\n\nDebug\n~~~~~~\n\n.. function:: on_error(event, *args, **kwargs)\n\n Usually when an event raises an uncaught exception, a traceback is\n logged to stderr and the exception is ignored. If you want to\n change this behaviour and handle the exception for whatever reason\n yourself, this event can be overridden. Which, when done, will\n suppress the default action of printing the traceback.\n\n The information of the exception raised and the exception itself can\n be retrieved with a standard call to :func:`sys.exc_info`.\n\n .. note::\n\n ``on_error`` will only be dispatched to :meth:`Client.event`.\n\n It will not be received by :meth:`Client.wait_for`, or, if used,\n :ref:`ext_commands_api_bot` listeners such as\n :meth:`~ext.commands.Bot.listen` or :meth:`~ext.commands.Cog.listener`.\n\n .. versionchanged:: 2.0\n\n The traceback is now logged rather than printed.\n\n :param event: The name of the event that raised the exception.\n :type event: :class:`str`\n\n :param args: The positional arguments for the event that raised the\n exception.\n :param kwargs: The keyword arguments for the event that raised the\n exception.\n\n.. function:: on_socket_event_type(event_type)\n\n Called whenever a websocket event is received from the WebSocket.\n\n This is mainly useful for logging how many events you are receiving\n from the Discord gateway.\n\n .. versionadded:: 2.0\n\n :param event_type: The event type from Discord that is received, e.g. ``'READY'``.\n :type event_type: :class:`str`\n\n.. function:: on_socket_raw_receive(msg)\n\n Called whenever a message is completely received from the WebSocket, before\n it's processed and parsed. This event is always dispatched when a\n complete message is received and the passed data is not parsed in any way.\n\n This is only really useful for grabbing the WebSocket stream and\n debugging purposes.\n\n This requires setting the ``enable_debug_events`` setting in the :class:`Client`.\n\n .. note::\n\n This is only for the messages received from the client\n WebSocket. The voice WebSocket will not trigger this event.\n\n :param msg: The message passed in from the WebSocket library.\n :type msg: :class:`str`\n\n.. function:: on_socket_raw_send(payload)\n\n Called whenever a send operation is done on the WebSocket before the\n message is sent. The passed parameter is the message that is being\n sent to the WebSocket.\n\n This is only really useful for grabbing the WebSocket stream and\n debugging purposes.\n\n This requires setting the ``enable_debug_events`` setting in the :class:`Client`.\n\n .. note::\n\n This is only for the messages sent from the client\n WebSocket. The voice WebSocket will not trigger this event.\n\n :param payload: The message that is about to be passed on to the\n WebSocket library. 
It can be :class:`bytes` to denote a binary\n message or :class:`str` to denote a regular text message.\n :type payload: Union[:class:`bytes`, :class:`str`]\n\n\nEntitlements\n~~~~~~~~~~~~\n\n.. function:: on_entitlement_create(entitlement)\n\n Called when a user subscribes to a SKU.\n\n .. versionadded:: 2.4\n\n :param entitlement: The entitlement that was created.\n :type entitlement: :class:`Entitlement`\n\n.. function:: on_entitlement_update(entitlement)\n\n Called when a user updates their subscription to a SKU. This is usually called when\n the user renews or cancels their subscription.\n\n .. versionadded:: 2.4\n\n :param entitlement: The entitlement that was updated.\n :type entitlement: :class:`Entitlement`\n\n.. function:: on_entitlement_delete(entitlement)\n\n Called when a users subscription to a SKU is cancelled. This is typically only called when:\n\n - Discord issues a refund for the subscription.\n - Discord removes an entitlement from a user.\n\n .. warning::\n\n This event won't be called if the user cancels their subscription manually, instead\n :func:`on_entitlement_update` will be called with :attr:`Entitlement.ends_at` set to the end of the\n current billing period.\n\n .. versionadded:: 2.4\n\n :param entitlement: The entitlement that was deleted.\n :type entitlement: :class:`Entitlement`\n\n\nGateway\n~~~~~~~~\n\n.. function:: on_ready()\n\n Called when the client is done preparing the data received from Discord. Usually after login is successful\n and the :attr:`Client.guilds` and co. are filled up.\n\n .. warning::\n\n This function is not guaranteed to be the first event called.\n Likewise, this function is **not** guaranteed to only be called\n once. This library implements reconnection logic and thus will\n end up calling this event whenever a RESUME request fails.\n\n.. function:: on_resumed()\n\n Called when the client has resumed a session.\n\n.. function:: on_shard_ready(shard_id)\n\n Similar to :func:`on_ready` except used by :class:`AutoShardedClient`\n to denote when a particular shard ID has become ready.\n\n :param shard_id: The shard ID that is ready.\n :type shard_id: :class:`int`\n\n\n.. function:: on_shard_resumed(shard_id)\n\n Similar to :func:`on_resumed` except used by :class:`AutoShardedClient`\n to denote when a particular shard ID has resumed a session.\n\n .. versionadded:: 1.4\n\n :param shard_id: The shard ID that has resumed.\n :type shard_id: :class:`int`\n\nGuilds\n~~~~~~~\n\n.. function:: on_guild_available(guild)\n on_guild_unavailable(guild)\n\n Called when a guild becomes available or unavailable. The guild must have\n existed in the :attr:`Client.guilds` cache.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param guild: The :class:`Guild` that has changed availability.\n\n.. function:: on_guild_join(guild)\n\n Called when a :class:`Guild` is either created by the :class:`Client` or when the\n :class:`Client` joins a guild.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param guild: The guild that was joined.\n :type guild: :class:`Guild`\n\n.. function:: on_guild_remove(guild)\n\n Called when a :class:`Guild` is removed from the :class:`Client`.\n\n This happens through, but not limited to, these circumstances:\n\n - The client got banned.\n - The client got kicked.\n - The client left the guild.\n - The client or the guild owner deleted the guild.\n\n In order for this event to be invoked then the :class:`Client` must have\n been part of the guild to begin with. (i.e. 
it is part of :attr:`Client.guilds`)\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param guild: The guild that got removed.\n :type guild: :class:`Guild`\n\n.. function:: on_guild_update(before, after)\n\n Called when a :class:`Guild` updates, for example:\n\n - Changed name\n - Changed AFK channel\n - Changed AFK timeout\n - etc\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param before: The guild prior to being updated.\n :type before: :class:`Guild`\n :param after: The guild after being updated.\n :type after: :class:`Guild`\n\n.. function:: on_guild_emojis_update(guild, before, after)\n\n Called when a :class:`Guild` adds or removes :class:`Emoji`.\n\n This requires :attr:`Intents.emojis_and_stickers` to be enabled.\n\n :param guild: The guild who got their emojis updated.\n :type guild: :class:`Guild`\n :param before: A list of emojis before the update.\n :type before: Sequence[:class:`Emoji`]\n :param after: A list of emojis after the update.\n :type after: Sequence[:class:`Emoji`]\n\n.. function:: on_guild_stickers_update(guild, before, after)\n\n Called when a :class:`Guild` updates its stickers.\n\n This requires :attr:`Intents.emojis_and_stickers` to be enabled.\n\n .. versionadded:: 2.0\n\n :param guild: The guild who got their stickers updated.\n :type guild: :class:`Guild`\n :param before: A list of stickers before the update.\n :type before: Sequence[:class:`GuildSticker`]\n :param after: A list of stickers after the update.\n :type after: Sequence[:class:`GuildSticker`]\n\n.. function:: on_audit_log_entry_create(entry)\n\n Called when a :class:`Guild` gets a new audit log entry.\n You must have :attr:`~Permissions.view_audit_log` to receive this.\n\n This requires :attr:`Intents.moderation` to be enabled.\n\n .. versionadded:: 2.2\n\n .. warning::\n\n Audit log entries received through the gateway are subject to data retrieval\n from cache rather than REST. This means that some data might not be present\n when you expect it to be. For example, the :attr:`AuditLogEntry.target`\n attribute will usually be a :class:`discord.Object` and the\n :attr:`AuditLogEntry.user` attribute will depend on user and member cache.\n\n To get the user ID of entry, :attr:`AuditLogEntry.user_id` can be used instead.\n\n :param entry: The audit log entry that was created.\n :type entry: :class:`AuditLogEntry`\n\n.. function:: on_invite_create(invite)\n\n Called when an :class:`Invite` is created.\n You must have :attr:`~Permissions.manage_channels` to receive this.\n\n .. versionadded:: 1.3\n\n .. note::\n\n There is a rare possibility that the :attr:`Invite.guild` and :attr:`Invite.channel`\n attributes will be of :class:`Object` rather than the respective models.\n\n This requires :attr:`Intents.invites` to be enabled.\n\n :param invite: The invite that was created.\n :type invite: :class:`Invite`\n\n.. function:: on_invite_delete(invite)\n\n Called when an :class:`Invite` is deleted.\n You must have :attr:`~Permissions.manage_channels` to receive this.\n\n .. versionadded:: 1.3\n\n .. 
note::\n\n There is a rare possibility that the :attr:`Invite.guild` and :attr:`Invite.channel`\n attributes will be of :class:`Object` rather than the respective models.\n\n Outside of those two attributes, the only other attribute guaranteed to be\n filled by the Discord gateway for this event is :attr:`Invite.code`.\n\n This requires :attr:`Intents.invites` to be enabled.\n\n :param invite: The invite that was deleted.\n :type invite: :class:`Invite`\n\n\nIntegrations\n~~~~~~~~~~~~~\n\n.. function:: on_integration_create(integration)\n\n Called when an integration is created.\n\n This requires :attr:`Intents.integrations` to be enabled.\n\n .. versionadded:: 2.0\n\n :param integration: The integration that was created.\n :type integration: :class:`Integration`\n\n.. function:: on_integration_update(integration)\n\n Called when an integration is updated.\n\n This requires :attr:`Intents.integrations` to be enabled.\n\n .. versionadded:: 2.0\n\n :param integration: The integration that was updated.\n :type integration: :class:`Integration`\n\n.. function:: on_guild_integrations_update(guild)\n\n Called whenever an integration is created, modified, or removed from a guild.\n\n This requires :attr:`Intents.integrations` to be enabled.\n\n .. versionadded:: 1.4\n\n :param guild: The guild that had its integrations updated.\n :type guild: :class:`Guild`\n\n.. function:: on_webhooks_update(channel)\n\n Called whenever a webhook is created, modified, or removed from a guild channel.\n\n This requires :attr:`Intents.webhooks` to be enabled.\n\n :param channel: The channel that had its webhooks updated.\n :type channel: :class:`abc.GuildChannel`\n\n.. function:: on_raw_integration_delete(payload)\n\n Called when an integration is deleted.\n\n This requires :attr:`Intents.integrations` to be enabled.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawIntegrationDeleteEvent`\n\nInteractions\n~~~~~~~~~~~~~\n\n.. function:: on_interaction(interaction)\n\n Called when an interaction happened.\n\n This currently happens due to slash command invocations or components being used.\n\n .. warning::\n\n This is a low level function that is not generally meant to be used.\n If you are working with components, consider using the callbacks associated\n with the :class:`~discord.ui.View` instead as it provides a nicer user experience.\n\n .. versionadded:: 2.0\n\n :param interaction: The interaction data.\n :type interaction: :class:`Interaction`\n\nMembers\n~~~~~~~~\n\n.. function:: on_member_join(member)\n\n Called when a :class:`Member` joins a :class:`Guild`.\n\n This requires :attr:`Intents.members` to be enabled.\n\n :param member: The member who joined.\n :type member: :class:`Member`\n\n.. function:: on_member_remove(member)\n\n Called when a :class:`Member` leaves a :class:`Guild`.\n\n If the guild or member could not be found in the internal cache this event\n will not be called, you may use :func:`on_raw_member_remove` instead.\n\n This requires :attr:`Intents.members` to be enabled.\n\n :param member: The member who left.\n :type member: :class:`Member`\n\n.. function:: on_raw_member_remove(payload)\n\n Called when a :class:`Member` leaves a :class:`Guild`.\n\n Unlike :func:`on_member_remove`\n this is called regardless of the guild or member being in the internal cache.\n\n This requires :attr:`Intents.members` to be enabled.\n\n .. 
versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawMemberRemoveEvent`\n\n.. function:: on_member_update(before, after)\n\n Called when a :class:`Member` updates their profile.\n\n This is called when one or more of the following things change:\n\n - nickname\n - roles\n - pending\n - timeout\n - guild avatar\n - flags\n\n Due to a Discord limitation, this event is not dispatched when a member's timeout expires.\n\n This requires :attr:`Intents.members` to be enabled.\n\n :param before: The updated member's old info.\n :type before: :class:`Member`\n :param after: The updated member's updated info.\n :type after: :class:`Member`\n\n.. function:: on_user_update(before, after)\n\n Called when a :class:`User` updates their profile.\n\n This is called when one or more of the following things change:\n\n - avatar\n - username\n - discriminator\n\n This requires :attr:`Intents.members` to be enabled.\n\n :param before: The updated user's old info.\n :type before: :class:`User`\n :param after: The updated user's updated info.\n :type after: :class:`User`\n\n.. function:: on_member_ban(guild, user)\n\n Called when a user gets banned from a :class:`Guild`.\n\n This requires :attr:`Intents.moderation` to be enabled.\n\n :param guild: The guild the user got banned from.\n :type guild: :class:`Guild`\n :param user: The user that got banned.\n Can be either :class:`User` or :class:`Member` depending if\n the user was in the guild or not at the time of removal.\n :type user: Union[:class:`User`, :class:`Member`]\n\n.. function:: on_member_unban(guild, user)\n\n Called when a :class:`User` gets unbanned from a :class:`Guild`.\n\n This requires :attr:`Intents.moderation` to be enabled.\n\n :param guild: The guild the user got unbanned from.\n :type guild: :class:`Guild`\n :param user: The user that got unbanned.\n :type user: :class:`User`\n\n.. function:: on_presence_update(before, after)\n\n Called when a :class:`Member` updates their presence.\n\n This is called when one or more of the following things change:\n\n - status\n - activity\n\n This requires :attr:`Intents.presences` and :attr:`Intents.members` to be enabled.\n\n .. versionadded:: 2.0\n\n :param before: The updated member's old info.\n :type before: :class:`Member`\n :param after: The updated member's updated info.\n :type after: :class:`Member`\n\nMessages\n~~~~~~~~~\n\n.. function:: on_message(message)\n\n Called when a :class:`Message` is created and sent.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n .. warning::\n\n Your bot's own messages and private messages are sent through this\n event. This can lead cases of 'recursion' depending on how your bot was\n programmed. If you want the bot to not reply to itself, consider\n checking the user IDs. Note that :class:`~ext.commands.Bot` does not\n have this problem.\n\n :param message: The current message.\n :type message: :class:`Message`\n\n.. function:: on_message_edit(before, after)\n\n Called when a :class:`Message` receives an update event. 
If the message is not found\n in the internal message cache, then these events will not be called.\n Messages might not be in cache if the message is too old\n or the client is participating in high traffic guilds.\n\n If this occurs increase the :class:`max_messages <Client>` parameter\n or use the :func:`on_raw_message_edit` event instead.\n\n The following non-exhaustive cases trigger this event:\n\n - A message has been pinned or unpinned.\n - The message content has been changed.\n - The message has received an embed.\n\n - For performance reasons, the embed server does not do this in a \"consistent\" manner.\n\n - The message's embeds were suppressed or unsuppressed.\n - A call message has received an update to its participants or ending time.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param before: The previous version of the message.\n :type before: :class:`Message`\n :param after: The current version of the message.\n :type after: :class:`Message`\n\n.. function:: on_message_delete(message)\n\n Called when a message is deleted. If the message is not found in the\n internal message cache, then this event will not be called.\n Messages might not be in cache if the message is too old\n or the client is participating in high traffic guilds.\n\n If this occurs increase the :class:`max_messages <Client>` parameter\n or use the :func:`on_raw_message_delete` event instead.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param message: The deleted message.\n :type message: :class:`Message`\n\n.. function:: on_bulk_message_delete(messages)\n\n Called when messages are bulk deleted. If none of the messages deleted\n are found in the internal message cache, then this event will not be called.\n If individual messages were not found in the internal message cache,\n this event will still be called, but the messages not found will not be included in\n the messages list. Messages might not be in cache if the message is too old\n or the client is participating in high traffic guilds.\n\n If this occurs increase the :class:`max_messages <Client>` parameter\n or use the :func:`on_raw_bulk_message_delete` event instead.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param messages: The messages that have been deleted.\n :type messages: List[:class:`Message`]\n\n.. function:: on_raw_message_edit(payload)\n\n Called when a message is edited. Unlike :func:`on_message_edit`, this is called\n regardless of the state of the internal message cache.\n\n If the message is found in the message cache,\n it can be accessed via :attr:`RawMessageUpdateEvent.cached_message`. The cached message represents\n the message before it has been edited. For example, if the content of a message is modified and\n triggers the :func:`on_raw_message_edit` coroutine, the :attr:`RawMessageUpdateEvent.cached_message`\n will return a :class:`Message` object that represents the message before the content was modified.\n\n Due to the inherently raw nature of this event, the data parameter coincides with\n the raw data given by the :ddocs:`gateway <topics/gateway#message-update>`.\n\n Since the data payload can be partial, care must be taken when accessing stuff in the dictionary.\n One example of a common case of partial data is when the ``'content'`` key is inaccessible. 
This\n denotes an \"embed\" only edit, which is an edit in which only the embeds are updated by the Discord\n embed server.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawMessageUpdateEvent`\n\n\n.. function:: on_raw_message_delete(payload)\n\n Called when a message is deleted. Unlike :func:`on_message_delete`, this is\n called regardless of the message being in the internal message cache or not.\n\n If the message is found in the message cache,\n it can be accessed via :attr:`RawMessageDeleteEvent.cached_message`\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawMessageDeleteEvent`\n\n.. function:: on_raw_bulk_message_delete(payload)\n\n Called when a bulk delete is triggered. Unlike :func:`on_bulk_message_delete`, this is\n called regardless of the messages being in the internal message cache or not.\n\n If the messages are found in the message cache,\n they can be accessed via :attr:`RawBulkMessageDeleteEvent.cached_messages`\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawBulkMessageDeleteEvent`\n\nReactions\n~~~~~~~~~~\n\n.. function:: on_reaction_add(reaction, user)\n\n Called when a message has a reaction added to it. Similar to :func:`on_message_edit`,\n if the message is not found in the internal message cache, then this\n event will not be called. Consider using :func:`on_raw_reaction_add` instead.\n\n .. note::\n\n To get the :class:`Message` being reacted, access it via :attr:`Reaction.message`.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n .. note::\n\n This doesn't require :attr:`Intents.members` within a guild context,\n but due to Discord not providing updated user information in a direct message\n it's required for direct messages to receive this event.\n Consider using :func:`on_raw_reaction_add` if you need this and do not otherwise want\n to enable the members intent.\n\n .. warning::\n\n This event does not have a way of differentiating whether a reaction is a\n burst reaction (also known as \"super reaction\") or not. If you need this,\n consider using :func:`on_raw_reaction_add` instead.\n\n :param reaction: The current state of the reaction.\n :type reaction: :class:`Reaction`\n :param user: The user who added the reaction.\n :type user: Union[:class:`Member`, :class:`User`]\n\n.. function:: on_reaction_remove(reaction, user)\n\n Called when a message has a reaction removed from it. Similar to on_message_edit,\n if the message is not found in the internal message cache, then this event\n will not be called.\n\n .. note::\n\n To get the message being reacted, access it via :attr:`Reaction.message`.\n\n This requires both :attr:`Intents.reactions` and :attr:`Intents.members` to be enabled.\n\n .. note::\n\n Consider using :func:`on_raw_reaction_remove` if you need this and do not want\n to enable the members intent.\n\n .. warning::\n\n This event does not have a way of differentiating whether a reaction is a\n burst reaction (also known as \"super reaction\") or not. If you need this,\n consider using :func:`on_raw_reaction_remove` instead.\n\n :param reaction: The current state of the reaction.\n :type reaction: :class:`Reaction`\n :param user: The user whose reaction was removed.\n :type user: Union[:class:`Member`, :class:`User`]\n\n.. 
function:: on_reaction_clear(message, reactions)\n\n Called when a message has all its reactions removed from it. Similar to :func:`on_message_edit`,\n if the message is not found in the internal message cache, then this event\n will not be called. Consider using :func:`on_raw_reaction_clear` instead.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n :param message: The message that had its reactions cleared.\n :type message: :class:`Message`\n :param reactions: The reactions that were removed.\n :type reactions: List[:class:`Reaction`]\n\n.. function:: on_reaction_clear_emoji(reaction)\n\n Called when a message has a specific reaction removed from it. Similar to :func:`on_message_edit`,\n if the message is not found in the internal message cache, then this event\n will not be called. Consider using :func:`on_raw_reaction_clear_emoji` instead.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n .. versionadded:: 1.3\n\n :param reaction: The reaction that got cleared.\n :type reaction: :class:`Reaction`\n\n\n.. function:: on_raw_reaction_add(payload)\n\n Called when a message has a reaction added. Unlike :func:`on_reaction_add`, this is\n called regardless of the state of the internal message cache.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawReactionActionEvent`\n\n.. function:: on_raw_reaction_remove(payload)\n\n Called when a message has a reaction removed. Unlike :func:`on_reaction_remove`, this is\n called regardless of the state of the internal message cache.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawReactionActionEvent`\n\n.. function:: on_raw_reaction_clear(payload)\n\n Called when a message has all its reactions removed. Unlike :func:`on_reaction_clear`,\n this is called regardless of the state of the internal message cache.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawReactionClearEvent`\n\n.. function:: on_raw_reaction_clear_emoji(payload)\n\n Called when a message has a specific reaction removed from it. Unlike :func:`on_reaction_clear_emoji` this is called\n regardless of the state of the internal message cache.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n .. versionadded:: 1.3\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawReactionClearEmojiEvent`\n\n\nRoles\n~~~~~~\n\n.. function:: on_guild_role_create(role)\n on_guild_role_delete(role)\n\n Called when a :class:`Guild` creates or deletes a new :class:`Role`.\n\n To get the guild it belongs to, use :attr:`Role.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param role: The role that was created or deleted.\n :type role: :class:`Role`\n\n.. function:: on_guild_role_update(before, after)\n\n Called when a :class:`Role` is changed guild-wide.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param before: The updated role's old info.\n :type before: :class:`Role`\n :param after: The updated role's updated info.\n :type after: :class:`Role`\n\n\nScheduled Events\n~~~~~~~~~~~~~~~~~\n\n.. function:: on_scheduled_event_create(event)\n on_scheduled_event_delete(event)\n\n Called when a :class:`ScheduledEvent` is created or deleted.\n\n This requires :attr:`Intents.guild_scheduled_events` to be enabled.\n\n .. 
versionadded:: 2.0\n\n :param event: The scheduled event that was created or deleted.\n :type event: :class:`ScheduledEvent`\n\n.. function:: on_scheduled_event_update(before, after)\n\n Called when a :class:`ScheduledEvent` is updated.\n\n This requires :attr:`Intents.guild_scheduled_events` to be enabled.\n\n The following, but not limited to, examples illustrate when this event is called:\n\n - The scheduled start/end times are changed.\n - The channel is changed.\n - The description is changed.\n - The status is changed.\n - The image is changed.\n\n .. versionadded:: 2.0\n\n :param before: The scheduled event before the update.\n :type before: :class:`ScheduledEvent`\n :param after: The scheduled event after the update.\n :type after: :class:`ScheduledEvent`\n\n.. function:: on_scheduled_event_user_add(event, user)\n on_scheduled_event_user_remove(event, user)\n\n Called when a user is added or removed from a :class:`ScheduledEvent`.\n\n This requires :attr:`Intents.guild_scheduled_events` to be enabled.\n\n .. versionadded:: 2.0\n\n :param event: The scheduled event that the user was added or removed from.\n :type event: :class:`ScheduledEvent`\n :param user: The user that was added or removed.\n :type user: :class:`User`\n\n\nStages\n~~~~~~~\n\n.. function:: on_stage_instance_create(stage_instance)\n on_stage_instance_delete(stage_instance)\n\n Called when a :class:`StageInstance` is created or deleted for a :class:`StageChannel`.\n\n .. versionadded:: 2.0\n\n :param stage_instance: The stage instance that was created or deleted.\n :type stage_instance: :class:`StageInstance`\n\n.. function:: on_stage_instance_update(before, after)\n\n Called when a :class:`StageInstance` is updated.\n\n The following, but not limited to, examples illustrate when this event is called:\n\n - The topic is changed.\n - The privacy level is changed.\n\n .. versionadded:: 2.0\n\n :param before: The stage instance before the update.\n :type before: :class:`StageInstance`\n :param after: The stage instance after the update.\n :type after: :class:`StageInstance`\n\nThreads\n~~~~~~~~\n\n.. function:: on_thread_create(thread)\n\n Called whenever a thread is created.\n\n Note that you can get the guild from :attr:`Thread.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param thread: The thread that was created.\n :type thread: :class:`Thread`\n\n.. function:: on_thread_join(thread)\n\n Called whenever a thread is joined.\n\n Note that you can get the guild from :attr:`Thread.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param thread: The thread that got joined.\n :type thread: :class:`Thread`\n\n.. function:: on_thread_update(before, after)\n\n Called whenever a thread is updated. If the thread could\n not be found in the internal cache this event will not be called.\n Threads will not be in the cache if they are archived.\n\n If you need this information use :func:`on_raw_thread_update` instead.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param before: The updated thread's old info.\n :type before: :class:`Thread`\n :param after: The updated thread's new info.\n :type after: :class:`Thread`\n\n.. function:: on_thread_remove(thread)\n\n Called whenever a thread is removed. This is different from a thread being deleted.\n\n Note that you can get the guild from :attr:`Thread.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. 
warning::\n\n Due to technical limitations, this event might not be called\n as soon as one expects. Since the library tracks thread membership\n locally, the API only sends updated thread membership status upon being\n synced by joining a thread.\n\n .. versionadded:: 2.0\n\n :param thread: The thread that got removed.\n :type thread: :class:`Thread`\n\n.. function:: on_thread_delete(thread)\n\n Called whenever a thread is deleted. If the thread could\n not be found in the internal cache this event will not be called.\n Threads will not be in the cache if they are archived.\n\n If you need this information use :func:`on_raw_thread_delete` instead.\n\n Note that you can get the guild from :attr:`Thread.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param thread: The thread that got deleted.\n :type thread: :class:`Thread`\n\n.. function:: on_raw_thread_update(payload)\n\n Called whenever a thread is updated. Unlike :func:`on_thread_update` this\n is called regardless of the thread being in the internal thread cache or not.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawThreadUpdateEvent`\n\n.. function:: on_raw_thread_delete(payload)\n\n Called whenever a thread is deleted. Unlike :func:`on_thread_delete` this\n is called regardless of the thread being in the internal thread cache or not.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawThreadDeleteEvent`\n\n.. function:: on_thread_member_join(member)\n on_thread_member_remove(member)\n\n Called when a :class:`ThreadMember` leaves or joins a :class:`Thread`.\n\n You can get the thread a member belongs in by accessing :attr:`ThreadMember.thread`.\n\n This requires :attr:`Intents.members` to be enabled.\n\n .. versionadded:: 2.0\n\n :param member: The member who joined or left.\n :type member: :class:`ThreadMember`\n\n.. function:: on_raw_thread_member_remove(payload)\n\n Called when a :class:`ThreadMember` leaves a :class:`Thread`. Unlike :func:`on_thread_member_remove` this\n is called regardless of the member being in the internal thread's members cache or not.\n\n This requires :attr:`Intents.members` to be enabled.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawThreadMembersUpdate`\n\nVoice\n~~~~~~\n\n.. function:: on_voice_state_update(member, before, after)\n\n Called when a :class:`Member` changes their :class:`VoiceState`.\n\n The following, but not limited to, examples illustrate when this event is called:\n\n - A member joins a voice or stage channel.\n - A member leaves a voice or stage channel.\n - A member is muted or deafened by their own accord.\n - A member is muted or deafened by a guild administrator.\n\n This requires :attr:`Intents.voice_states` to be enabled.\n\n :param member: The member whose voice states changed.\n :type member: :class:`Member`\n :param before: The voice state prior to the changes.\n :type before: :class:`VoiceState`\n :param after: The voice state after the changes.\n :type after: :class:`VoiceState`\n\n.. _discord-api-utils:\n\nUtility Functions\n-----------------\n\n.. autofunction:: discord.utils.find\n\n.. autofunction:: discord.utils.get\n\n.. autofunction:: discord.utils.setup_logging\n\n.. autofunction:: discord.utils.maybe_coroutine\n\n.. 
autofunction:: discord.utils.snowflake_time\n\n.. autofunction:: discord.utils.time_snowflake\n\n.. autofunction:: discord.utils.oauth_url\n\n.. autofunction:: discord.utils.remove_markdown\n\n.. autofunction:: discord.utils.escape_markdown\n\n.. autofunction:: discord.utils.escape_mentions\n\n.. class:: ResolvedInvite\n\n A data class which represents a resolved invite returned from :func:`discord.utils.resolve_invite`.\n\n .. attribute:: code\n\n The invite code.\n\n :type: :class:`str`\n\n .. attribute:: event\n\n The id of the scheduled event that the invite refers to.\n\n :type: Optional[:class:`int`]\n\n.. autofunction:: discord.utils.resolve_invite\n\n.. autofunction:: discord.utils.resolve_template\n\n.. autofunction:: discord.utils.sleep_until\n\n.. autofunction:: discord.utils.utcnow\n\n.. autofunction:: discord.utils.format_dt\n\n.. autofunction:: discord.utils.as_chunks\n\n.. data:: MISSING\n :module: discord.utils\n\n A type safe sentinel used in the library to represent something as missing. Used to distinguish from ``None`` values.\n\n .. versionadded:: 2.0\n\n.. _discord-api-enums:\n\nEnumerations\n-------------\n\nThe API provides some enumerations for certain types of strings to avoid the API\nfrom being stringly typed in case the strings change in the future.\n\nAll enumerations are subclasses of an internal class which mimics the behaviour\nof :class:`enum.Enum`.\n\n.. class:: ChannelType\n\n Specifies the type of channel.\n\n .. attribute:: text\n\n A text channel.\n .. attribute:: voice\n\n A voice channel.\n .. attribute:: private\n\n A private text channel. Also called a direct message.\n .. attribute:: group\n\n A private group text channel.\n .. attribute:: category\n\n A category channel.\n .. attribute:: news\n\n A guild news channel.\n\n .. attribute:: stage_voice\n\n A guild stage voice channel.\n\n .. versionadded:: 1.7\n\n .. attribute:: news_thread\n\n A news thread\n\n .. versionadded:: 2.0\n\n .. attribute:: public_thread\n\n A public thread\n\n .. versionadded:: 2.0\n\n .. attribute:: private_thread\n\n A private thread\n\n .. versionadded:: 2.0\n\n .. attribute:: forum\n\n A forum channel.\n\n .. versionadded:: 2.0\n\n .. attribute:: media\n\n A media channel.\n\n .. versionadded:: 2.4\n\n.. class:: MessageType\n\n Specifies the type of :class:`Message`. This is used to denote if a message\n is to be interpreted as a system message or a regular message.\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two messages are equal.\n .. describe:: x != y\n\n Checks if two messages are not equal.\n\n .. attribute:: default\n\n The default message type. This is the same as regular messages.\n .. attribute:: recipient_add\n\n The system message when a user is added to a group private\n message or a thread.\n .. attribute:: recipient_remove\n\n The system message when a user is removed from a group private\n message or a thread.\n .. attribute:: call\n\n The system message denoting call state, e.g. missed call, started call,\n etc.\n .. attribute:: channel_name_change\n\n The system message denoting that a channel's name has been changed.\n .. attribute:: channel_icon_change\n\n The system message denoting that a channel's icon has been changed.\n .. attribute:: pins_add\n\n The system message denoting that a pinned message has been added to a channel.\n .. attribute:: new_member\n\n The system message denoting that a new member has joined a Guild.\n\n .. 
attribute:: premium_guild_subscription

        The system message denoting that a member has "nitro boosted" a guild.
    .. attribute:: premium_guild_tier_1

        The system message denoting that a member has "nitro boosted" a guild
        and it achieved level 1.
    .. attribute:: premium_guild_tier_2

        The system message denoting that a member has "nitro boosted" a guild
        and it achieved level 2.
    .. attribute:: premium_guild_tier_3

        The system message denoting that a member has "nitro boosted" a guild
        and it achieved level 3.
    .. attribute:: channel_follow_add

        The system message denoting that an announcement channel has been followed.

        .. versionadded:: 1.3
    .. attribute:: guild_stream

        The system message denoting that a member is streaming in the guild.

        .. versionadded:: 1.7
    .. attribute:: guild_discovery_disqualified

        The system message denoting that the guild is no longer eligible for Server
        Discovery.

        .. versionadded:: 1.7
    .. attribute:: guild_discovery_requalified

        The system message denoting that the guild has become eligible again for Server
        Discovery.

        .. versionadded:: 1.7
    .. attribute:: guild_discovery_grace_period_initial_warning

        The system message denoting that the guild has failed to meet the Server
        Discovery requirements for one week.

        .. versionadded:: 1.7
    .. attribute:: guild_discovery_grace_period_final_warning

        The system message denoting that the guild has failed to meet the Server
        Discovery requirements for 3 weeks in a row.

        .. versionadded:: 1.7
    .. attribute:: thread_created

        The system message denoting that a thread has been created. This is only
        sent if the thread has been created from an older message. The period of time
        required for a message to be considered old cannot be relied upon and is up to
        Discord.

        .. versionadded:: 2.0
    .. attribute:: reply

        The system message denoting that the author is replying to a message.

        .. versionadded:: 2.0
    .. attribute:: chat_input_command

        The system message denoting that a slash command was executed.

        .. versionadded:: 2.0
    .. attribute:: guild_invite_reminder

        The system message sent as a reminder to invite people to the guild.

        .. versionadded:: 2.0
    .. attribute:: thread_starter_message

        The system message denoting the message in the thread that is the one that started the
        thread's conversation topic.

        .. versionadded:: 2.0
    .. attribute:: context_menu_command

        The system message denoting that a context menu command was executed.

        .. versionadded:: 2.0
    .. attribute:: auto_moderation_action

        The system message sent when an AutoMod rule is triggered. This is only
        sent if the rule is configured to send an alert when triggered.

        .. versionadded:: 2.0
    .. attribute:: role_subscription_purchase

        The system message sent when a user purchases or renews a role subscription.

        .. versionadded:: 2.2
    .. attribute:: interaction_premium_upsell

        The system message sent when a user is given an advertisement to purchase a premium tier for
        an application during an interaction.

        .. versionadded:: 2.2
    .. attribute:: stage_start

        The system message sent when the stage starts.

        .. versionadded:: 2.2
    .. attribute:: stage_end

        The system message sent when the stage ends.

        .. versionadded:: 2.2
    .. attribute:: stage_speaker

        The system message sent when the stage speaker changes.

        .. versionadded:: 2.2
    .. attribute:: stage_raise_hand

        The system message sent when a user requests to speak by raising their hand.

        .. versionadded:: 2.2
    .. attribute:: stage_topic

        The system message sent when the stage topic changes.

        .. versionadded:: 2.2
    .. attribute:: guild_application_premium_subscription

        The system message sent when an application's premium subscription is purchased for the guild.

        .. versionadded:: 2.2

    .. attribute:: guild_incident_alert_mode_enabled

        The system message sent when security actions are enabled.

        .. versionadded:: 2.4

    .. attribute:: guild_incident_alert_mode_disabled

        The system message sent when security actions are disabled.

        .. versionadded:: 2.4

    .. attribute:: guild_incident_report_raid

        The system message sent when a raid is reported.

        .. versionadded:: 2.4

    .. attribute:: guild_incident_report_false_alarm

        The system message sent when a false alarm is reported.

        .. versionadded:: 2.4

.. class:: UserFlags

    Represents Discord User flags.

    .. attribute:: staff

        The user is a Discord Employee.
    .. attribute:: partner

        The user is a Discord Partner.
    .. attribute:: hypesquad

        The user is a HypeSquad Events member.
    .. attribute:: bug_hunter

        The user is a Bug Hunter.
    .. attribute:: mfa_sms

        The user has SMS recovery for Multi Factor Authentication enabled.
    .. attribute:: premium_promo_dismissed

        The user has dismissed the Discord Nitro promotion.
    .. attribute:: hypesquad_bravery

        The user is a HypeSquad Bravery member.
    .. attribute:: hypesquad_brilliance

        The user is a HypeSquad Brilliance member.
    .. attribute:: hypesquad_balance

        The user is a HypeSquad Balance member.
    .. attribute:: early_supporter

        The user is an Early Supporter.
    .. attribute:: team_user

        The user is a Team User.
    .. attribute:: system

        The user is a system user (i.e. represents Discord officially).
    .. attribute:: has_unread_urgent_messages

        The user has an unread system message.
    .. attribute:: bug_hunter_level_2

        The user is a Bug Hunter Level 2.
    .. attribute:: verified_bot

        The user is a Verified Bot.
    .. attribute:: verified_bot_developer

        The user is an Early Verified Bot Developer.
    .. attribute:: discord_certified_moderator

        The user is a Moderator Programs Alumni.
    .. attribute:: bot_http_interactions

        The user is a bot that only uses HTTP interactions and is shown in the online member list.

        .. versionadded:: 2.0
    .. attribute:: spammer

        The user is flagged as a spammer by Discord.

        .. versionadded:: 2.0

    .. attribute:: active_developer

        The user is an active developer.

        .. versionadded:: 2.1

.. class:: ActivityType

    Specifies the type of :class:`Activity`. This is used to check how to
    interpret the activity itself.

    .. attribute:: unknown

        An unknown activity type. This should generally not happen.
    .. attribute:: playing

        A "Playing" activity type.
    .. attribute:: streaming

        A "Streaming" activity type.
    .. attribute:: listening

        A "Listening" activity type.
    .. attribute:: watching

        A "Watching" activity type.
    .. attribute:: custom

        A custom activity type.
    .. attribute:: competing

        A competing activity type.

        .. versionadded:: 1.5

.. class:: VerificationLevel

    Specifies a :class:`Guild`\'s verification level, which is the criteria
    that a member must meet before being able to send messages to the guild.

    .. container:: operations

        .. 
versionadded:: 2.0\n\n .. describe:: x == y\n\n Checks if two verification levels are equal.\n .. describe:: x != y\n\n Checks if two verification levels are not equal.\n .. describe:: x > y\n\n Checks if a verification level is higher than another.\n .. describe:: x < y\n\n Checks if a verification level is lower than another.\n .. describe:: x >= y\n\n Checks if a verification level is higher or equal to another.\n .. describe:: x <= y\n\n Checks if a verification level is lower or equal to another.\n\n .. attribute:: none\n\n No criteria set.\n .. attribute:: low\n\n Member must have a verified email on their Discord account.\n .. attribute:: medium\n\n Member must have a verified email and be registered on Discord for more\n than five minutes.\n .. attribute:: high\n\n Member must have a verified email, be registered on Discord for more\n than five minutes, and be a member of the guild itself for more than\n ten minutes.\n .. attribute:: highest\n\n Member must have a verified phone on their Discord account.\n\n.. class:: NotificationLevel\n\n Specifies whether a :class:`Guild` has notifications on for all messages or mentions only by default.\n\n .. container:: operations\n\n .. versionadded:: 2.0\n\n .. describe:: x == y\n\n Checks if two notification levels are equal.\n .. describe:: x != y\n\n Checks if two notification levels are not equal.\n .. describe:: x > y\n\n Checks if a notification level is higher than another.\n .. describe:: x < y\n\n Checks if a notification level is lower than another.\n .. describe:: x >= y\n\n Checks if a notification level is higher or equal to another.\n .. describe:: x <= y\n\n Checks if a notification level is lower or equal to another.\n\n .. attribute:: all_messages\n\n Members receive notifications for every message regardless of them being mentioned.\n .. attribute:: only_mentions\n\n Members receive notifications for messages they are mentioned in.\n\n.. class:: ContentFilter\n\n Specifies a :class:`Guild`\\'s explicit content filter, which is the machine\n learning algorithms that Discord uses to detect if an image contains\n pornography or otherwise explicit content.\n\n .. container:: operations\n\n .. versionadded:: 2.0\n\n .. describe:: x == y\n\n Checks if two content filter levels are equal.\n .. describe:: x != y\n\n Checks if two content filter levels are not equal.\n .. describe:: x > y\n\n Checks if a content filter level is higher than another.\n .. describe:: x < y\n\n Checks if a content filter level is lower than another.\n .. describe:: x >= y\n\n Checks if a content filter level is higher or equal to another.\n .. describe:: x <= y\n\n Checks if a content filter level is lower or equal to another.\n\n .. attribute:: disabled\n\n The guild does not have the content filter enabled.\n .. attribute:: no_role\n\n The guild has the content filter enabled for members without a role.\n .. attribute:: all_members\n\n The guild has the content filter enabled for every member.\n\n.. class:: Status\n\n Specifies a :class:`Member` 's status.\n\n .. attribute:: online\n\n The member is online.\n .. attribute:: offline\n\n The member is offline.\n .. attribute:: idle\n\n The member is idle.\n .. attribute:: dnd\n\n The member is \"Do Not Disturb\".\n .. attribute:: do_not_disturb\n\n An alias for :attr:`dnd`.\n .. attribute:: invisible\n\n The member is \"invisible\". In reality, this is only used when sending\n a presence a la :meth:`Client.change_presence`. 
When you receive a\n user's presence this will be :attr:`offline` instead.\n\n\n.. class:: AuditLogAction\n\n Represents the type of action being done for a :class:`AuditLogEntry`\\,\n which is retrievable via :meth:`Guild.audit_logs`.\n\n .. attribute:: guild_update\n\n The guild has updated. Things that trigger this include:\n\n - Changing the guild vanity URL\n - Changing the guild invite splash\n - Changing the guild AFK channel or timeout\n - Changing the guild voice server region\n - Changing the guild icon, banner, or discovery splash\n - Changing the guild moderation settings\n - Changing things related to the guild widget\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Guild`.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.afk_channel`\n - :attr:`~AuditLogDiff.system_channel`\n - :attr:`~AuditLogDiff.afk_timeout`\n - :attr:`~AuditLogDiff.default_notifications`\n - :attr:`~AuditLogDiff.explicit_content_filter`\n - :attr:`~AuditLogDiff.mfa_level`\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.owner`\n - :attr:`~AuditLogDiff.splash`\n - :attr:`~AuditLogDiff.discovery_splash`\n - :attr:`~AuditLogDiff.icon`\n - :attr:`~AuditLogDiff.banner`\n - :attr:`~AuditLogDiff.vanity_url_code`\n - :attr:`~AuditLogDiff.description`\n - :attr:`~AuditLogDiff.preferred_locale`\n - :attr:`~AuditLogDiff.prune_delete_days`\n - :attr:`~AuditLogDiff.public_updates_channel`\n - :attr:`~AuditLogDiff.rules_channel`\n - :attr:`~AuditLogDiff.verification_level`\n - :attr:`~AuditLogDiff.widget_channel`\n - :attr:`~AuditLogDiff.widget_enabled`\n - :attr:`~AuditLogDiff.premium_progress_bar_enabled`\n - :attr:`~AuditLogDiff.system_channel_flags`\n\n .. attribute:: channel_create\n\n A new channel was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n either a :class:`abc.GuildChannel` or :class:`Object` with an ID.\n\n A more filled out object in the :class:`Object` case can be found\n by using :attr:`~AuditLogEntry.after`.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.type`\n - :attr:`~AuditLogDiff.overwrites`\n\n .. attribute:: channel_update\n\n A channel was updated. Things that trigger this include:\n\n - The channel name or topic was changed\n - The channel bitrate was changed\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`abc.GuildChannel` or :class:`Object` with an ID.\n\n A more filled out object in the :class:`Object` case can be found\n by using :attr:`~AuditLogEntry.after` or :attr:`~AuditLogEntry.before`.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.type`\n - :attr:`~AuditLogDiff.position`\n - :attr:`~AuditLogDiff.overwrites`\n - :attr:`~AuditLogDiff.topic`\n - :attr:`~AuditLogDiff.bitrate`\n - :attr:`~AuditLogDiff.rtc_region`\n - :attr:`~AuditLogDiff.video_quality_mode`\n - :attr:`~AuditLogDiff.default_auto_archive_duration`\n - :attr:`~AuditLogDiff.nsfw`\n - :attr:`~AuditLogDiff.slowmode_delay`\n - :attr:`~AuditLogDiff.user_limit`\n\n .. 
attribute:: channel_delete\n\n A channel was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n an :class:`Object` with an ID.\n\n A more filled out object can be found by using the\n :attr:`~AuditLogEntry.before` object.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.type`\n - :attr:`~AuditLogDiff.overwrites`\n - :attr:`~AuditLogDiff.flags`\n - :attr:`~AuditLogDiff.nsfw`\n - :attr:`~AuditLogDiff.slowmode_delay`\n\n .. attribute:: overwrite_create\n\n A channel permission overwrite was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`abc.GuildChannel` or :class:`Object` with an ID.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n either a :class:`Role` or :class:`Member`. If the object is not found\n then it is a :class:`Object` with an ID being filled, a name, and a\n ``type`` attribute set to either ``'role'`` or ``'member'`` to help\n dictate what type of ID it is.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.deny`\n - :attr:`~AuditLogDiff.allow`\n - :attr:`~AuditLogDiff.id`\n - :attr:`~AuditLogDiff.type`\n\n .. attribute:: overwrite_update\n\n A channel permission overwrite was changed, this is typically\n when the permission values change.\n\n See :attr:`overwrite_create` for more information on how the\n :attr:`~AuditLogEntry.target` and :attr:`~AuditLogEntry.extra` fields\n are set.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.deny`\n - :attr:`~AuditLogDiff.allow`\n - :attr:`~AuditLogDiff.id`\n - :attr:`~AuditLogDiff.type`\n\n .. attribute:: overwrite_delete\n\n A channel permission overwrite was deleted.\n\n See :attr:`overwrite_create` for more information on how the\n :attr:`~AuditLogEntry.target` and :attr:`~AuditLogEntry.extra` fields\n are set.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.deny`\n - :attr:`~AuditLogDiff.allow`\n - :attr:`~AuditLogDiff.id`\n - :attr:`~AuditLogDiff.type`\n\n .. attribute:: kick\n\n A member was kicked.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`User` or :class:`Object` who got kicked.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with one attribute:\n\n - ``integration_type``: An optional string that denotes the type of integration that did the action.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. attribute:: member_prune\n\n A member prune was triggered.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n set to ``None``.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with two attributes:\n\n - ``delete_member_days``: An integer specifying how far the prune was.\n - ``members_removed``: An integer specifying how many members were removed.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. attribute:: ban\n\n A member was banned.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`User` or :class:`Object` who got banned.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. 
attribute:: unban\n\n A member was unbanned.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`User` or :class:`Object` who got unbanned.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. attribute:: member_update\n\n A member has updated. This triggers in the following situations:\n\n - A nickname was changed\n - They were server muted or deafened (or it was undo'd)\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` who got updated.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.nick`\n - :attr:`~AuditLogDiff.mute`\n - :attr:`~AuditLogDiff.deaf`\n - :attr:`~AuditLogDiff.timed_out_until`\n\n .. attribute:: member_role_update\n\n A member's role has been updated. This triggers when a member\n either gains a role or loses a role.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` who got the role.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with one attribute:\n\n - ``integration_type``: An optional string that denotes the type of integration that did the action.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.roles`\n\n .. attribute:: member_move\n\n A member's voice channel has been updated. This triggers when a\n member is moved to a different voice channel.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with two attributes:\n\n - ``channel``: A :class:`TextChannel` or :class:`Object` with the channel ID where the members were moved.\n - ``count``: An integer specifying how many members were moved.\n\n .. versionadded:: 1.3\n\n .. attribute:: member_disconnect\n\n A member's voice state has changed. This triggers when a\n member is force disconnected from voice.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with one attribute:\n\n - ``count``: An integer specifying how many members were disconnected.\n\n .. versionadded:: 1.3\n\n .. attribute:: bot_add\n\n A bot was added to the guild.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` which was added to the guild.\n\n .. versionadded:: 1.3\n\n .. attribute:: role_create\n\n A new role was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Role` or a :class:`Object` with the ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.colour`\n - :attr:`~AuditLogDiff.mentionable`\n - :attr:`~AuditLogDiff.hoist`\n - :attr:`~AuditLogDiff.icon`\n - :attr:`~AuditLogDiff.unicode_emoji`\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.permissions`\n\n .. attribute:: role_update\n\n A role was updated. 
This triggers in the following situations:

        - The name has changed
        - The permissions have changed
        - The colour has changed
        - The role icon (or unicode emoji) has changed
        - Its hoist/mentionable state has changed

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Role` or a :class:`Object` with the ID.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.colour`
        - :attr:`~AuditLogDiff.mentionable`
        - :attr:`~AuditLogDiff.hoist`
        - :attr:`~AuditLogDiff.icon`
        - :attr:`~AuditLogDiff.unicode_emoji`
        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.permissions`

    .. attribute:: role_delete

        A role was deleted.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Role` or a :class:`Object` with the ID.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.colour`
        - :attr:`~AuditLogDiff.mentionable`
        - :attr:`~AuditLogDiff.hoist`
        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.permissions`

    .. attribute:: invite_create

        An invite was created.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Invite` that was created.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.max_age`
        - :attr:`~AuditLogDiff.code`
        - :attr:`~AuditLogDiff.temporary`
        - :attr:`~AuditLogDiff.inviter`
        - :attr:`~AuditLogDiff.channel`
        - :attr:`~AuditLogDiff.uses`
        - :attr:`~AuditLogDiff.max_uses`

    .. attribute:: invite_update

        An invite was updated.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Invite` that was updated.

    .. attribute:: invite_delete

        An invite was deleted.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Invite` that was deleted.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.max_age`
        - :attr:`~AuditLogDiff.code`
        - :attr:`~AuditLogDiff.temporary`
        - :attr:`~AuditLogDiff.inviter`
        - :attr:`~AuditLogDiff.channel`
        - :attr:`~AuditLogDiff.uses`
        - :attr:`~AuditLogDiff.max_uses`

    .. attribute:: webhook_create

        A webhook was created.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Object` with the webhook ID.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.channel`
        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.type` (always set to ``1`` if so)

    .. attribute:: webhook_update

        A webhook was updated. This triggers in the following situations:

        - The webhook name changed
        - The webhook channel changed

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Object` with the webhook ID.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.channel`
        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.avatar`

    .. attribute:: webhook_delete

        A webhook was deleted.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Object` with the webhook ID.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.channel`
        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.type` (always set to ``1`` if so)

    .. 
attribute:: emoji_create\n\n An emoji was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Emoji` or :class:`Object` with the emoji ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n\n .. attribute:: emoji_update\n\n An emoji was updated. This triggers when the name has changed.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Emoji` or :class:`Object` with the emoji ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n\n .. attribute:: emoji_delete\n\n An emoji was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Object` with the emoji ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n\n .. attribute:: message_delete\n\n A message was deleted by a moderator. Note that this\n only triggers if the message was deleted by someone other than the author.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` who had their message deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with two attributes:\n\n - ``count``: An integer specifying how many messages were deleted.\n - ``channel``: A :class:`TextChannel` or :class:`Object` with the channel ID where the message got deleted.\n\n .. attribute:: message_bulk_delete\n\n Messages were bulk deleted by a moderator.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`TextChannel` or :class:`Object` with the ID of the channel that was purged.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with one attribute:\n\n - ``count``: An integer specifying how many messages were deleted.\n\n .. versionadded:: 1.3\n\n .. attribute:: message_pin\n\n A message was pinned in a channel.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` who had their message pinned.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with two attributes:\n\n - ``channel``: A :class:`TextChannel` or :class:`Object` with the channel ID where the message was pinned.\n - ``message_id``: the ID of the message which was pinned.\n\n .. versionadded:: 1.3\n\n .. attribute:: message_unpin\n\n A message was unpinned in a channel.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` who had their message unpinned.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with two attributes:\n\n - ``channel``: A :class:`TextChannel` or :class:`Object` with the channel ID where the message was unpinned.\n - ``message_id``: the ID of the message which was unpinned.\n\n .. versionadded:: 1.3\n\n .. attribute:: integration_create\n\n A guild integration was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n a :class:`PartialIntegration` or :class:`Object` with the\n integration ID of the integration which was created.\n\n .. versionadded:: 1.3\n\n .. 
attribute:: integration_update

        A guild integration was updated.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        a :class:`PartialIntegration` or :class:`Object` with the
        integration ID of the integration which was updated.

        .. versionadded:: 1.3

    .. attribute:: integration_delete

        A guild integration was deleted.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        a :class:`PartialIntegration` or :class:`Object` with the
        integration ID of the integration which was deleted.

        .. versionadded:: 1.3

    .. attribute:: stage_instance_create

        A stage instance was started.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`StageInstance` or :class:`Object` with the ID of the stage
        instance which was created.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.topic`
        - :attr:`~AuditLogDiff.privacy_level`

        .. versionadded:: 2.0

    .. attribute:: stage_instance_update

        A stage instance was updated.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`StageInstance` or :class:`Object` with the ID of the stage
        instance which was updated.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.topic`
        - :attr:`~AuditLogDiff.privacy_level`

        .. versionadded:: 2.0

    .. attribute:: stage_instance_delete

        A stage instance was ended.

        .. versionadded:: 2.0

    .. attribute:: sticker_create

        A sticker was created.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`GuildSticker` or :class:`Object` with the ID of the sticker
        which was created.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.emoji`
        - :attr:`~AuditLogDiff.type`
        - :attr:`~AuditLogDiff.format_type`
        - :attr:`~AuditLogDiff.description`
        - :attr:`~AuditLogDiff.available`

        .. versionadded:: 2.0

    .. attribute:: sticker_update

        A sticker was updated.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`GuildSticker` or :class:`Object` with the ID of the sticker
        which was updated.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.emoji`
        - :attr:`~AuditLogDiff.type`
        - :attr:`~AuditLogDiff.format_type`
        - :attr:`~AuditLogDiff.description`
        - :attr:`~AuditLogDiff.available`

        .. versionadded:: 2.0

    .. attribute:: sticker_delete

        A sticker was deleted.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`GuildSticker` or :class:`Object` with the ID of the sticker
        which was deleted.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.emoji`
        - :attr:`~AuditLogDiff.type`
        - :attr:`~AuditLogDiff.format_type`
        - :attr:`~AuditLogDiff.description`
        - :attr:`~AuditLogDiff.available`

        .. versionadded:: 2.0

    .. 
attribute:: scheduled_event_create

        A scheduled event was created.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`ScheduledEvent` or :class:`Object` with the ID of the event
        which was created.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.channel`
        - :attr:`~AuditLogDiff.description`
        - :attr:`~AuditLogDiff.privacy_level`
        - :attr:`~AuditLogDiff.status`
        - :attr:`~AuditLogDiff.entity_type`
        - :attr:`~AuditLogDiff.cover_image`

        .. versionadded:: 2.0

    .. attribute:: scheduled_event_update

        A scheduled event was updated.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`ScheduledEvent` or :class:`Object` with the ID of the event
        which was updated.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.channel`
        - :attr:`~AuditLogDiff.description`
        - :attr:`~AuditLogDiff.privacy_level`
        - :attr:`~AuditLogDiff.status`
        - :attr:`~AuditLogDiff.entity_type`
        - :attr:`~AuditLogDiff.cover_image`

        .. versionadded:: 2.0

    .. attribute:: scheduled_event_delete

        A scheduled event was deleted.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`ScheduledEvent` or :class:`Object` with the ID of the event
        which was deleted.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.channel`
        - :attr:`~AuditLogDiff.description`
        - :attr:`~AuditLogDiff.privacy_level`
        - :attr:`~AuditLogDiff.status`
        - :attr:`~AuditLogDiff.entity_type`
        - :attr:`~AuditLogDiff.cover_image`

        .. versionadded:: 2.0

    .. attribute:: thread_create

        A thread was created.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Thread` or :class:`Object` with the ID of the thread which
        was created.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.archived`
        - :attr:`~AuditLogDiff.locked`
        - :attr:`~AuditLogDiff.auto_archive_duration`
        - :attr:`~AuditLogDiff.invitable`

        .. versionadded:: 2.0

    .. attribute:: thread_update

        A thread was updated.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Thread` or :class:`Object` with the ID of the thread which
        was updated.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.archived`
        - :attr:`~AuditLogDiff.locked`
        - :attr:`~AuditLogDiff.auto_archive_duration`
        - :attr:`~AuditLogDiff.invitable`

        .. versionadded:: 2.0

    .. attribute:: thread_delete

        A thread was deleted.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        the :class:`Thread` or :class:`Object` with the ID of the thread which
        was deleted.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.archived`
        - :attr:`~AuditLogDiff.locked`
        - :attr:`~AuditLogDiff.auto_archive_duration`
        - :attr:`~AuditLogDiff.invitable`

        .. versionadded:: 2.0

    .. attribute:: app_command_permission_update

        An application command's or integration's application command permissions
        were updated.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        a :class:`PartialIntegration` for an integration's general permissions,
        an :class:`~discord.app_commands.AppCommand` for a specific command's permissions,
        or an :class:`Object` with the ID of the command or integration which
        was updated.

        When this is the action, the type of :attr:`~AuditLogEntry.extra` is
        set to a :class:`PartialIntegration` or :class:`Object` with the ID of the
        application that the command or integration belongs to.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.app_command_permissions`

        .. versionadded:: 2.0

    .. attribute:: automod_rule_create

        An automod rule was created.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        an :class:`AutoModRule` or :class:`Object` with the ID of the automod
        rule that was created.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.enabled`
        - :attr:`~AuditLogDiff.event_type`
        - :attr:`~AuditLogDiff.trigger_type`
        - :attr:`~AuditLogDiff.trigger`
        - :attr:`~AuditLogDiff.actions`
        - :attr:`~AuditLogDiff.exempt_roles`
        - :attr:`~AuditLogDiff.exempt_channels`

        .. versionadded:: 2.0

    .. attribute:: automod_rule_update

        An automod rule was updated.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        an :class:`AutoModRule` or :class:`Object` with the ID of the automod
        rule that was updated.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.enabled`
        - :attr:`~AuditLogDiff.event_type`
        - :attr:`~AuditLogDiff.trigger_type`
        - :attr:`~AuditLogDiff.trigger`
        - :attr:`~AuditLogDiff.actions`
        - :attr:`~AuditLogDiff.exempt_roles`
        - :attr:`~AuditLogDiff.exempt_channels`

        .. versionadded:: 2.0

    .. attribute:: automod_rule_delete

        An automod rule was deleted.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        an :class:`AutoModRule` or :class:`Object` with the ID of the automod
        rule that was deleted.

        Possible attributes for :class:`AuditLogDiff`:

        - :attr:`~AuditLogDiff.name`
        - :attr:`~AuditLogDiff.enabled`
        - :attr:`~AuditLogDiff.event_type`
        - :attr:`~AuditLogDiff.trigger_type`
        - :attr:`~AuditLogDiff.trigger`
        - :attr:`~AuditLogDiff.actions`
        - :attr:`~AuditLogDiff.exempt_roles`
        - :attr:`~AuditLogDiff.exempt_channels`

        .. versionadded:: 2.0

    .. attribute:: automod_block_message

        An automod rule blocked a message from being sent.

        When this is the action, the type of :attr:`~AuditLogEntry.target` is
        a :class:`Member` with the ID of the person who triggered the automod rule.

        When this is the action, the type of :attr:`~AuditLogEntry.extra` is
        set to an unspecified proxy object with 3 attributes:

        - ``automod_rule_name``: The name of the automod rule that was triggered.
        - ``automod_rule_trigger_type``: A :class:`AutoModRuleTriggerType` representation of the rule type that was triggered.
        - ``channel``: The channel in which the automod rule was triggered.

        When this is the action, :attr:`AuditLogEntry.changes` is empty.

        .. versionadded:: 2.0

    .. 
attribute:: automod_flag_message\n\n An automod rule flagged a message.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n a :class:`Member` with the ID of the person who triggered the automod rule.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with 3 attributes:\n\n - ``automod_rule_name``: The name of the automod rule that was triggered.\n - ``automod_rule_trigger_type``: A :class:`AutoModRuleTriggerType` representation of the rule type that was triggered.\n - ``channel``: The channel in which the automod rule was triggered.\n\n When this is the action, :attr:`AuditLogEntry.changes` is empty.\n\n .. versionadded:: 2.1\n\n .. attribute:: automod_timeout_member\n\n An automod rule timed-out a member.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n a :class:`Member` with the ID of the person who triggered the automod rule.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with 3 attributes:\n\n - ``automod_rule_name``: The name of the automod rule that was triggered.\n - ``automod_rule_trigger_type``: A :class:`AutoModRuleTriggerType` representation of the rule type that was triggered.\n - ``channel``: The channel in which the automod rule was triggered.\n\n When this is the action, :attr:`AuditLogEntry.changes` is empty.\n\n .. versionadded:: 2.1\n\n .. attribute:: creator_monetization_request_created\n\n A request to monetize the server was created.\n\n .. versionadded:: 2.4\n\n .. attribute:: creator_monetization_terms_accepted\n\n The terms and conditions for creator monetization were accepted.\n\n .. versionadded:: 2.4\n\n.. class:: AuditLogActionCategory\n\n Represents the category that the :class:`AuditLogAction` belongs to.\n\n This can be retrieved via :attr:`AuditLogEntry.category`.\n\n .. attribute:: create\n\n The action is the creation of something.\n\n .. attribute:: delete\n\n The action is the deletion of something.\n\n .. attribute:: update\n\n The action is the update of something.\n\n.. class:: TeamMembershipState\n\n Represents the membership state of a team member retrieved through :func:`Client.application_info`.\n\n .. versionadded:: 1.3\n\n .. attribute:: invited\n\n Represents an invited member.\n\n .. attribute:: accepted\n\n Represents a member currently in the team.\n\n.. class:: TeamMemberRole\n\n Represents the type of role of a team member retrieved through :func:`Client.application_info`.\n\n .. versionadded:: 2.4\n\n .. attribute:: admin\n\n The team member is an admin. This allows them to invite members to the team, access credentials, edit the application,\n and do most things the owner can do. However they cannot do destructive actions.\n\n .. attribute:: developer\n\n The team member is a developer. This allows them to access information, like the client secret or public key.\n They can also configure interaction endpoints or reset the bot token. Developers cannot invite anyone to the team\n nor can they do destructive actions.\n\n .. attribute:: read_only\n\n The team member is a read-only member. This allows them to access information, but not edit anything.\n\n.. class:: WebhookType\n\n Represents the type of webhook that can be received.\n\n .. versionadded:: 1.3\n\n .. attribute:: incoming\n\n Represents a webhook that can post messages to channels with a token.\n\n .. 
attribute:: channel_follower\n\n Represents a webhook that is internally managed by Discord, used for following channels.\n\n .. attribute:: application\n\n Represents a webhook that is used for interactions or applications.\n\n .. versionadded:: 2.0\n\n.. class:: ExpireBehaviour\n\n Represents the behaviour the :class:`Integration` should perform\n when a user's subscription has finished.\n\n There is an alias for this called ``ExpireBehavior``.\n\n .. versionadded:: 1.4\n\n .. attribute:: remove_role\n\n This will remove the :attr:`StreamIntegration.role` from the user\n when their subscription is finished.\n\n .. attribute:: kick\n\n This will kick the user when their subscription is finished.\n\n.. class:: DefaultAvatar\n\n Represents the default avatar of a Discord :class:`User`\n\n .. attribute:: blurple\n\n Represents the default avatar with the colour blurple.\n See also :attr:`Colour.blurple`\n .. attribute:: grey\n\n Represents the default avatar with the colour grey.\n See also :attr:`Colour.greyple`\n .. attribute:: gray\n\n An alias for :attr:`grey`.\n .. attribute:: green\n\n Represents the default avatar with the colour green.\n See also :attr:`Colour.green`\n .. attribute:: orange\n\n Represents the default avatar with the colour orange.\n See also :attr:`Colour.orange`\n .. attribute:: red\n\n Represents the default avatar with the colour red.\n See also :attr:`Colour.red`\n .. attribute:: pink\n\n Represents the default avatar with the colour pink.\n See also :attr:`Colour.pink`\n\n .. versionadded:: 2.3\n\n.. class:: StickerType\n\n Represents the type of sticker.\n\n .. versionadded:: 2.0\n\n .. attribute:: standard\n\n Represents a standard sticker that all Nitro users can use.\n\n .. attribute:: guild\n\n Represents a custom sticker created in a guild.\n\n.. class:: StickerFormatType\n\n Represents the type of sticker images.\n\n .. versionadded:: 1.6\n\n .. attribute:: png\n\n Represents a sticker with a png image.\n\n .. attribute:: apng\n\n Represents a sticker with an apng image.\n\n .. attribute:: lottie\n\n Represents a sticker with a lottie image.\n\n .. attribute:: gif\n\n Represents a sticker with a gif image.\n\n .. versionadded:: 2.2\n\n.. class:: InviteTarget\n\n Represents the invite type for voice channel invites.\n\n .. versionadded:: 2.0\n\n .. attribute:: unknown\n\n The invite doesn't target anyone or anything.\n\n .. attribute:: stream\n\n A stream invite that targets a user.\n\n .. attribute:: embedded_application\n\n A stream invite that targets an embedded application.\n\n.. class:: VideoQualityMode\n\n Represents the camera video quality mode for voice channel participants.\n\n .. versionadded:: 2.0\n\n .. attribute:: auto\n\n Represents auto camera video quality.\n\n .. attribute:: full\n\n Represents full camera video quality.\n\n.. class:: PrivacyLevel\n\n Represents the privacy level of a stage instance or scheduled event.\n\n .. versionadded:: 2.0\n\n .. attribute:: guild_only\n\n The stage instance or scheduled event is only accessible within the guild.\n\n.. class:: NSFWLevel\n\n Represents the NSFW level of a guild.\n\n .. versionadded:: 2.0\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two NSFW levels are equal.\n .. describe:: x != y\n\n Checks if two NSFW levels are not equal.\n .. describe:: x > y\n\n Checks if a NSFW level is higher than another.\n .. describe:: x < y\n\n Checks if a NSFW level is lower than another.\n .. describe:: x >= y\n\n Checks if a NSFW level is higher or equal to another.\n .. 
describe:: x <= y\n\n Checks if a NSFW level is lower or equal to another.\n\n .. attribute:: default\n\n The guild has not been categorised yet.\n\n .. attribute:: explicit\n\n The guild contains NSFW content.\n\n .. attribute:: safe\n\n The guild does not contain any NSFW content.\n\n .. attribute:: age_restricted\n\n The guild may contain NSFW content.\n\n.. class:: Locale\n\n Supported locales by Discord. Mainly used for application command localisation.\n\n .. versionadded:: 2.0\n\n .. attribute:: american_english\n\n The ``en-US`` locale.\n\n .. attribute:: british_english\n\n The ``en-GB`` locale.\n\n .. attribute:: bulgarian\n\n The ``bg`` locale.\n\n .. attribute:: chinese\n\n The ``zh-CN`` locale.\n\n .. attribute:: taiwan_chinese\n\n The ``zh-TW`` locale.\n\n .. attribute:: croatian\n\n The ``hr`` locale.\n\n .. attribute:: czech\n\n The ``cs`` locale.\n\n .. attribute:: indonesian\n\n The ``id`` locale.\n\n .. versionadded:: 2.2\n\n .. attribute:: danish\n\n The ``da`` locale.\n\n .. attribute:: dutch\n\n The ``nl`` locale.\n\n .. attribute:: finnish\n\n The ``fi`` locale.\n\n .. attribute:: french\n\n The ``fr`` locale.\n\n .. attribute:: german\n\n The ``de`` locale.\n\n .. attribute:: greek\n\n The ``el`` locale.\n\n .. attribute:: hindi\n\n The ``hi`` locale.\n\n .. attribute:: hungarian\n\n The ``hu`` locale.\n\n .. attribute:: italian\n\n The ``it`` locale.\n\n .. attribute:: japanese\n\n The ``ja`` locale.\n\n .. attribute:: korean\n\n The ``ko`` locale.\n\n .. attribute:: lithuanian\n\n The ``lt`` locale.\n\n .. attribute:: norwegian\n\n The ``no`` locale.\n\n .. attribute:: polish\n\n The ``pl`` locale.\n\n .. attribute:: brazil_portuguese\n\n The ``pt-BR`` locale.\n\n .. attribute:: romanian\n\n The ``ro`` locale.\n\n .. attribute:: russian\n\n The ``ru`` locale.\n\n .. attribute:: spain_spanish\n\n The ``es-ES`` locale.\n\n .. attribute:: swedish\n\n The ``sv-SE`` locale.\n\n .. attribute:: thai\n\n The ``th`` locale.\n\n .. attribute:: turkish\n\n The ``tr`` locale.\n\n .. attribute:: ukrainian\n\n The ``uk`` locale.\n\n .. attribute:: vietnamese\n\n The ``vi`` locale.\n\n\n.. class:: MFALevel\n\n Represents the Multi-Factor Authentication requirement level of a guild.\n\n .. versionadded:: 2.0\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two MFA levels are equal.\n .. describe:: x != y\n\n Checks if two MFA levels are not equal.\n .. describe:: x > y\n\n Checks if a MFA level is higher than another.\n .. describe:: x < y\n\n Checks if a MFA level is lower than another.\n .. describe:: x >= y\n\n Checks if a MFA level is higher or equal to another.\n .. describe:: x <= y\n\n Checks if a MFA level is lower or equal to another.\n\n .. attribute:: disabled\n\n The guild has no MFA requirement.\n\n .. attribute:: require_2fa\n\n The guild requires 2 factor authentication.\n\n.. class:: EntityType\n\n Represents the type of entity that a scheduled event is for.\n\n .. versionadded:: 2.0\n\n .. attribute:: stage_instance\n\n The scheduled event will occur in a stage instance.\n\n .. attribute:: voice\n\n The scheduled event will occur in a voice channel.\n\n .. attribute:: external\n\n The scheduled event will occur externally.\n\n.. class:: EventStatus\n\n Represents the status of an event.\n\n .. versionadded:: 2.0\n\n .. attribute:: scheduled\n\n The event is scheduled.\n\n .. attribute:: active\n\n The event is active.\n\n .. attribute:: completed\n\n The event has ended.\n\n .. attribute:: cancelled\n\n The event has been cancelled.\n\n .. 
attribute:: canceled

        An alias for :attr:`cancelled`.

    .. attribute:: ended

        An alias for :attr:`completed`.

.. class:: AutoModRuleTriggerType

    Represents the trigger type of an automod rule.

    .. versionadded:: 2.0

    .. attribute:: keyword

        The rule will trigger when a keyword is mentioned.

    .. attribute:: harmful_link

        The rule will trigger when a harmful link is posted.

    .. attribute:: spam

        The rule will trigger when a spam message is posted.

    .. attribute:: keyword_preset

        The rule will trigger when something triggers based on the set keyword preset types.

    .. attribute:: mention_spam

        The rule will trigger when the combined number of role and user mentions
        is greater than the set limit.

    .. attribute:: member_profile

        The rule will trigger when a user's profile contains a keyword.

        .. versionadded:: 2.4

.. class:: AutoModRuleEventType

    Represents the event type of an automod rule.

    .. versionadded:: 2.0

    .. attribute:: message_send

        The rule will trigger when a message is sent.

    .. attribute:: member_update

        The rule will trigger when a member's profile is updated.

        .. versionadded:: 2.4

.. class:: AutoModRuleActionType

    Represents the action type of an automod rule.

    .. versionadded:: 2.0

    .. attribute:: block_message

        The rule will block a message from being sent.

    .. attribute:: send_alert_message

        The rule will send an alert message to a predefined channel.

    .. attribute:: timeout

        The rule will timeout a user.

    .. attribute:: block_member_interactions

        Similar to :attr:`timeout`, except the user will be timed out indefinitely.
        This will request the user to edit their profile.

        .. versionadded:: 2.4

.. class:: ForumLayoutType

    Represents how a forum's posts are laid out in the client.

    .. versionadded:: 2.2

    .. attribute:: not_set

        No default has been set, so it is up to the client to know how to lay it out.

    .. attribute:: list_view

        Displays posts as a list.

    .. attribute:: gallery_view

        Displays posts as a collection of tiles.


.. class:: ForumOrderType

    Represents how a forum's posts are sorted in the client.

    .. versionadded:: 2.3

    .. attribute:: latest_activity

        Sort forum posts by activity.

    .. attribute:: creation_date

        Sort forum posts by creation time (from most recent to oldest).

.. class:: SelectDefaultValueType

    Represents the default value of a select menu.

    .. versionadded:: 2.4

    .. attribute:: user

        The underlying type of the ID is a user.

    .. attribute:: role

        The underlying type of the ID is a role.

    .. attribute:: channel

        The underlying type of the ID is a channel or thread.


.. class:: SKUType

    Represents the type of a SKU.

    .. versionadded:: 2.4

    .. attribute:: subscription

        The SKU is a recurring subscription.

    .. attribute:: subscription_group

        The SKU is a system-generated group which is created for each :attr:`SKUType.subscription`.


.. class:: EntitlementType

    Represents the type of an entitlement.

    .. versionadded:: 2.4

    .. attribute:: application_subscription

        The entitlement was purchased as an app subscription.


.. class:: EntitlementOwnerType

    Represents the type of an entitlement owner.

    .. versionadded:: 2.4

    .. attribute:: guild

        The entitlement owner is a guild.

    .. attribute:: user

        The entitlement owner is a user.
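
The following is a short, hedged example of how these enumerations are used in
practice: members compare by equality, so they can be used directly in conditions
and passed as filters, for instance to :meth:`Guild.audit_logs` (documented in the
next section). This is a minimal sketch rather than a complete bot; the event
handler, the guild lookup, and the token are placeholders.

.. code-block:: python3

    import discord

    intents = discord.Intents.default()
    client = discord.Client(intents=intents)

    @client.event
    async def on_ready():
        # Placeholder: assumes the bot is in at least one guild and has
        # permission to view that guild's audit log.
        guild = client.guilds[0]

        # Enumeration members compare by equality, so they work in plain
        # conditions without string comparisons.
        for channel in guild.channels:
            if channel.type == discord.ChannelType.voice:
                print(f'{channel.name} is a voice channel')

        # They can also be passed as filters, e.g. to narrow an audit log
        # query down to ban entries only.
        async for entry in guild.audit_logs(limit=20, action=discord.AuditLogAction.ban):
            print(f'{entry.user} banned {entry.target}')

    client.run('token')  # placeholder token


.. 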
_discord-api-audit-logs:

Audit Log Data
----------------

Working with :meth:`Guild.audit_logs` is a complicated process with a lot of machinery
involved. The library attempts to make it easy and friendly to use. In order to
accomplish this, it makes use of a couple of data classes, described below.

AuditLogEntry
~~~~~~~~~~~~~~~

.. attributetable:: AuditLogEntry

.. autoclass:: AuditLogEntry
    :members:

AuditLogChanges
~~~~~~~~~~~~~~~~~

.. attributetable:: AuditLogChanges

.. class:: AuditLogChanges

    An audit log change set.

    .. attribute:: before

        The old value. The attribute has the type of :class:`AuditLogDiff`.

        Depending on the :class:`AuditLogActionCategory` retrieved by
        :attr:`~AuditLogEntry.category`\, the data retrieved by this
        attribute differs:

        +----------------------------------------+------------------------------------------------------+
        | Category                               | Description                                          |
        +----------------------------------------+------------------------------------------------------+
        | :attr:`~AuditLogActionCategory.create` | All attributes are set to ``None``.                  |
        +----------------------------------------+------------------------------------------------------+
        | :attr:`~AuditLogActionCategory.delete` | All attributes are set to the value before deletion. |
        +----------------------------------------+------------------------------------------------------+
        | :attr:`~AuditLogActionCategory.update` | All attributes are set to the value before updating. |
        +----------------------------------------+------------------------------------------------------+
        | ``None``                               | No attributes are set.                               |
        +----------------------------------------+------------------------------------------------------+

    .. attribute:: after

        The new value. The attribute has the type of :class:`AuditLogDiff`.

        Depending on the :class:`AuditLogActionCategory` retrieved by
        :attr:`~AuditLogEntry.category`\, the data retrieved by this
        attribute differs:

        +----------------------------------------+------------------------------------------------------+
        | Category                               | Description                                          |
        +----------------------------------------+------------------------------------------------------+
        | :attr:`~AuditLogActionCategory.create` | All attributes are set to the created value.         |
        +----------------------------------------+------------------------------------------------------+
        | :attr:`~AuditLogActionCategory.delete` | All attributes are set to ``None``.                  |
        +----------------------------------------+------------------------------------------------------+
        | :attr:`~AuditLogActionCategory.update` | All attributes are set to the value after updating.  |
        +----------------------------------------+------------------------------------------------------+
        | ``None``                               | No attributes are set.                               |
        +----------------------------------------+------------------------------------------------------+

AuditLogDiff
~~~~~~~~~~~~~

.. attributetable:: AuditLogDiff

.. class:: AuditLogDiff

    Represents an audit log "change" object. A change object has dynamic
    attributes that depend on the type of action being done. Certain actions
    map to certain attributes being set.

    Note that accessing an attribute that does not match the specified action
    will lead to an attribute error.

    To get a list of attributes that have been set, you can iterate over
    them. 
To see a list of all possible attributes that could be set based\n on the action being done, check the documentation for :class:`AuditLogAction`,\n otherwise check the documentation below for all attributes that are possible.\n\n .. container:: operations\n\n .. describe:: iter(diff)\n\n Returns an iterator over (attribute, value) tuple of this diff.\n\n .. attribute:: name\n\n A name of something.\n\n :type: :class:`str`\n\n .. attribute:: guild\n\n The guild of something.\n\n :type: :class:`Guild`\n\n .. attribute:: icon\n\n A guild's or role's icon. See also :attr:`Guild.icon` or :attr:`Role.icon`.\n\n :type: :class:`Asset`\n\n .. attribute:: splash\n\n The guild's invite splash. See also :attr:`Guild.splash`.\n\n :type: :class:`Asset`\n\n .. attribute:: discovery_splash\n\n The guild's discovery splash. See also :attr:`Guild.discovery_splash`.\n\n :type: :class:`Asset`\n\n .. attribute:: banner\n\n The guild's banner. See also :attr:`Guild.banner`.\n\n :type: :class:`Asset`\n\n .. attribute:: owner\n\n The guild's owner. See also :attr:`Guild.owner`\n\n :type: Union[:class:`Member`, :class:`User`]\n\n .. attribute:: afk_channel\n\n The guild's AFK channel.\n\n If this could not be found, then it falls back to a :class:`Object`\n with the ID being set.\n\n See :attr:`Guild.afk_channel`.\n\n :type: Union[:class:`VoiceChannel`, :class:`Object`]\n\n .. attribute:: system_channel\n\n The guild's system channel.\n\n If this could not be found, then it falls back to a :class:`Object`\n with the ID being set.\n\n See :attr:`Guild.system_channel`.\n\n :type: Union[:class:`TextChannel`, :class:`Object`]\n\n\n .. attribute:: rules_channel\n\n The guild's rules channel.\n\n If this could not be found then it falls back to a :class:`Object`\n with the ID being set.\n\n See :attr:`Guild.rules_channel`.\n\n :type: Union[:class:`TextChannel`, :class:`Object`]\n\n\n .. attribute:: public_updates_channel\n\n The guild's public updates channel.\n\n If this could not be found then it falls back to a :class:`Object`\n with the ID being set.\n\n See :attr:`Guild.public_updates_channel`.\n\n :type: Union[:class:`TextChannel`, :class:`Object`]\n\n .. attribute:: afk_timeout\n\n The guild's AFK timeout. See :attr:`Guild.afk_timeout`.\n\n :type: :class:`int`\n\n .. attribute:: mfa_level\n\n The guild's MFA level. See :attr:`Guild.mfa_level`.\n\n :type: :class:`MFALevel`\n\n .. attribute:: widget_enabled\n\n The guild's widget has been enabled or disabled.\n\n :type: :class:`bool`\n\n .. attribute:: widget_channel\n\n The widget's channel.\n\n If this could not be found then it falls back to a :class:`Object`\n with the ID being set.\n\n :type: Union[:class:`TextChannel`, :class:`Object`]\n\n .. attribute:: verification_level\n\n The guild's verification level.\n\n See also :attr:`Guild.verification_level`.\n\n :type: :class:`VerificationLevel`\n\n .. attribute:: default_notifications\n\n The guild's default notification level.\n\n See also :attr:`Guild.default_notifications`.\n\n :type: :class:`NotificationLevel`\n\n .. attribute:: explicit_content_filter\n\n The guild's content filter.\n\n See also :attr:`Guild.explicit_content_filter`.\n\n :type: :class:`ContentFilter`\n\n .. attribute:: vanity_url_code\n\n The guild's vanity URL.\n\n See also :meth:`Guild.vanity_invite` and :meth:`Guild.edit`.\n\n :type: :class:`str`\n\n .. attribute:: position\n\n The position of a :class:`Role` or :class:`abc.GuildChannel`.\n\n :type: :class:`int`\n\n .. 
attribute:: type\n\n The type of channel, sticker, webhook or integration.\n\n :type: Union[:class:`ChannelType`, :class:`StickerType`, :class:`WebhookType`, :class:`str`]\n\n .. attribute:: topic\n\n The topic of a :class:`TextChannel` or :class:`StageChannel`.\n\n See also :attr:`TextChannel.topic` or :attr:`StageChannel.topic`.\n\n :type: :class:`str`\n\n .. attribute:: bitrate\n\n The bitrate of a :class:`VoiceChannel`.\n\n See also :attr:`VoiceChannel.bitrate`.\n\n :type: :class:`int`\n\n .. attribute:: overwrites\n\n A list of permission overwrite tuples that represents a target and a\n :class:`PermissionOverwrite` for said target.\n\n The first element is the object being targeted, which can either\n be a :class:`Member` or :class:`User` or :class:`Role`. If this object\n is not found then it is a :class:`Object` with an ID being filled and\n a ``type`` attribute set to either ``'role'`` or ``'member'`` to help\n decide what type of ID it is.\n\n :type: List[Tuple[target, :class:`PermissionOverwrite`]]\n\n .. attribute:: privacy_level\n\n The privacy level of the stage instance or scheduled event\n\n :type: :class:`PrivacyLevel`\n\n .. attribute:: roles\n\n A list of roles being added or removed from a member.\n\n If a role is not found then it is a :class:`Object` with the ID and name being\n filled in.\n\n :type: List[Union[:class:`Role`, :class:`Object`]]\n\n .. attribute:: nick\n\n The nickname of a member.\n\n See also :attr:`Member.nick`\n\n :type: Optional[:class:`str`]\n\n .. attribute:: deaf\n\n Whether the member is being server deafened.\n\n See also :attr:`VoiceState.deaf`.\n\n :type: :class:`bool`\n\n .. attribute:: mute\n\n Whether the member is being server muted.\n\n See also :attr:`VoiceState.mute`.\n\n :type: :class:`bool`\n\n .. attribute:: permissions\n\n The permissions of a role.\n\n See also :attr:`Role.permissions`.\n\n :type: :class:`Permissions`\n\n .. attribute:: colour\n color\n\n The colour of a role.\n\n See also :attr:`Role.colour`\n\n :type: :class:`Colour`\n\n .. attribute:: hoist\n\n Whether the role is being hoisted or not.\n\n See also :attr:`Role.hoist`\n\n :type: :class:`bool`\n\n .. attribute:: mentionable\n\n Whether the role is mentionable or not.\n\n See also :attr:`Role.mentionable`\n\n :type: :class:`bool`\n\n .. attribute:: code\n\n The invite's code.\n\n See also :attr:`Invite.code`\n\n :type: :class:`str`\n\n .. attribute:: channel\n\n A guild channel.\n\n If the channel is not found then it is a :class:`Object` with the ID\n being set. In some cases the channel name is also set.\n\n :type: Union[:class:`abc.GuildChannel`, :class:`Object`]\n\n .. attribute:: inviter\n\n The user who created the invite.\n\n See also :attr:`Invite.inviter`.\n\n :type: Optional[:class:`User`]\n\n .. attribute:: max_uses\n\n The invite's max uses.\n\n See also :attr:`Invite.max_uses`.\n\n :type: :class:`int`\n\n .. attribute:: uses\n\n The invite's current uses.\n\n See also :attr:`Invite.uses`.\n\n :type: :class:`int`\n\n .. attribute:: max_age\n\n The invite's max age in seconds.\n\n See also :attr:`Invite.max_age`.\n\n :type: :class:`int`\n\n .. attribute:: temporary\n\n If the invite is a temporary invite.\n\n See also :attr:`Invite.temporary`.\n\n :type: :class:`bool`\n\n .. attribute:: allow\n deny\n\n The permissions being allowed or denied.\n\n :type: :class:`Permissions`\n\n .. attribute:: id\n\n The ID of the object being changed.\n\n :type: :class:`int`\n\n .. 
attribute:: avatar\n\n The avatar of a member.\n\n See also :attr:`User.avatar`.\n\n :type: :class:`Asset`\n\n .. attribute:: slowmode_delay\n\n The number of seconds members have to wait before\n sending another message in the channel.\n\n See also :attr:`TextChannel.slowmode_delay`.\n\n :type: :class:`int`\n\n .. attribute:: rtc_region\n\n The region for the voice channel’s voice communication.\n A value of ``None`` indicates automatic voice region detection.\n\n See also :attr:`VoiceChannel.rtc_region`.\n\n :type: :class:`str`\n\n .. attribute:: video_quality_mode\n\n The camera video quality for the voice channel's participants.\n\n See also :attr:`VoiceChannel.video_quality_mode`.\n\n :type: :class:`VideoQualityMode`\n\n .. attribute:: format_type\n\n The format type of a sticker being changed.\n\n See also :attr:`GuildSticker.format`\n\n :type: :class:`StickerFormatType`\n\n .. attribute:: emoji\n\n The name of the emoji that represents a sticker being changed.\n\n See also :attr:`GuildSticker.emoji`.\n\n :type: :class:`str`\n\n .. attribute:: unicode_emoji\n\n The unicode emoji that is used as an icon for the role being changed.\n\n See also :attr:`Role.unicode_emoji`.\n\n :type: :class:`str`\n\n .. attribute:: description\n\n The description of a guild, a sticker, or a scheduled event.\n\n See also :attr:`Guild.description`, :attr:`GuildSticker.description`, or\n :attr:`ScheduledEvent.description`.\n\n :type: :class:`str`\n\n .. attribute:: available\n\n The availability of a sticker being changed.\n\n See also :attr:`GuildSticker.available`\n\n :type: :class:`bool`\n\n .. attribute:: archived\n\n The thread is now archived.\n\n :type: :class:`bool`\n\n .. attribute:: locked\n\n The thread is being locked or unlocked.\n\n :type: :class:`bool`\n\n .. attribute:: auto_archive_duration\n\n The thread's auto archive duration being changed.\n\n See also :attr:`Thread.auto_archive_duration`\n\n :type: :class:`int`\n\n .. attribute:: default_auto_archive_duration\n\n The default auto archive duration for newly created threads being changed.\n\n :type: :class:`int`\n\n .. attribute:: invitable\n\n Whether non-moderators can add users to this private thread.\n\n :type: :class:`bool`\n\n .. attribute:: timed_out_until\n\n Whether the user is timed out, and if so until when.\n\n :type: Optional[:class:`datetime.datetime`]\n\n .. attribute:: enable_emoticons\n\n Integration emoticons were enabled or disabled.\n\n See also :attr:`StreamIntegration.enable_emoticons`\n\n :type: :class:`bool`\n\n .. attribute:: expire_behaviour\n expire_behavior\n\n The behaviour of expiring subscribers changed.\n\n See also :attr:`StreamIntegration.expire_behaviour`\n\n :type: :class:`ExpireBehaviour`\n\n .. attribute:: expire_grace_period\n\n The grace period before expiring subscribers changed.\n\n See also :attr:`StreamIntegration.expire_grace_period`\n\n :type: :class:`int`\n\n .. attribute:: preferred_locale\n\n The preferred locale for the guild changed.\n\n See also :attr:`Guild.preferred_locale`\n\n :type: :class:`Locale`\n\n .. attribute:: prune_delete_days\n\n The number of days after which inactive and role-unassigned members are kicked has been changed.\n\n :type: :class:`int`\n\n .. attribute:: status\n\n The status of the scheduled event.\n\n :type: :class:`EventStatus`\n\n .. attribute:: entity_type\n\n The type of entity this scheduled event is for.\n\n :type: :class:`EntityType`\n\n .. 
attribute:: cover_image\n\n The scheduled event's cover image.\n\n See also :attr:`ScheduledEvent.cover_image`.\n\n :type: :class:`Asset`\n\n .. attribute:: app_command_permissions\n\n List of permissions for the app command.\n\n :type: List[:class:`~discord.app_commands.AppCommandPermissions`]\n\n .. attribute:: enabled\n\n Whether the automod rule is active or not.\n\n :type: :class:`bool`\n\n .. attribute:: event_type\n\n The event type for triggering the automod rule.\n\n :type: :class:`AutoModRuleEventType`\n\n .. attribute:: trigger_type\n\n The trigger type for the automod rule.\n\n :type: :class:`AutoModRuleTriggerType`\n\n .. attribute:: trigger\n\n The trigger for the automod rule.\n\n .. note ::\n\n The :attr:`~AutoModTrigger.type` of the trigger may be incorrect.\n Some attributes such as :attr:`~AutoModTrigger.keyword_filter`, :attr:`~AutoModTrigger.regex_patterns`,\n and :attr:`~AutoModTrigger.allow_list` will only have the added or removed values.\n\n :type: :class:`AutoModTrigger`\n\n .. attribute:: actions\n\n The actions to take when an automod rule is triggered.\n\n :type: List[AutoModRuleAction]\n\n .. attribute:: exempt_roles\n\n The list of roles that are exempt from the automod rule.\n\n :type: List[Union[:class:`Role`, :class:`Object`]]\n\n .. attribute:: exempt_channels\n\n The list of channels or threads that are exempt from the automod rule.\n\n :type: List[:class:`abc.GuildChannel`, :class:`Thread`, :class:`Object`]\n\n .. attribute:: premium_progress_bar_enabled\n\n The guild’s display setting to show boost progress bar.\n\n :type: :class:`bool`\n\n .. attribute:: system_channel_flags\n\n The guild’s system channel settings.\n\n See also :attr:`Guild.system_channel_flags`\n\n :type: :class:`SystemChannelFlags`\n\n .. attribute:: nsfw\n\n Whether the channel is marked as “not safe for work” or “age restricted”.\n\n :type: :class:`bool`\n\n .. attribute:: user_limit\n\n The channel’s limit for number of members that can be in a voice or stage channel.\n\n See also :attr:`VoiceChannel.user_limit` and :attr:`StageChannel.user_limit`\n\n :type: :class:`int`\n\n .. attribute:: flags\n\n The channel flags associated with this thread or forum post.\n\n See also :attr:`ForumChannel.flags` and :attr:`Thread.flags`\n\n :type: :class:`ChannelFlags`\n\n .. attribute:: default_thread_slowmode_delay\n\n The default slowmode delay for threads created in this text channel or forum.\n\n See also :attr:`TextChannel.default_thread_slowmode_delay` and :attr:`ForumChannel.default_thread_slowmode_delay`\n\n :type: :class:`int`\n\n .. attribute:: applied_tags\n\n The applied tags of a forum post.\n\n See also :attr:`Thread.applied_tags`\n\n :type: List[Union[:class:`ForumTag`, :class:`Object`]]\n\n .. attribute:: available_tags\n\n The available tags of a forum.\n\n See also :attr:`ForumChannel.available_tags`\n\n :type: Sequence[:class:`ForumTag`]\n\n .. attribute:: default_reaction_emoji\n\n The default_reaction_emoji for forum posts.\n\n See also :attr:`ForumChannel.default_reaction_emoji`\n\n :type: Optional[:class:`PartialEmoji`]\n\n.. this is currently missing the following keys: reason and application_id\n I'm not sure how to port these\n\nWebhook Support\n------------------\n\ndiscord.py offers support for creating, editing, and executing webhooks through the :class:`Webhook` class.\n\nWebhook\n~~~~~~~~~\n\n.. attributetable:: Webhook\n\n.. autoclass:: Webhook()\n :members:\n :inherited-members:\n\nWebhookMessage\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: WebhookMessage\n\n.. 
autoclass:: WebhookMessage()\n :members:\n :inherited-members:\n\nSyncWebhook\n~~~~~~~~~~~~\n\n.. attributetable:: SyncWebhook\n\n.. autoclass:: SyncWebhook()\n :members:\n :inherited-members:\n\nSyncWebhookMessage\n~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: SyncWebhookMessage\n\n.. autoclass:: SyncWebhookMessage()\n :members:\n\n.. _discord_api_abcs:\n\nAbstract Base Classes\n-----------------------\n\nAn :term:`abstract base class` (also known as an ``abc``) is a class that models can inherit\nto get their behaviour. **Abstract base classes should not be instantiated**.\nThey are mainly there for usage with :func:`isinstance` and :func:`issubclass`\\.\n\nThis library has a module related to abstract base classes, in which all the ABCs are subclasses of\n:class:`typing.Protocol`.\n\nSnowflake\n~~~~~~~~~~\n\n.. attributetable:: discord.abc.Snowflake\n\n.. autoclass:: discord.abc.Snowflake()\n :members:\n\nUser\n~~~~~\n\n.. attributetable:: discord.abc.User\n\n.. autoclass:: discord.abc.User()\n :members:\n\nPrivateChannel\n~~~~~~~~~~~~~~~\n\n.. attributetable:: discord.abc.PrivateChannel\n\n.. autoclass:: discord.abc.PrivateChannel()\n :members:\n\nGuildChannel\n~~~~~~~~~~~~~\n\n.. attributetable:: discord.abc.GuildChannel\n\n.. autoclass:: discord.abc.GuildChannel()\n :members:\n\nMessageable\n~~~~~~~~~~~~\n\n.. attributetable:: discord.abc.Messageable\n\n.. autoclass:: discord.abc.Messageable()\n :members:\n :exclude-members: typing\n\n .. automethod:: discord.abc.Messageable.typing\n :async-with:\n\nConnectable\n~~~~~~~~~~~~\n\n.. attributetable:: discord.abc.Connectable\n\n.. autoclass:: discord.abc.Connectable()\n :members:\n\n.. _discord_api_models:\n\nDiscord Models\n---------------\n\nModels are classes that are received from Discord and are not meant to be created by\nthe user of the library.\n\n.. danger::\n\n The classes listed below are **not intended to be created by users** and are also\n **read-only**.\n\n For example, this means that you should not make your own :class:`User` instances\n nor should you modify the :class:`User` instance yourself.\n\n If you want to get one of these model classes instances they'd have to be through\n the cache, and a common way of doing so is through the :func:`utils.find` function\n or attributes of model classes that you receive from the events specified in the\n :ref:`discord-api-events`.\n\n.. note::\n\n Nearly all classes here have :ref:`py:slots` defined which means that it is\n impossible to have dynamic attributes to the data classes.\n\n\nClientUser\n~~~~~~~~~~~~\n\n.. attributetable:: ClientUser\n\n.. autoclass:: ClientUser()\n :members:\n :inherited-members:\n\nUser\n~~~~~\n\n.. attributetable:: User\n\n.. autoclass:: User()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nAutoMod\n~~~~~~~\n\n.. attributetable:: AutoModRule\n\n.. autoclass:: AutoModRule()\n :members:\n\n.. attributetable:: AutoModAction\n\n.. autoclass:: AutoModAction()\n :members:\n\nAttachment\n~~~~~~~~~~~\n\n.. attributetable:: Attachment\n\n.. autoclass:: Attachment()\n :members:\n\nAsset\n~~~~~\n\n.. attributetable:: Asset\n\n.. autoclass:: Asset()\n :members:\n :inherited-members:\n\nMessage\n~~~~~~~\n\n.. attributetable:: Message\n\n.. autoclass:: Message()\n :members:\n :inherited-members:\n\nDeletedReferencedMessage\n~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: DeletedReferencedMessage\n\n.. autoclass:: DeletedReferencedMessage()\n :members:\n\n\nReaction\n~~~~~~~~~\n\n.. 
attributetable:: Reaction\n\n.. autoclass:: Reaction()\n :members:\n\nGuild\n~~~~~~\n\n.. attributetable:: Guild\n\n.. autoclass:: Guild()\n :members:\n\n.. class:: BanEntry\n\n A namedtuple which represents a ban returned from :meth:`~Guild.bans`.\n\n .. attribute:: reason\n\n The reason this user was banned.\n\n :type: Optional[:class:`str`]\n .. attribute:: user\n\n The :class:`User` that was banned.\n\n :type: :class:`User`\n\n\nScheduledEvent\n~~~~~~~~~~~~~~\n\n.. attributetable:: ScheduledEvent\n\n.. autoclass:: ScheduledEvent()\n :members:\n\n\nIntegration\n~~~~~~~~~~~~\n\n.. attributetable:: Integration\n\n.. autoclass:: Integration()\n :members:\n\n.. attributetable:: IntegrationAccount\n\n.. autoclass:: IntegrationAccount()\n :members:\n\n.. attributetable:: BotIntegration\n\n.. autoclass:: BotIntegration()\n :members:\n\n.. attributetable:: IntegrationApplication\n\n.. autoclass:: IntegrationApplication()\n :members:\n\n.. attributetable:: StreamIntegration\n\n.. autoclass:: StreamIntegration()\n :members:\n\n.. attributetable:: PartialIntegration\n\n.. autoclass:: PartialIntegration()\n :members:\n\nMember\n~~~~~~\n\n.. attributetable:: Member\n\n.. autoclass:: Member()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nSpotify\n~~~~~~~~\n\n.. attributetable:: Spotify\n\n.. autoclass:: Spotify()\n :members:\n\nVoiceState\n~~~~~~~~~~~\n\n.. attributetable:: VoiceState\n\n.. autoclass:: VoiceState()\n :members:\n\nEmoji\n~~~~~\n\n.. attributetable:: Emoji\n\n.. autoclass:: Emoji()\n :members:\n :inherited-members:\n\nPartialEmoji\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialEmoji\n\n.. autoclass:: PartialEmoji()\n :members:\n :inherited-members:\n\nRole\n~~~~~\n\n.. attributetable:: Role\n\n.. autoclass:: Role()\n :members:\n\nRoleTags\n~~~~~~~~~~\n\n.. attributetable:: RoleTags\n\n.. autoclass:: RoleTags()\n :members:\n\nPartialMessageable\n~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialMessageable\n\n.. autoclass:: PartialMessageable()\n :members:\n :inherited-members:\n\nTextChannel\n~~~~~~~~~~~~\n\n.. attributetable:: TextChannel\n\n.. autoclass:: TextChannel()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nForumChannel\n~~~~~~~~~~~~~\n\n.. attributetable:: ForumChannel\n\n.. autoclass:: ForumChannel()\n :members:\n :inherited-members:\n\nThread\n~~~~~~~~\n\n.. attributetable:: Thread\n\n.. autoclass:: Thread()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nThreadMember\n~~~~~~~~~~~~~\n\n.. attributetable:: ThreadMember\n\n.. autoclass:: ThreadMember()\n :members:\n\nVoiceChannel\n~~~~~~~~~~~~~\n\n.. attributetable:: VoiceChannel\n\n.. autoclass:: VoiceChannel()\n :members:\n :inherited-members:\n\nStageChannel\n~~~~~~~~~~~~~\n\n.. attributetable:: StageChannel\n\n.. autoclass:: StageChannel()\n :members:\n :inherited-members:\n\n\nStageInstance\n~~~~~~~~~~~~~~\n\n.. attributetable:: StageInstance\n\n.. autoclass:: StageInstance()\n :members:\n\nCategoryChannel\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: CategoryChannel\n\n.. autoclass:: CategoryChannel()\n :members:\n :inherited-members:\n\nDMChannel\n~~~~~~~~~\n\n.. attributetable:: DMChannel\n\n.. autoclass:: DMChannel()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nGroupChannel\n~~~~~~~~~~~~\n\n.. attributetable:: GroupChannel\n\n.. 
autoclass:: GroupChannel()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nPartialInviteGuild\n~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialInviteGuild\n\n.. autoclass:: PartialInviteGuild()\n :members:\n\nPartialInviteChannel\n~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialInviteChannel\n\n.. autoclass:: PartialInviteChannel()\n :members:\n\nInvite\n~~~~~~~\n\n.. attributetable:: Invite\n\n.. autoclass:: Invite()\n :members:\n\nTemplate\n~~~~~~~~~\n\n.. attributetable:: Template\n\n.. autoclass:: Template()\n :members:\n\nWelcomeScreen\n~~~~~~~~~~~~~~~\n\n.. attributetable:: WelcomeScreen\n\n.. autoclass:: WelcomeScreen()\n :members:\n\nWelcomeChannel\n~~~~~~~~~~~~~~~\n\n.. attributetable:: WelcomeChannel\n\n.. autoclass:: WelcomeChannel()\n :members:\n\nWidgetChannel\n~~~~~~~~~~~~~~~\n\n.. attributetable:: WidgetChannel\n\n.. autoclass:: WidgetChannel()\n :members:\n\nWidgetMember\n~~~~~~~~~~~~~\n\n.. attributetable:: WidgetMember\n\n.. autoclass:: WidgetMember()\n :members:\n :inherited-members:\n\nWidget\n~~~~~~~\n\n.. attributetable:: Widget\n\n.. autoclass:: Widget()\n :members:\n\nStickerPack\n~~~~~~~~~~~~~\n\n.. attributetable:: StickerPack\n\n.. autoclass:: StickerPack()\n :members:\n\nStickerItem\n~~~~~~~~~~~~~\n\n.. attributetable:: StickerItem\n\n.. autoclass:: StickerItem()\n :members:\n\nSticker\n~~~~~~~~~~~~~~~\n\n.. attributetable:: Sticker\n\n.. autoclass:: Sticker()\n :members:\n\nStandardSticker\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: StandardSticker\n\n.. autoclass:: StandardSticker()\n :members:\n\nGuildSticker\n~~~~~~~~~~~~~\n\n.. attributetable:: GuildSticker\n\n.. autoclass:: GuildSticker()\n :members:\n\nShardInfo\n~~~~~~~~~~~\n\n.. attributetable:: ShardInfo\n\n.. autoclass:: ShardInfo()\n :members:\n\nSKU\n~~~~~~~~~~~\n\n.. attributetable:: SKU\n\n.. autoclass:: SKU()\n :members:\n\nEntitlement\n~~~~~~~~~~~\n\n.. attributetable:: Entitlement\n\n.. autoclass:: Entitlement()\n :members:\n\nRawMessageDeleteEvent\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawMessageDeleteEvent\n\n.. autoclass:: RawMessageDeleteEvent()\n :members:\n\nRawBulkMessageDeleteEvent\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawBulkMessageDeleteEvent\n\n.. autoclass:: RawBulkMessageDeleteEvent()\n :members:\n\nRawMessageUpdateEvent\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawMessageUpdateEvent\n\n.. autoclass:: RawMessageUpdateEvent()\n :members:\n\nRawReactionActionEvent\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawReactionActionEvent\n\n.. autoclass:: RawReactionActionEvent()\n :members:\n\nRawReactionClearEvent\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawReactionClearEvent\n\n.. autoclass:: RawReactionClearEvent()\n :members:\n\nRawReactionClearEmojiEvent\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawReactionClearEmojiEvent\n\n.. autoclass:: RawReactionClearEmojiEvent()\n :members:\n\nRawIntegrationDeleteEvent\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawIntegrationDeleteEvent\n\n.. autoclass:: RawIntegrationDeleteEvent()\n :members:\n\nRawThreadUpdateEvent\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawThreadUpdateEvent\n\n.. autoclass:: RawThreadUpdateEvent()\n :members:\n\nRawThreadMembersUpdate\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawThreadMembersUpdate\n\n.. autoclass:: RawThreadMembersUpdate()\n :members:\n\nRawThreadDeleteEvent\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawThreadDeleteEvent\n\n.. 
autoclass:: RawThreadDeleteEvent()\n :members:\n\nRawTypingEvent\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawTypingEvent\n\n.. autoclass:: RawTypingEvent()\n :members:\n\nRawMemberRemoveEvent\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawMemberRemoveEvent\n\n.. autoclass:: RawMemberRemoveEvent()\n :members:\n\nRawAppCommandPermissionsUpdateEvent\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawAppCommandPermissionsUpdateEvent\n\n.. autoclass:: RawAppCommandPermissionsUpdateEvent()\n :members:\n\nPartialWebhookGuild\n~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialWebhookGuild\n\n.. autoclass:: PartialWebhookGuild()\n :members:\n\nPartialWebhookChannel\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialWebhookChannel\n\n.. autoclass:: PartialWebhookChannel()\n :members:\n\n.. _discord_api_data:\n\nData Classes\n--------------\n\nSome classes are just there to be data containers, this lists them.\n\nUnlike :ref:`models <discord_api_models>` you are allowed to create\nmost of these yourself, even if they can also be used to hold attributes.\n\nNearly all classes here have :ref:`py:slots` defined which means that it is\nimpossible to have dynamic attributes to the data classes.\n\nThe only exception to this rule is :class:`Object`, which is made with\ndynamic attributes in mind.\n\n\nObject\n~~~~~~~\n\n.. attributetable:: Object\n\n.. autoclass:: Object\n :members:\n\nEmbed\n~~~~~~\n\n.. attributetable:: Embed\n\n.. autoclass:: Embed\n :members:\n\nAllowedMentions\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: AllowedMentions\n\n.. autoclass:: AllowedMentions\n :members:\n\nMessageReference\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: MessageReference\n\n.. autoclass:: MessageReference\n :members:\n\nPartialMessage\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialMessage\n\n.. autoclass:: PartialMessage\n :members:\n\nMessageApplication\n~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: MessageApplication\n\n.. autoclass:: MessageApplication\n :members:\n\nRoleSubscriptionInfo\n~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RoleSubscriptionInfo\n\n.. autoclass:: RoleSubscriptionInfo\n :members:\n\nIntents\n~~~~~~~~~~\n\n.. attributetable:: Intents\n\n.. autoclass:: Intents\n :members:\n\nMemberCacheFlags\n~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: MemberCacheFlags\n\n.. autoclass:: MemberCacheFlags\n :members:\n\nApplicationFlags\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: ApplicationFlags\n\n.. autoclass:: ApplicationFlags\n :members:\n\nChannelFlags\n~~~~~~~~~~~~~~\n\n.. attributetable:: ChannelFlags\n\n.. autoclass:: ChannelFlags\n :members:\n\nAutoModPresets\n~~~~~~~~~~~~~~\n\n.. attributetable:: AutoModPresets\n\n.. autoclass:: AutoModPresets\n :members:\n\nAutoModRuleAction\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: AutoModRuleAction\n\n.. autoclass:: AutoModRuleAction\n :members:\n\nAutoModTrigger\n~~~~~~~~~~~~~~\n\n.. attributetable:: AutoModTrigger\n\n.. autoclass:: AutoModTrigger\n :members:\n\nFile\n~~~~~\n\n.. attributetable:: File\n\n.. autoclass:: File\n :members:\n\nColour\n~~~~~~\n\n.. attributetable:: Colour\n\n.. autoclass:: Colour\n :members:\n\nBaseActivity\n~~~~~~~~~~~~~~\n\n.. attributetable:: BaseActivity\n\n.. autoclass:: BaseActivity\n :members:\n\nActivity\n~~~~~~~~~\n\n.. attributetable:: Activity\n\n.. autoclass:: Activity\n :members:\n\nGame\n~~~~~\n\n.. attributetable:: Game\n\n.. autoclass:: Game\n :members:\n\nStreaming\n~~~~~~~~~~~\n\n.. attributetable:: Streaming\n\n.. autoclass:: Streaming\n :members:\n\nCustomActivity\n~~~~~~~~~~~~~~~\n\n.. 
attributetable:: CustomActivity\n\n.. autoclass:: CustomActivity\n :members:\n\nPermissions\n~~~~~~~~~~~~\n\n.. attributetable:: Permissions\n\n.. autoclass:: Permissions\n :members:\n\nPermissionOverwrite\n~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PermissionOverwrite\n\n.. autoclass:: PermissionOverwrite\n :members:\n\nSystemChannelFlags\n~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: SystemChannelFlags\n\n.. autoclass:: SystemChannelFlags\n :members:\n\nMessageFlags\n~~~~~~~~~~~~\n\n.. attributetable:: MessageFlags\n\n.. autoclass:: MessageFlags\n :members:\n\nPublicUserFlags\n~~~~~~~~~~~~~~~\n\n.. attributetable:: PublicUserFlags\n\n.. autoclass:: PublicUserFlags\n :members:\n\nMemberFlags\n~~~~~~~~~~~~\n\n.. attributetable:: MemberFlags\n\n.. autoclass:: MemberFlags\n :members:\n\nAttachmentFlags\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: AttachmentFlags\n\n.. autoclass:: AttachmentFlags\n :members:\n\nRoleFlags\n~~~~~~~~~~\n\n.. attributetable:: RoleFlags\n\n.. autoclass:: RoleFlags\n :members:\n\nSKUFlags\n~~~~~~~~~~~\n\n.. attributetable:: SKUFlags\n\n.. autoclass:: SKUFlags()\n :members:\n\nForumTag\n~~~~~~~~~\n\n.. attributetable:: ForumTag\n\n.. autoclass:: ForumTag\n :members:\n\n\nExceptions\n------------\n\nThe following exceptions are thrown by the library.\n\n.. autoexception:: DiscordException\n\n.. autoexception:: ClientException\n\n.. autoexception:: LoginFailure\n\n.. autoexception:: HTTPException\n :members:\n\n.. autoexception:: RateLimited\n :members:\n\n.. autoexception:: Forbidden\n\n.. autoexception:: NotFound\n\n.. autoexception:: DiscordServerError\n\n.. autoexception:: InvalidData\n\n.. autoexception:: GatewayNotFound\n\n.. autoexception:: ConnectionClosed\n\n.. autoexception:: PrivilegedIntentsRequired\n\n.. autoexception:: InteractionResponded\n\n.. autoexception:: discord.opus.OpusError\n\n.. autoexception:: discord.opus.OpusNotLoaded\n\nException Hierarchy\n~~~~~~~~~~~~~~~~~~~~~\n\n.. exception_hierarchy::\n\n - :exc:`Exception`\n - :exc:`DiscordException`\n - :exc:`ClientException`\n - :exc:`InvalidData`\n - :exc:`LoginFailure`\n - :exc:`ConnectionClosed`\n - :exc:`PrivilegedIntentsRequired`\n - :exc:`InteractionResponded`\n - :exc:`GatewayNotFound`\n - :exc:`HTTPException`\n - :exc:`Forbidden`\n - :exc:`NotFound`\n - :exc:`DiscordServerError`\n - :exc:`app_commands.CommandSyncFailure`\n - :exc:`RateLimited`\n",
"path": "docs/api.rst"
}
] | 10_0 | python | import unittest
import sys


class TestLocaleAddition(unittest.TestCase):
    def test_latin_american_spanish_locale(self):
        from discord.enums import Locale
        # Check if the Latin American Spanish locale is present in the Locale enumeration
        self.assertTrue(hasattr(Locale, 'latin_american_spanish'), "Locale for Latin American Spanish is missing")

    def test_locale_code_correct(self):
        from discord.enums import Locale
        # Verify that the value of the Latin American Spanish locale is correctly set to 'es-419'
        self.assertEqual(Locale.latin_american_spanish.value, 'es-419', "Latin American Spanish locale should be 'es-419'")
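
# Side note: discord.py's custom EnumMeta (shown in the enums.py source later in
# this dump) also resolves members by value through _enum_value_map_, so a
# hypothetical equivalent check would be:
#     assert Locale('es-419') is Locale.latin_american_spanish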


def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestLocaleAddition))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == '__main__':
    main()
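# Note: the script is presumably executed inside this record's python3.9 testbed
# with the repository checkout importable, and the harness gates on the exit
# code (0 for success, 1 for failure).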
|
https://github.com/teamqurrent/discord.py | Your task is to introduce new message types related to guild incidents in the discord.py library. Specifically, add the message types guild_incident_alert_mode_enabled, guild_incident_alert_mode_disabled, guild_incident_report_raid, and guild_incident_report_false_alarm, with respective values 36, 37, 38, and 39, to the `MessageType` enumeration in `enums.py`. Additionally, update `message.py` so that each new type is rendered as a human-readable system message (for example, announcing that security actions were enabled or that a raid was reported). | 9db0dad | discord | python3.9 | 08ef42fe | diff --git a/discord/enums.py b/discord/enums.py
--- a/discord/enums.py
+++ b/discord/enums.py
@@ -247,6 +247,10 @@ class MessageType(Enum):
     stage_raise_hand = 30
     stage_topic = 31
     guild_application_premium_subscription = 32
+    guild_incident_alert_mode_enabled = 36
+    guild_incident_alert_mode_disabled = 37
+    guild_incident_report_raid = 38
+    guild_incident_report_false_alarm = 39
 
 
 class SpeakingState(Enum):
diff --git a/discord/message.py b/discord/message.py
--- a/discord/message.py
+++ b/discord/message.py
@@ -2216,6 +2216,20 @@ class Message(PartialMessage, Hashable):
         if self.type is MessageType.stage_topic:
             return f'{self.author.name} changed Stage topic: **{self.content}**.'
 
+        if self.type is MessageType.guild_incident_alert_mode_enabled:
+            dt = utils.parse_time(self.content)
+            dt_content = utils.format_dt(dt)
+            return f'{self.author.name} enabled security actions until {dt_content}.'
+
+        if self.type is MessageType.guild_incident_alert_mode_disabled:
+            return f'{self.author.name} disabled security actions.'
+
+        if self.type is MessageType.guild_incident_report_raid:
+            return f'{self.author.name} reported a raid in {self.guild}.'
+
+        if self.type is MessageType.guild_incident_report_false_alarm:
+            return f'{self.author.name} reported a false alarm in {self.guild}.'
+
         # Fallback for unknown message types
         return ''
diff --git a/discord/types/message.py b/discord/types/message.py
--- a/discord/types/message.py
+++ b/discord/types/message.py
@@ -113,7 +113,40 @@ class RoleSubscriptionData(TypedDict):
 
 
 MessageType = Literal[
-    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 15, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32
+    0,
+    1,
+    2,
+    3,
+    4,
+    5,
+    6,
+    7,
+    8,
+    9,
+    10,
+    11,
+    12,
+    14,
+    15,
+    18,
+    19,
+    20,
+    21,
+    22,
+    23,
+    24,
+    25,
+    26,
+    27,
+    28,
+    29,
+    30,
+    31,
+    32,
+    36,
+    37,
+    38,
+    39,
 ]
diff --git a/docs/api.rst b/docs/api.rst
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -1745,6 +1745,30 @@ of :class:`enum.Enum`.
         .. versionadded:: 2.2
 
+    .. attribute:: guild_incident_alert_mode_enabled
+
+        The system message sent when security actions are enabled.
+
+        .. versionadded:: 2.4
+
+    .. attribute:: guild_incident_alert_mode_disabled
+
+        The system message sent when security actions are disabled.
+
+        .. versionadded:: 2.4
+
+    .. attribute:: guild_incident_report_raid
+
+        The system message sent when a raid is reported.
+
+        .. versionadded:: 2.4
+
+    .. attribute:: guild_incident_report_false_alarm
+
+        The system message sent when a false alarm is reported.
+
+        .. versionadded:: 2.4
+
 .. class:: UserFlags
 
     Represents Discord User flags.
| [
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\nfrom __future__ import annotations\n\nimport types\nfrom collections import namedtuple\nfrom typing import Any, ClassVar, Dict, List, Optional, TYPE_CHECKING, Tuple, Type, TypeVar, Iterator, Mapping\n\n__all__ = (\n 'Enum',\n 'ChannelType',\n 'MessageType',\n 'SpeakingState',\n 'VerificationLevel',\n 'ContentFilter',\n 'Status',\n 'DefaultAvatar',\n 'AuditLogAction',\n 'AuditLogActionCategory',\n 'UserFlags',\n 'ActivityType',\n 'NotificationLevel',\n 'TeamMembershipState',\n 'TeamMemberRole',\n 'WebhookType',\n 'ExpireBehaviour',\n 'ExpireBehavior',\n 'StickerType',\n 'StickerFormatType',\n 'InviteTarget',\n 'VideoQualityMode',\n 'ComponentType',\n 'ButtonStyle',\n 'TextStyle',\n 'PrivacyLevel',\n 'InteractionType',\n 'InteractionResponseType',\n 'NSFWLevel',\n 'MFALevel',\n 'Locale',\n 'EntityType',\n 'EventStatus',\n 'AppCommandType',\n 'AppCommandOptionType',\n 'AppCommandPermissionType',\n 'AutoModRuleTriggerType',\n 'AutoModRuleEventType',\n 'AutoModRuleActionType',\n 'ForumLayoutType',\n 'ForumOrderType',\n 'SelectDefaultValueType',\n 'SKUType',\n 'EntitlementType',\n 'EntitlementOwnerType',\n)\n\nif TYPE_CHECKING:\n from typing_extensions import Self\n\n\ndef _create_value_cls(name: str, comparable: bool):\n # All the type ignores here are due to the type checker being unable to recognise\n # Runtime type creation without exploding.\n cls = namedtuple('_EnumValue_' + name, 'name value')\n cls.__repr__ = lambda self: f'<{name}.{self.name}: {self.value!r}>' # type: ignore\n cls.__str__ = lambda self: f'{name}.{self.name}' # type: ignore\n if comparable:\n cls.__le__ = lambda self, other: isinstance(other, self.__class__) and self.value <= other.value # type: ignore\n cls.__ge__ = lambda self, other: isinstance(other, self.__class__) and self.value >= other.value # type: ignore\n cls.__lt__ = lambda self, other: isinstance(other, self.__class__) and self.value < other.value # type: ignore\n cls.__gt__ = lambda self, other: isinstance(other, self.__class__) and self.value > other.value # type: ignore\n return cls\n\n\ndef _is_descriptor(obj):\n return hasattr(obj, '__get__') or hasattr(obj, '__set__') or hasattr(obj, '__delete__')\n\n\nclass EnumMeta(type):\n if TYPE_CHECKING:\n __name__: ClassVar[str]\n _enum_member_names_: ClassVar[List[str]]\n _enum_member_map_: ClassVar[Dict[str, Any]]\n _enum_value_map_: ClassVar[Dict[Any, Any]]\n\n def __new__(cls, name: str, bases: 
Tuple[type, ...], attrs: Dict[str, Any], *, comparable: bool = False) -> Self:\n value_mapping = {}\n member_mapping = {}\n member_names = []\n\n value_cls = _create_value_cls(name, comparable)\n for key, value in list(attrs.items()):\n is_descriptor = _is_descriptor(value)\n if key[0] == '_' and not is_descriptor:\n continue\n\n # Special case classmethod to just pass through\n if isinstance(value, classmethod):\n continue\n\n if is_descriptor:\n setattr(value_cls, key, value)\n del attrs[key]\n continue\n\n try:\n new_value = value_mapping[value]\n except KeyError:\n new_value = value_cls(name=key, value=value)\n value_mapping[value] = new_value\n member_names.append(key)\n\n member_mapping[key] = new_value\n attrs[key] = new_value\n\n attrs['_enum_value_map_'] = value_mapping\n attrs['_enum_member_map_'] = member_mapping\n attrs['_enum_member_names_'] = member_names\n attrs['_enum_value_cls_'] = value_cls\n actual_cls = super().__new__(cls, name, bases, attrs)\n value_cls._actual_enum_cls_ = actual_cls # type: ignore # Runtime attribute isn't understood\n return actual_cls\n\n def __iter__(cls) -> Iterator[Any]:\n return (cls._enum_member_map_[name] for name in cls._enum_member_names_)\n\n def __reversed__(cls) -> Iterator[Any]:\n return (cls._enum_member_map_[name] for name in reversed(cls._enum_member_names_))\n\n def __len__(cls) -> int:\n return len(cls._enum_member_names_)\n\n def __repr__(cls) -> str:\n return f'<enum {cls.__name__}>'\n\n @property\n def __members__(cls) -> Mapping[str, Any]:\n return types.MappingProxyType(cls._enum_member_map_)\n\n def __call__(cls, value: str) -> Any:\n try:\n return cls._enum_value_map_[value]\n except (KeyError, TypeError):\n raise ValueError(f\"{value!r} is not a valid {cls.__name__}\")\n\n def __getitem__(cls, key: str) -> Any:\n return cls._enum_member_map_[key]\n\n def __setattr__(cls, name: str, value: Any) -> None:\n raise TypeError('Enums are immutable.')\n\n def __delattr__(cls, attr: str) -> None:\n raise TypeError('Enums are immutable')\n\n def __instancecheck__(self, instance: Any) -> bool:\n # isinstance(x, Y)\n # -> __instancecheck__(Y, x)\n try:\n return instance._actual_enum_cls_ is self\n except AttributeError:\n return False\n\n\nif TYPE_CHECKING:\n from enum import Enum\nelse:\n\n class Enum(metaclass=EnumMeta):\n @classmethod\n def try_value(cls, value):\n try:\n return cls._enum_value_map_[value]\n except (KeyError, TypeError):\n return value\n\n\nclass ChannelType(Enum):\n text = 0\n private = 1\n voice = 2\n group = 3\n category = 4\n news = 5\n news_thread = 10\n public_thread = 11\n private_thread = 12\n stage_voice = 13\n forum = 15\n media = 16\n\n def __str__(self) -> str:\n return self.name\n\n\nclass MessageType(Enum):\n default = 0\n recipient_add = 1\n recipient_remove = 2\n call = 3\n channel_name_change = 4\n channel_icon_change = 5\n pins_add = 6\n new_member = 7\n premium_guild_subscription = 8\n premium_guild_tier_1 = 9\n premium_guild_tier_2 = 10\n premium_guild_tier_3 = 11\n channel_follow_add = 12\n guild_stream = 13\n guild_discovery_disqualified = 14\n guild_discovery_requalified = 15\n guild_discovery_grace_period_initial_warning = 16\n guild_discovery_grace_period_final_warning = 17\n thread_created = 18\n reply = 19\n chat_input_command = 20\n thread_starter_message = 21\n guild_invite_reminder = 22\n context_menu_command = 23\n auto_moderation_action = 24\n role_subscription_purchase = 25\n interaction_premium_upsell = 26\n stage_start = 27\n stage_end = 28\n stage_speaker = 29\n stage_raise_hand = 
30\n stage_topic = 31\n guild_application_premium_subscription = 32\n\n\nclass SpeakingState(Enum):\n none = 0\n voice = 1\n soundshare = 2\n priority = 4\n\n def __str__(self) -> str:\n return self.name\n\n def __int__(self) -> int:\n return self.value\n\n\nclass VerificationLevel(Enum, comparable=True):\n none = 0\n low = 1\n medium = 2\n high = 3\n highest = 4\n\n def __str__(self) -> str:\n return self.name\n\n\nclass ContentFilter(Enum, comparable=True):\n disabled = 0\n no_role = 1\n all_members = 2\n\n def __str__(self) -> str:\n return self.name\n\n\nclass Status(Enum):\n online = 'online'\n offline = 'offline'\n idle = 'idle'\n dnd = 'dnd'\n do_not_disturb = 'dnd'\n invisible = 'invisible'\n\n def __str__(self) -> str:\n return self.value\n\n\nclass DefaultAvatar(Enum):\n blurple = 0\n grey = 1\n gray = 1\n green = 2\n orange = 3\n red = 4\n pink = 5\n\n def __str__(self) -> str:\n return self.name\n\n\nclass NotificationLevel(Enum, comparable=True):\n all_messages = 0\n only_mentions = 1\n\n\nclass AuditLogActionCategory(Enum):\n create = 1\n delete = 2\n update = 3\n\n\nclass AuditLogAction(Enum):\n # fmt: off\n guild_update = 1\n channel_create = 10\n channel_update = 11\n channel_delete = 12\n overwrite_create = 13\n overwrite_update = 14\n overwrite_delete = 15\n kick = 20\n member_prune = 21\n ban = 22\n unban = 23\n member_update = 24\n member_role_update = 25\n member_move = 26\n member_disconnect = 27\n bot_add = 28\n role_create = 30\n role_update = 31\n role_delete = 32\n invite_create = 40\n invite_update = 41\n invite_delete = 42\n webhook_create = 50\n webhook_update = 51\n webhook_delete = 52\n emoji_create = 60\n emoji_update = 61\n emoji_delete = 62\n message_delete = 72\n message_bulk_delete = 73\n message_pin = 74\n message_unpin = 75\n integration_create = 80\n integration_update = 81\n integration_delete = 82\n stage_instance_create = 83\n stage_instance_update = 84\n stage_instance_delete = 85\n sticker_create = 90\n sticker_update = 91\n sticker_delete = 92\n scheduled_event_create = 100\n scheduled_event_update = 101\n scheduled_event_delete = 102\n thread_create = 110\n thread_update = 111\n thread_delete = 112\n app_command_permission_update = 121\n automod_rule_create = 140\n automod_rule_update = 141\n automod_rule_delete = 142\n automod_block_message = 143\n automod_flag_message = 144\n automod_timeout_member = 145\n creator_monetization_request_created = 150\n creator_monetization_terms_accepted = 151\n # fmt: on\n\n @property\n def category(self) -> Optional[AuditLogActionCategory]:\n # fmt: off\n lookup: Dict[AuditLogAction, Optional[AuditLogActionCategory]] = {\n AuditLogAction.guild_update: AuditLogActionCategory.update,\n AuditLogAction.channel_create: AuditLogActionCategory.create,\n AuditLogAction.channel_update: AuditLogActionCategory.update,\n AuditLogAction.channel_delete: AuditLogActionCategory.delete,\n AuditLogAction.overwrite_create: AuditLogActionCategory.create,\n AuditLogAction.overwrite_update: AuditLogActionCategory.update,\n AuditLogAction.overwrite_delete: AuditLogActionCategory.delete,\n AuditLogAction.kick: None,\n AuditLogAction.member_prune: None,\n AuditLogAction.ban: None,\n AuditLogAction.unban: None,\n AuditLogAction.member_update: AuditLogActionCategory.update,\n AuditLogAction.member_role_update: AuditLogActionCategory.update,\n AuditLogAction.member_move: None,\n AuditLogAction.member_disconnect: None,\n AuditLogAction.bot_add: None,\n AuditLogAction.role_create: AuditLogActionCategory.create,\n 
AuditLogAction.role_update: AuditLogActionCategory.update,\n AuditLogAction.role_delete: AuditLogActionCategory.delete,\n AuditLogAction.invite_create: AuditLogActionCategory.create,\n AuditLogAction.invite_update: AuditLogActionCategory.update,\n AuditLogAction.invite_delete: AuditLogActionCategory.delete,\n AuditLogAction.webhook_create: AuditLogActionCategory.create,\n AuditLogAction.webhook_update: AuditLogActionCategory.update,\n AuditLogAction.webhook_delete: AuditLogActionCategory.delete,\n AuditLogAction.emoji_create: AuditLogActionCategory.create,\n AuditLogAction.emoji_update: AuditLogActionCategory.update,\n AuditLogAction.emoji_delete: AuditLogActionCategory.delete,\n AuditLogAction.message_delete: AuditLogActionCategory.delete,\n AuditLogAction.message_bulk_delete: AuditLogActionCategory.delete,\n AuditLogAction.message_pin: None,\n AuditLogAction.message_unpin: None,\n AuditLogAction.integration_create: AuditLogActionCategory.create,\n AuditLogAction.integration_update: AuditLogActionCategory.update,\n AuditLogAction.integration_delete: AuditLogActionCategory.delete,\n AuditLogAction.stage_instance_create: AuditLogActionCategory.create,\n AuditLogAction.stage_instance_update: AuditLogActionCategory.update,\n AuditLogAction.stage_instance_delete: AuditLogActionCategory.delete,\n AuditLogAction.sticker_create: AuditLogActionCategory.create,\n AuditLogAction.sticker_update: AuditLogActionCategory.update,\n AuditLogAction.sticker_delete: AuditLogActionCategory.delete,\n AuditLogAction.scheduled_event_create: AuditLogActionCategory.create,\n AuditLogAction.scheduled_event_update: AuditLogActionCategory.update,\n AuditLogAction.scheduled_event_delete: AuditLogActionCategory.delete,\n AuditLogAction.thread_create: AuditLogActionCategory.create,\n AuditLogAction.thread_delete: AuditLogActionCategory.delete,\n AuditLogAction.thread_update: AuditLogActionCategory.update,\n AuditLogAction.app_command_permission_update: AuditLogActionCategory.update,\n AuditLogAction.automod_rule_create: AuditLogActionCategory.create,\n AuditLogAction.automod_rule_update: AuditLogActionCategory.update,\n AuditLogAction.automod_rule_delete: AuditLogActionCategory.delete,\n AuditLogAction.automod_block_message: None,\n AuditLogAction.automod_flag_message: None,\n AuditLogAction.automod_timeout_member: None,\n AuditLogAction.creator_monetization_request_created: None,\n AuditLogAction.creator_monetization_terms_accepted: None,\n }\n # fmt: on\n return lookup[self]\n\n @property\n def target_type(self) -> Optional[str]:\n v = self.value\n if v == -1:\n return 'all'\n elif v < 10:\n return 'guild'\n elif v < 20:\n return 'channel'\n elif v < 30:\n return 'user'\n elif v < 40:\n return 'role'\n elif v < 50:\n return 'invite'\n elif v < 60:\n return 'webhook'\n elif v < 70:\n return 'emoji'\n elif v == 73:\n return 'channel'\n elif v < 80:\n return 'message'\n elif v < 83:\n return 'integration'\n elif v < 90:\n return 'stage_instance'\n elif v < 93:\n return 'sticker'\n elif v < 103:\n return 'guild_scheduled_event'\n elif v < 113:\n return 'thread'\n elif v < 122:\n return 'integration_or_app_command'\n elif v < 143:\n return 'auto_moderation'\n elif v < 146:\n return 'user'\n elif v < 152:\n return 'creator_monetization'\n\n\nclass UserFlags(Enum):\n staff = 1\n partner = 2\n hypesquad = 4\n bug_hunter = 8\n mfa_sms = 16\n premium_promo_dismissed = 32\n hypesquad_bravery = 64\n hypesquad_brilliance = 128\n hypesquad_balance = 256\n early_supporter = 512\n team_user = 1024\n system = 4096\n 
has_unread_urgent_messages = 8192\n bug_hunter_level_2 = 16384\n verified_bot = 65536\n verified_bot_developer = 131072\n discord_certified_moderator = 262144\n bot_http_interactions = 524288\n spammer = 1048576\n active_developer = 4194304\n\n\nclass ActivityType(Enum):\n unknown = -1\n playing = 0\n streaming = 1\n listening = 2\n watching = 3\n custom = 4\n competing = 5\n\n def __int__(self) -> int:\n return self.value\n\n\nclass TeamMembershipState(Enum):\n invited = 1\n accepted = 2\n\n\nclass TeamMemberRole(Enum):\n admin = 'admin'\n developer = 'developer'\n read_only = 'read_only'\n\n\nclass WebhookType(Enum):\n incoming = 1\n channel_follower = 2\n application = 3\n\n\nclass ExpireBehaviour(Enum):\n remove_role = 0\n kick = 1\n\n\nExpireBehavior = ExpireBehaviour\n\n\nclass StickerType(Enum):\n standard = 1\n guild = 2\n\n\nclass StickerFormatType(Enum):\n png = 1\n apng = 2\n lottie = 3\n gif = 4\n\n @property\n def file_extension(self) -> str:\n # fmt: off\n lookup: Dict[StickerFormatType, str] = {\n StickerFormatType.png: 'png',\n StickerFormatType.apng: 'png',\n StickerFormatType.lottie: 'json',\n StickerFormatType.gif: 'gif',\n }\n # fmt: on\n return lookup.get(self, 'png')\n\n\nclass InviteTarget(Enum):\n unknown = 0\n stream = 1\n embedded_application = 2\n\n\nclass InteractionType(Enum):\n ping = 1\n application_command = 2\n component = 3\n autocomplete = 4\n modal_submit = 5\n\n\nclass InteractionResponseType(Enum):\n pong = 1\n # ack = 2 (deprecated)\n # channel_message = 3 (deprecated)\n channel_message = 4 # (with source)\n deferred_channel_message = 5 # (with source)\n deferred_message_update = 6 # for components\n message_update = 7 # for components\n autocomplete_result = 8\n modal = 9 # for modals\n premium_required = 10\n\n\nclass VideoQualityMode(Enum):\n auto = 1\n full = 2\n\n def __int__(self) -> int:\n return self.value\n\n\nclass ComponentType(Enum):\n action_row = 1\n button = 2\n select = 3\n string_select = 3\n text_input = 4\n user_select = 5\n role_select = 6\n mentionable_select = 7\n channel_select = 8\n\n def __int__(self) -> int:\n return self.value\n\n\nclass ButtonStyle(Enum):\n primary = 1\n secondary = 2\n success = 3\n danger = 4\n link = 5\n\n # Aliases\n blurple = 1\n grey = 2\n gray = 2\n green = 3\n red = 4\n url = 5\n\n def __int__(self) -> int:\n return self.value\n\n\nclass TextStyle(Enum):\n short = 1\n paragraph = 2\n\n # Aliases\n long = 2\n\n def __int__(self) -> int:\n return self.value\n\n\nclass PrivacyLevel(Enum):\n guild_only = 2\n\n\nclass NSFWLevel(Enum, comparable=True):\n default = 0\n explicit = 1\n safe = 2\n age_restricted = 3\n\n\nclass MFALevel(Enum, comparable=True):\n disabled = 0\n require_2fa = 1\n\n\nclass Locale(Enum):\n american_english = 'en-US'\n british_english = 'en-GB'\n bulgarian = 'bg'\n chinese = 'zh-CN'\n taiwan_chinese = 'zh-TW'\n croatian = 'hr'\n czech = 'cs'\n indonesian = 'id'\n danish = 'da'\n dutch = 'nl'\n finnish = 'fi'\n french = 'fr'\n german = 'de'\n greek = 'el'\n hindi = 'hi'\n hungarian = 'hu'\n italian = 'it'\n japanese = 'ja'\n korean = 'ko'\n lithuanian = 'lt'\n norwegian = 'no'\n polish = 'pl'\n brazil_portuguese = 'pt-BR'\n romanian = 'ro'\n russian = 'ru'\n spain_spanish = 'es-ES'\n swedish = 'sv-SE'\n thai = 'th'\n turkish = 'tr'\n ukrainian = 'uk'\n vietnamese = 'vi'\n\n def __str__(self) -> str:\n return self.value\n\n\nE = TypeVar('E', bound='Enum')\n\n\nclass EntityType(Enum):\n stage_instance = 1\n voice = 2\n external = 3\n\n\nclass EventStatus(Enum):\n scheduled = 1\n 
active = 2\n completed = 3\n canceled = 4\n\n ended = 3\n cancelled = 4\n\n\nclass AppCommandOptionType(Enum):\n subcommand = 1\n subcommand_group = 2\n string = 3\n integer = 4\n boolean = 5\n user = 6\n channel = 7\n role = 8\n mentionable = 9\n number = 10\n attachment = 11\n\n\nclass AppCommandType(Enum):\n chat_input = 1\n user = 2\n message = 3\n\n\nclass AppCommandPermissionType(Enum):\n role = 1\n user = 2\n channel = 3\n\n\nclass AutoModRuleTriggerType(Enum):\n keyword = 1\n harmful_link = 2\n spam = 3\n keyword_preset = 4\n mention_spam = 5\n member_profile = 6\n\n\nclass AutoModRuleEventType(Enum):\n message_send = 1\n member_update = 2\n\n\nclass AutoModRuleActionType(Enum):\n block_message = 1\n send_alert_message = 2\n timeout = 3\n block_member_interactions = 4\n\n\nclass ForumLayoutType(Enum):\n not_set = 0\n list_view = 1\n gallery_view = 2\n\n\nclass ForumOrderType(Enum):\n latest_activity = 0\n creation_date = 1\n\n\nclass SelectDefaultValueType(Enum):\n user = 'user'\n role = 'role'\n channel = 'channel'\n\n\nclass SKUType(Enum):\n subscription = 5\n subscription_group = 6\n\n\nclass EntitlementType(Enum):\n application_subscription = 8\n\n\nclass EntitlementOwnerType(Enum):\n guild = 1\n user = 2\n\n\ndef create_unknown_value(cls: Type[E], val: Any) -> E:\n value_cls = cls._enum_value_cls_ # type: ignore # This is narrowed below\n name = f'unknown_{val}'\n return value_cls(name=name, value=val)\n\n\ndef try_enum(cls: Type[E], val: Any) -> E:\n \"\"\"A function that tries to turn the value into enum ``cls``.\n\n If it fails it returns a proxy invalid value instead.\n \"\"\"\n\n try:\n return cls._enum_value_map_[val] # type: ignore # All errors are caught below\n except (KeyError, TypeError, AttributeError):\n return create_unknown_value(cls, val)\n",
"path": "discord/enums.py"
},
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nimport datetime\nimport re\nimport io\nfrom os import PathLike\nfrom typing import (\n Dict,\n TYPE_CHECKING,\n Sequence,\n Union,\n List,\n Optional,\n Any,\n Callable,\n Tuple,\n ClassVar,\n Type,\n overload,\n)\n\nfrom . import utils\nfrom .asset import Asset\nfrom .reaction import Reaction\nfrom .emoji import Emoji\nfrom .partial_emoji import PartialEmoji\nfrom .enums import InteractionType, MessageType, ChannelType, try_enum\nfrom .errors import HTTPException\nfrom .components import _component_factory\nfrom .embeds import Embed\nfrom .member import Member\nfrom .flags import MessageFlags, AttachmentFlags\nfrom .file import File\nfrom .utils import escape_mentions, MISSING\nfrom .http import handle_message_parameters\nfrom .guild import Guild\nfrom .mixins import Hashable\nfrom .sticker import StickerItem, GuildSticker\nfrom .threads import Thread\nfrom .channel import PartialMessageable\n\nif TYPE_CHECKING:\n from typing_extensions import Self\n\n from .types.message import (\n Message as MessagePayload,\n Attachment as AttachmentPayload,\n MessageReference as MessageReferencePayload,\n MessageApplication as MessageApplicationPayload,\n MessageActivity as MessageActivityPayload,\n RoleSubscriptionData as RoleSubscriptionDataPayload,\n )\n\n from .types.interactions import MessageInteraction as MessageInteractionPayload\n\n from .types.components import Component as ComponentPayload\n from .types.threads import ThreadArchiveDuration\n from .types.member import (\n Member as MemberPayload,\n UserWithMember as UserWithMemberPayload,\n )\n from .types.user import User as UserPayload\n from .types.embed import Embed as EmbedPayload\n from .types.gateway import MessageReactionRemoveEvent, MessageUpdateEvent\n from .abc import Snowflake\n from .abc import GuildChannel, MessageableChannel\n from .components import ActionRow, ActionRowChildComponentType\n from .state import ConnectionState\n from .mentions import AllowedMentions\n from .user import User\n from .role import Role\n from .ui.view import View\n\n EmojiInputType = Union[Emoji, PartialEmoji, str]\n MessageComponentType = Union[ActionRow, ActionRowChildComponentType]\n\n\n__all__ = (\n 'Attachment',\n 'Message',\n 'PartialMessage',\n 'MessageInteraction',\n 'MessageReference',\n 'DeletedReferencedMessage',\n 'MessageApplication',\n 
'RoleSubscriptionInfo',\n)\n\n\ndef convert_emoji_reaction(emoji: Union[EmojiInputType, Reaction]) -> str:\n if isinstance(emoji, Reaction):\n emoji = emoji.emoji\n\n if isinstance(emoji, Emoji):\n return f'{emoji.name}:{emoji.id}'\n if isinstance(emoji, PartialEmoji):\n return emoji._as_reaction()\n if isinstance(emoji, str):\n # Reactions can be in :name:id format, but not <:name:id>.\n # No existing emojis have <> in them, so this should be okay.\n return emoji.strip('<>')\n\n raise TypeError(f'emoji argument must be str, Emoji, or Reaction not {emoji.__class__.__name__}.')\n\n\nclass Attachment(Hashable):\n \"\"\"Represents an attachment from Discord.\n\n .. container:: operations\n\n .. describe:: str(x)\n\n Returns the URL of the attachment.\n\n .. describe:: x == y\n\n Checks if the attachment is equal to another attachment.\n\n .. describe:: x != y\n\n Checks if the attachment is not equal to another attachment.\n\n .. describe:: hash(x)\n\n Returns the hash of the attachment.\n\n .. versionchanged:: 1.7\n Attachment can now be casted to :class:`str` and is hashable.\n\n Attributes\n ------------\n id: :class:`int`\n The attachment ID.\n size: :class:`int`\n The attachment size in bytes.\n height: Optional[:class:`int`]\n The attachment's height, in pixels. Only applicable to images and videos.\n width: Optional[:class:`int`]\n The attachment's width, in pixels. Only applicable to images and videos.\n filename: :class:`str`\n The attachment's filename.\n url: :class:`str`\n The attachment URL. If the message this attachment was attached\n to is deleted, then this will 404.\n proxy_url: :class:`str`\n The proxy URL. This is a cached version of the :attr:`~Attachment.url` in the\n case of images. When the message is deleted, this URL might be valid for a few\n minutes or not valid at all.\n content_type: Optional[:class:`str`]\n The attachment's `media type <https://en.wikipedia.org/wiki/Media_type>`_\n\n .. versionadded:: 1.7\n description: Optional[:class:`str`]\n The attachment's description. Only applicable to images.\n\n .. versionadded:: 2.0\n ephemeral: :class:`bool`\n Whether the attachment is ephemeral.\n\n .. versionadded:: 2.0\n duration: Optional[:class:`float`]\n The duration of the audio file in seconds. Returns ``None`` if it's not a voice message.\n\n .. versionadded:: 2.3\n waveform: Optional[:class:`bytes`]\n The waveform (amplitudes) of the audio in bytes. Returns ``None`` if it's not a voice message.\n\n .. 
versionadded:: 2.3\n \"\"\"\n\n __slots__ = (\n 'id',\n 'size',\n 'height',\n 'width',\n 'filename',\n 'url',\n 'proxy_url',\n '_http',\n 'content_type',\n 'description',\n 'ephemeral',\n 'duration',\n 'waveform',\n '_flags',\n )\n\n def __init__(self, *, data: AttachmentPayload, state: ConnectionState):\n self.id: int = int(data['id'])\n self.size: int = data['size']\n self.height: Optional[int] = data.get('height')\n self.width: Optional[int] = data.get('width')\n self.filename: str = data['filename']\n self.url: str = data['url']\n self.proxy_url: str = data['proxy_url']\n self._http = state.http\n self.content_type: Optional[str] = data.get('content_type')\n self.description: Optional[str] = data.get('description')\n self.ephemeral: bool = data.get('ephemeral', False)\n self.duration: Optional[float] = data.get('duration_secs')\n\n waveform = data.get('waveform')\n self.waveform: Optional[bytes] = utils._base64_to_bytes(waveform) if waveform is not None else None\n\n self._flags: int = data.get('flags', 0)\n\n @property\n def flags(self) -> AttachmentFlags:\n \"\"\":class:`AttachmentFlags`: The attachment's flags.\"\"\"\n return AttachmentFlags._from_value(self._flags)\n\n def is_spoiler(self) -> bool:\n \"\"\":class:`bool`: Whether this attachment contains a spoiler.\"\"\"\n return self.filename.startswith('SPOILER_')\n\n def is_voice_message(self) -> bool:\n \"\"\":class:`bool`: Whether this attachment is a voice message.\"\"\"\n return self.duration is not None and 'voice-message' in self.url\n\n def __repr__(self) -> str:\n return f'<Attachment id={self.id} filename={self.filename!r} url={self.url!r}>'\n\n def __str__(self) -> str:\n return self.url or ''\n\n async def save(\n self,\n fp: Union[io.BufferedIOBase, PathLike[Any]],\n *,\n seek_begin: bool = True,\n use_cached: bool = False,\n ) -> int:\n \"\"\"|coro|\n\n Saves this attachment into a file-like object.\n\n Parameters\n -----------\n fp: Union[:class:`io.BufferedIOBase`, :class:`os.PathLike`]\n The file-like object to save this attachment to or the filename\n to use. If a filename is passed then a file is created with that\n filename and used instead.\n seek_begin: :class:`bool`\n Whether to seek to the beginning of the file after saving is\n successfully done.\n use_cached: :class:`bool`\n Whether to use :attr:`proxy_url` rather than :attr:`url` when downloading\n the attachment. This will allow attachments to be saved after deletion\n more often, compared to the regular URL which is generally deleted right\n after the message is deleted. Note that this can still fail to download\n deleted attachments if too much time has passed and it does not work\n on some types of attachments.\n\n Raises\n --------\n HTTPException\n Saving the attachment failed.\n NotFound\n The attachment was deleted.\n\n Returns\n --------\n :class:`int`\n The number of bytes written.\n \"\"\"\n data = await self.read(use_cached=use_cached)\n if isinstance(fp, io.BufferedIOBase):\n written = fp.write(data)\n if seek_begin:\n fp.seek(0)\n return written\n else:\n with open(fp, 'wb') as f:\n return f.write(data)\n\n async def read(self, *, use_cached: bool = False) -> bytes:\n \"\"\"|coro|\n\n Retrieves the content of this attachment as a :class:`bytes` object.\n\n .. versionadded:: 1.1\n\n Parameters\n -----------\n use_cached: :class:`bool`\n Whether to use :attr:`proxy_url` rather than :attr:`url` when downloading\n the attachment. 
This will allow attachments to be saved after deletion\n more often, compared to the regular URL which is generally deleted right\n after the message is deleted. Note that this can still fail to download\n deleted attachments if too much time has passed and it does not work\n on some types of attachments.\n\n Raises\n ------\n HTTPException\n Downloading the attachment failed.\n Forbidden\n You do not have permissions to access this attachment\n NotFound\n The attachment was deleted.\n\n Returns\n -------\n :class:`bytes`\n The contents of the attachment.\n \"\"\"\n url = self.proxy_url if use_cached else self.url\n data = await self._http.get_from_cdn(url)\n return data\n\n async def to_file(\n self,\n *,\n filename: Optional[str] = MISSING,\n description: Optional[str] = MISSING,\n use_cached: bool = False,\n spoiler: bool = False,\n ) -> File:\n \"\"\"|coro|\n\n Converts the attachment into a :class:`File` suitable for sending via\n :meth:`abc.Messageable.send`.\n\n .. versionadded:: 1.3\n\n Parameters\n -----------\n filename: Optional[:class:`str`]\n The filename to use for the file. If not specified then the filename\n of the attachment is used instead.\n\n .. versionadded:: 2.0\n description: Optional[:class:`str`]\n The description to use for the file. If not specified then the\n description of the attachment is used instead.\n\n .. versionadded:: 2.0\n use_cached: :class:`bool`\n Whether to use :attr:`proxy_url` rather than :attr:`url` when downloading\n the attachment. This will allow attachments to be saved after deletion\n more often, compared to the regular URL which is generally deleted right\n after the message is deleted. Note that this can still fail to download\n deleted attachments if too much time has passed and it does not work\n on some types of attachments.\n\n .. versionadded:: 1.4\n spoiler: :class:`bool`\n Whether the file is a spoiler.\n\n .. versionadded:: 1.4\n\n Raises\n ------\n HTTPException\n Downloading the attachment failed.\n Forbidden\n You do not have permissions to access this attachment\n NotFound\n The attachment was deleted.\n\n Returns\n -------\n :class:`File`\n The attachment as a file suitable for sending.\n \"\"\"\n\n data = await self.read(use_cached=use_cached)\n file_filename = filename if filename is not MISSING else self.filename\n file_description = description if description is not MISSING else self.description\n return File(io.BytesIO(data), filename=file_filename, description=file_description, spoiler=spoiler)\n\n def to_dict(self) -> AttachmentPayload:\n result: AttachmentPayload = {\n 'filename': self.filename,\n 'id': self.id,\n 'proxy_url': self.proxy_url,\n 'size': self.size,\n 'url': self.url,\n 'spoiler': self.is_spoiler(),\n }\n if self.height:\n result['height'] = self.height\n if self.width:\n result['width'] = self.width\n if self.content_type:\n result['content_type'] = self.content_type\n if self.description is not None:\n result['description'] = self.description\n return result\n\n\nclass DeletedReferencedMessage:\n \"\"\"A special sentinel type given when the resolved message reference\n points to a deleted message.\n\n The purpose of this class is to separate referenced messages that could not be\n fetched and those that were previously fetched but have since been deleted.\n\n .. 
versionadded:: 1.6\n \"\"\"\n\n __slots__ = ('_parent',)\n\n def __init__(self, parent: MessageReference):\n self._parent: MessageReference = parent\n\n def __repr__(self) -> str:\n return f\"<DeletedReferencedMessage id={self.id} channel_id={self.channel_id} guild_id={self.guild_id!r}>\"\n\n @property\n def id(self) -> int:\n \"\"\":class:`int`: The message ID of the deleted referenced message.\"\"\"\n # the parent's message id won't be None here\n return self._parent.message_id # type: ignore\n\n @property\n def channel_id(self) -> int:\n \"\"\":class:`int`: The channel ID of the deleted referenced message.\"\"\"\n return self._parent.channel_id\n\n @property\n def guild_id(self) -> Optional[int]:\n \"\"\"Optional[:class:`int`]: The guild ID of the deleted referenced message.\"\"\"\n return self._parent.guild_id\n\n\nclass MessageReference:\n \"\"\"Represents a reference to a :class:`~discord.Message`.\n\n .. versionadded:: 1.5\n\n .. versionchanged:: 1.6\n This class can now be constructed by users.\n\n Attributes\n -----------\n message_id: Optional[:class:`int`]\n The id of the message referenced.\n channel_id: :class:`int`\n The channel id of the message referenced.\n guild_id: Optional[:class:`int`]\n The guild id of the message referenced.\n fail_if_not_exists: :class:`bool`\n Whether replying to the referenced message should raise :class:`HTTPException`\n if the message no longer exists or Discord could not fetch the message.\n\n .. versionadded:: 1.7\n\n resolved: Optional[Union[:class:`Message`, :class:`DeletedReferencedMessage`]]\n The message that this reference resolved to. If this is ``None``\n then the original message was not fetched either due to the Discord API\n not attempting to resolve it or it not being available at the time of creation.\n If the message was resolved at a prior point but has since been deleted then\n this will be of type :class:`DeletedReferencedMessage`.\n\n Currently, this is mainly the replied to message when a user replies to a message.\n\n .. versionadded:: 1.6\n \"\"\"\n\n __slots__ = ('message_id', 'channel_id', 'guild_id', 'fail_if_not_exists', 'resolved', '_state')\n\n def __init__(self, *, message_id: int, channel_id: int, guild_id: Optional[int] = None, fail_if_not_exists: bool = True):\n self._state: Optional[ConnectionState] = None\n self.resolved: Optional[Union[Message, DeletedReferencedMessage]] = None\n self.message_id: Optional[int] = message_id\n self.channel_id: int = channel_id\n self.guild_id: Optional[int] = guild_id\n self.fail_if_not_exists: bool = fail_if_not_exists\n\n @classmethod\n def with_state(cls, state: ConnectionState, data: MessageReferencePayload) -> Self:\n self = cls.__new__(cls)\n self.message_id = utils._get_as_snowflake(data, 'message_id')\n self.channel_id = int(data['channel_id'])\n self.guild_id = utils._get_as_snowflake(data, 'guild_id')\n self.fail_if_not_exists = data.get('fail_if_not_exists', True)\n self._state = state\n self.resolved = None\n return self\n\n @classmethod\n def from_message(cls, message: PartialMessage, *, fail_if_not_exists: bool = True) -> Self:\n \"\"\"Creates a :class:`MessageReference` from an existing :class:`~discord.Message`.\n\n .. versionadded:: 1.6\n\n Parameters\n ----------\n message: :class:`~discord.Message`\n The message to be converted into a reference.\n fail_if_not_exists: :class:`bool`\n Whether replying to the referenced message should raise :class:`HTTPException`\n if the message no longer exists or Discord could not fetch the message.\n\n .. 
versionadded:: 1.7\n\n Returns\n -------\n :class:`MessageReference`\n A reference to the message.\n \"\"\"\n self = cls(\n message_id=message.id,\n channel_id=message.channel.id,\n guild_id=getattr(message.guild, 'id', None),\n fail_if_not_exists=fail_if_not_exists,\n )\n self._state = message._state\n return self\n\n @property\n def cached_message(self) -> Optional[Message]:\n \"\"\"Optional[:class:`~discord.Message`]: The cached message, if found in the internal message cache.\"\"\"\n return self._state and self._state._get_message(self.message_id)\n\n @property\n def jump_url(self) -> str:\n \"\"\":class:`str`: Returns a URL that allows the client to jump to the referenced message.\n\n .. versionadded:: 1.7\n \"\"\"\n guild_id = self.guild_id if self.guild_id is not None else '@me'\n return f'https://discord.com/channels/{guild_id}/{self.channel_id}/{self.message_id}'\n\n def __repr__(self) -> str:\n return f'<MessageReference message_id={self.message_id!r} channel_id={self.channel_id!r} guild_id={self.guild_id!r}>'\n\n def to_dict(self) -> MessageReferencePayload:\n result: Dict[str, Any] = {'message_id': self.message_id} if self.message_id is not None else {}\n result['channel_id'] = self.channel_id\n if self.guild_id is not None:\n result['guild_id'] = self.guild_id\n if self.fail_if_not_exists is not None:\n result['fail_if_not_exists'] = self.fail_if_not_exists\n return result # type: ignore # Type checker doesn't understand these are the same.\n\n to_message_reference_dict = to_dict\n\n\nclass MessageInteraction(Hashable):\n \"\"\"Represents the interaction that a :class:`Message` is a response to.\n\n .. versionadded:: 2.0\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two message interactions are equal.\n\n .. describe:: x != y\n\n Checks if two message interactions are not equal.\n\n .. describe:: hash(x)\n\n Returns the message interaction's hash.\n\n Attributes\n -----------\n id: :class:`int`\n The interaction ID.\n type: :class:`InteractionType`\n The interaction type.\n name: :class:`str`\n The name of the interaction.\n user: Union[:class:`User`, :class:`Member`]\n The user or member that invoked the interaction.\n \"\"\"\n\n __slots__: Tuple[str, ...]
= ('id', 'type', 'name', 'user')\n\n def __init__(self, *, state: ConnectionState, guild: Optional[Guild], data: MessageInteractionPayload) -> None:\n self.id: int = int(data['id'])\n self.type: InteractionType = try_enum(InteractionType, data['type'])\n self.name: str = data['name']\n self.user: Union[User, Member] = MISSING\n\n try:\n payload = data['member']\n except KeyError:\n self.user = state.create_user(data['user'])\n else:\n if guild is None:\n # This is an unfortunate data loss, but it's better than giving bad data\n # This is also an incredibly rare scenario.\n self.user = state.create_user(data['user'])\n else:\n payload['user'] = data['user']\n self.user = Member(data=payload, guild=guild, state=state) # type: ignore\n\n def __repr__(self) -> str:\n return f'<MessageInteraction id={self.id} name={self.name!r} type={self.type!r} user={self.user!r}>'\n\n @property\n def created_at(self) -> datetime.datetime:\n \"\"\":class:`datetime.datetime`: The interaction's creation time in UTC.\"\"\"\n return utils.snowflake_time(self.id)\n\n\ndef flatten_handlers(cls: Type[Message]) -> Type[Message]:\n prefix = len('_handle_')\n handlers = [\n (key[prefix:], value)\n for key, value in cls.__dict__.items()\n if key.startswith('_handle_') and key != '_handle_member'\n ]\n\n # store _handle_member last\n handlers.append(('member', cls._handle_member))\n cls._HANDLERS = handlers\n cls._CACHED_SLOTS = [attr for attr in cls.__slots__ if attr.startswith('_cs_')]\n return cls\n\n\nclass MessageApplication:\n \"\"\"Represents a message's application data from a :class:`~discord.Message`.\n\n .. versionadded:: 2.0\n\n Attributes\n -----------\n id: :class:`int`\n The application ID.\n description: :class:`str`\n The application description.\n name: :class:`str`\n The application's name.\n \"\"\"\n\n __slots__ = ('_state', '_icon', '_cover_image', 'id', 'description', 'name')\n\n def __init__(self, *, state: ConnectionState, data: MessageApplicationPayload) -> None:\n self._state: ConnectionState = state\n self.id: int = int(data['id'])\n self.description: str = data['description']\n self.name: str = data['name']\n self._icon: Optional[str] = data['icon']\n self._cover_image: Optional[str] = data.get('cover_image')\n\n def __repr__(self) -> str:\n return f'<MessageApplication id={self.id} name={self.name!r}>'\n\n @property\n def icon(self) -> Optional[Asset]:\n \"\"\"Optional[:class:`Asset`]: The application's icon, if any.\"\"\"\n if self._icon:\n return Asset._from_app_icon(state=self._state, object_id=self.id, icon_hash=self._icon, asset_type='icon')\n return None\n\n @property\n def cover(self) -> Optional[Asset]:\n \"\"\"Optional[:class:`Asset`]: The application's cover image, if any.\"\"\"\n if self._cover_image:\n return Asset._from_app_icon(\n state=self._state, object_id=self.id, icon_hash=self._cover_image, asset_type='cover_image'\n )\n return None\n\n\nclass RoleSubscriptionInfo:\n \"\"\"Represents a message's role subscription information.\n\n This is currently only attached to messages of type :attr:`MessageType.role_subscription_purchase`.\n\n .. 
versionadded:: 2.0\n\n Attributes\n -----------\n role_subscription_listing_id: :class:`int`\n The ID of the SKU and listing that the user is subscribed to.\n tier_name: :class:`str`\n The name of the tier that the user is subscribed to.\n total_months_subscribed: :class:`int`\n The cumulative number of months that the user has been subscribed for.\n is_renewal: :class:`bool`\n Whether this notification is for a renewal rather than a new purchase.\n \"\"\"\n\n __slots__ = (\n 'role_subscription_listing_id',\n 'tier_name',\n 'total_months_subscribed',\n 'is_renewal',\n )\n\n def __init__(self, data: RoleSubscriptionDataPayload) -> None:\n self.role_subscription_listing_id: int = int(data['role_subscription_listing_id'])\n self.tier_name: str = data['tier_name']\n self.total_months_subscribed: int = data['total_months_subscribed']\n self.is_renewal: bool = data['is_renewal']\n\n\nclass PartialMessage(Hashable):\n \"\"\"Represents a partial message to aid with working messages when only\n a message and channel ID are present.\n\n There are two ways to construct this class. The first one is through\n the constructor itself, and the second is via the following:\n\n - :meth:`TextChannel.get_partial_message`\n - :meth:`VoiceChannel.get_partial_message`\n - :meth:`StageChannel.get_partial_message`\n - :meth:`Thread.get_partial_message`\n - :meth:`DMChannel.get_partial_message`\n\n Note that this class is trimmed down and has no rich attributes.\n\n .. versionadded:: 1.6\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two partial messages are equal.\n\n .. describe:: x != y\n\n Checks if two partial messages are not equal.\n\n .. describe:: hash(x)\n\n Returns the partial message's hash.\n\n Attributes\n -----------\n channel: Union[:class:`PartialMessageable`, :class:`TextChannel`, :class:`StageChannel`, :class:`VoiceChannel`, :class:`Thread`, :class:`DMChannel`]\n The channel associated with this partial message.\n id: :class:`int`\n The message ID.\n guild: Optional[:class:`Guild`]\n The guild that the partial message belongs to, if applicable.\n \"\"\"\n\n __slots__ = ('channel', 'id', '_cs_guild', '_state', 'guild')\n\n def __init__(self, *, channel: MessageableChannel, id: int) -> None:\n if not isinstance(channel, PartialMessageable) and channel.type not in (\n ChannelType.text,\n ChannelType.voice,\n ChannelType.stage_voice,\n ChannelType.news,\n ChannelType.private,\n ChannelType.news_thread,\n ChannelType.public_thread,\n ChannelType.private_thread,\n ):\n raise TypeError(\n f'expected PartialMessageable, TextChannel, StageChannel, VoiceChannel, DMChannel or Thread not {type(channel)!r}'\n )\n\n self.channel: MessageableChannel = channel\n self._state: ConnectionState = channel._state\n self.id: int = id\n\n self.guild: Optional[Guild] = getattr(channel, 'guild', None)\n\n def _update(self, data: MessageUpdateEvent) -> None:\n # This is used for duck typing purposes.\n # Just do nothing with the data.\n pass\n\n # Also needed for duck typing purposes\n # n.b. 
not exposed\n pinned: Any = property(None, lambda x, y: None)\n\n def __repr__(self) -> str:\n return f'<PartialMessage id={self.id} channel={self.channel!r}>'\n\n @property\n def created_at(self) -> datetime.datetime:\n \"\"\":class:`datetime.datetime`: The partial message's creation time in UTC.\"\"\"\n return utils.snowflake_time(self.id)\n\n @property\n def jump_url(self) -> str:\n \"\"\":class:`str`: Returns a URL that allows the client to jump to this message.\"\"\"\n guild_id = getattr(self.guild, 'id', '@me')\n return f'https://discord.com/channels/{guild_id}/{self.channel.id}/{self.id}'\n\n @property\n def thread(self) -> Optional[Thread]:\n \"\"\"Optional[:class:`Thread`]: The public thread created from this message, if it exists.\n\n .. note::\n\n This does not retrieve archived threads, as they are not retained in the internal\n cache. Use :meth:`fetch_thread` instead.\n\n .. versionadded:: 2.4\n \"\"\"\n if self.guild is not None:\n return self.guild.get_thread(self.id)\n\n async def fetch(self) -> Message:\n \"\"\"|coro|\n\n Fetches the partial message to a full :class:`Message`.\n\n Raises\n --------\n NotFound\n The message was not found.\n Forbidden\n You do not have the permissions required to get a message.\n HTTPException\n Retrieving the message failed.\n\n Returns\n --------\n :class:`Message`\n The full message.\n \"\"\"\n\n data = await self._state.http.get_message(self.channel.id, self.id)\n return self._state.create_message(channel=self.channel, data=data)\n\n async def delete(self, *, delay: Optional[float] = None) -> None:\n \"\"\"|coro|\n\n Deletes the message.\n\n Your own messages could be deleted without any proper permissions. However to\n delete other people's messages, you must have :attr:`~Permissions.manage_messages`.\n\n .. versionchanged:: 1.1\n Added the new ``delay`` keyword-only parameter.\n\n Parameters\n -----------\n delay: Optional[:class:`float`]\n If provided, the number of seconds to wait in the background\n before deleting the message. 
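A hedged sketch of delayed deletion (``msg`` is an assumed :class:`Message`):\n\n .. code-block:: python3\n\n # Schedule the deletion five seconds from now without blocking.\n await msg.delete(delay=5.0)\n\n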
If the deletion fails then it is silently ignored.\n\n Raises\n ------\n Forbidden\n You do not have proper permissions to delete the message.\n NotFound\n The message was deleted already\n HTTPException\n Deleting the message failed.\n \"\"\"\n if delay is not None:\n\n async def delete(delay: float):\n await asyncio.sleep(delay)\n try:\n await self._state.http.delete_message(self.channel.id, self.id)\n except HTTPException:\n pass\n\n asyncio.create_task(delete(delay))\n else:\n await self._state.http.delete_message(self.channel.id, self.id)\n\n @overload\n async def edit(\n self,\n *,\n content: Optional[str] = ...,\n embed: Optional[Embed] = ...,\n attachments: Sequence[Union[Attachment, File]] = ...,\n delete_after: Optional[float] = ...,\n allowed_mentions: Optional[AllowedMentions] = ...,\n view: Optional[View] = ...,\n ) -> Message:\n ...\n\n @overload\n async def edit(\n self,\n *,\n content: Optional[str] = ...,\n embeds: Sequence[Embed] = ...,\n attachments: Sequence[Union[Attachment, File]] = ...,\n delete_after: Optional[float] = ...,\n allowed_mentions: Optional[AllowedMentions] = ...,\n view: Optional[View] = ...,\n ) -> Message:\n ...\n\n async def edit(\n self,\n *,\n content: Optional[str] = MISSING,\n embed: Optional[Embed] = MISSING,\n embeds: Sequence[Embed] = MISSING,\n attachments: Sequence[Union[Attachment, File]] = MISSING,\n delete_after: Optional[float] = None,\n allowed_mentions: Optional[AllowedMentions] = MISSING,\n view: Optional[View] = MISSING,\n ) -> Message:\n \"\"\"|coro|\n\n Edits the message.\n\n The content must be able to be transformed into a string via ``str(content)``.\n\n .. versionchanged:: 2.0\n Edits are no longer in-place, the newly edited message is returned instead.\n\n .. versionchanged:: 2.0\n This function will now raise :exc:`TypeError` instead of\n ``InvalidArgument``.\n\n Parameters\n -----------\n content: Optional[:class:`str`]\n The new content to replace the message with.\n Could be ``None`` to remove the content.\n embed: Optional[:class:`Embed`]\n The new embed to replace the original with.\n Could be ``None`` to remove the embed.\n embeds: List[:class:`Embed`]\n The new embeds to replace the original with. Must be a maximum of 10.\n To remove all embeds ``[]`` should be passed.\n\n .. versionadded:: 2.0\n attachments: List[Union[:class:`Attachment`, :class:`File`]]\n A list of attachments to keep in the message as well as new files to upload. If ``[]`` is passed\n then all attachments are removed.\n\n .. note::\n\n New files will always appear after current attachments.\n\n .. versionadded:: 2.0\n delete_after: Optional[:class:`float`]\n If provided, the number of seconds to wait in the background\n before deleting the message we just edited. If the deletion fails,\n then it is silently ignored.\n allowed_mentions: Optional[:class:`~discord.AllowedMentions`]\n Controls the mentions being processed in this message. If this is\n passed, then the object is merged with :attr:`~discord.Client.allowed_mentions`.\n The merging behaviour only overrides attributes that have been explicitly passed\n to the object, otherwise it uses the attributes set in :attr:`~discord.Client.allowed_mentions`.\n If no object is passed at all then the defaults given by :attr:`~discord.Client.allowed_mentions`\n are used instead.\n\n .. versionadded:: 1.4\n view: Optional[:class:`~discord.ui.View`]\n The updated view to update this message with. 
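As a hedged one-liner (``msg`` is an assumed :class:`Message`):\n\n .. code-block:: python3\n\n # Drop all components from the message.\n msg = await msg.edit(view=None)\n\n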
If ``None`` is passed then\n the view is removed.\n\n Raises\n -------\n HTTPException\n Editing the message failed.\n Forbidden\n Tried to suppress a message without permissions or\n edited a message's content or embed that isn't yours.\n TypeError\n You specified both ``embed`` and ``embeds``\n\n Returns\n --------\n :class:`Message`\n The newly edited message.\n \"\"\"\n\n if content is not MISSING:\n previous_allowed_mentions = self._state.allowed_mentions\n else:\n previous_allowed_mentions = None\n\n if view is not MISSING:\n self._state.prevent_view_updates_for(self.id)\n\n with handle_message_parameters(\n content=content,\n embed=embed,\n embeds=embeds,\n attachments=attachments,\n view=view,\n allowed_mentions=allowed_mentions,\n previous_allowed_mentions=previous_allowed_mentions,\n ) as params:\n data = await self._state.http.edit_message(self.channel.id, self.id, params=params)\n message = Message(state=self._state, channel=self.channel, data=data)\n\n if view and not view.is_finished():\n interaction: Optional[MessageInteraction] = getattr(self, 'interaction', None)\n if interaction is not None:\n self._state.store_view(view, self.id, interaction_id=interaction.id)\n else:\n self._state.store_view(view, self.id)\n\n if delete_after is not None:\n await self.delete(delay=delete_after)\n\n return message\n\n async def publish(self) -> None:\n \"\"\"|coro|\n\n Publishes this message to the channel's followers.\n\n The message must have been sent in a news channel.\n You must have :attr:`~Permissions.send_messages` to do this.\n\n If the message is not your own then :attr:`~Permissions.manage_messages`\n is also needed.\n\n Raises\n -------\n Forbidden\n You do not have the proper permissions to publish this message\n or the channel is not a news channel.\n HTTPException\n Publishing the message failed.\n \"\"\"\n\n await self._state.http.publish_message(self.channel.id, self.id)\n\n async def pin(self, *, reason: Optional[str] = None) -> None:\n \"\"\"|coro|\n\n Pins the message.\n\n You must have :attr:`~Permissions.manage_messages` to do\n this in a non-private channel context.\n\n Parameters\n -----------\n reason: Optional[:class:`str`]\n The reason for pinning the message. Shows up on the audit log.\n\n .. versionadded:: 1.4\n\n Raises\n -------\n Forbidden\n You do not have permissions to pin the message.\n NotFound\n The message or channel was not found or deleted.\n HTTPException\n Pinning the message failed, probably due to the channel\n having more than 50 pinned messages.\n \"\"\"\n\n await self._state.http.pin_message(self.channel.id, self.id, reason=reason)\n # pinned exists on PartialMessage for duck typing purposes\n self.pinned = True\n\n async def unpin(self, *, reason: Optional[str] = None) -> None:\n \"\"\"|coro|\n\n Unpins the message.\n\n You must have :attr:`~Permissions.manage_messages` to do\n this in a non-private channel context.\n\n Parameters\n -----------\n reason: Optional[:class:`str`]\n The reason for unpinning the message. Shows up on the audit log.\n\n .. 
versionadded:: 1.4\n\n Raises\n -------\n Forbidden\n You do not have permissions to unpin the message.\n NotFound\n The message or channel was not found or deleted.\n HTTPException\n Unpinning the message failed.\n \"\"\"\n\n await self._state.http.unpin_message(self.channel.id, self.id, reason=reason)\n # pinned exists on PartialMessage for duck typing purposes\n self.pinned = False\n\n async def add_reaction(self, emoji: Union[EmojiInputType, Reaction], /) -> None:\n \"\"\"|coro|\n\n Adds a reaction to the message.\n\n The emoji may be a unicode emoji or a custom guild :class:`Emoji`.\n\n You must have :attr:`~Permissions.read_message_history`\n to do this. If nobody else has reacted to the message using this\n emoji, :attr:`~Permissions.add_reactions` is required.\n\n .. versionchanged:: 2.0\n\n ``emoji`` parameter is now positional-only.\n\n .. versionchanged:: 2.0\n This function will now raise :exc:`TypeError` instead of\n ``InvalidArgument``.\n\n Parameters\n ------------\n emoji: Union[:class:`Emoji`, :class:`Reaction`, :class:`PartialEmoji`, :class:`str`]\n The emoji to react with.\n\n Raises\n --------\n HTTPException\n Adding the reaction failed.\n Forbidden\n You do not have the proper permissions to react to the message.\n NotFound\n The emoji you specified was not found.\n TypeError\n The emoji parameter is invalid.\n \"\"\"\n\n emoji = convert_emoji_reaction(emoji)\n await self._state.http.add_reaction(self.channel.id, self.id, emoji)\n\n async def remove_reaction(self, emoji: Union[EmojiInputType, Reaction], member: Snowflake) -> None:\n \"\"\"|coro|\n\n Remove a reaction by the member from the message.\n\n The emoji may be a unicode emoji or a custom guild :class:`Emoji`.\n\n If the reaction is not your own (i.e. ``member`` parameter is not you) then\n :attr:`~Permissions.manage_messages` is needed.\n\n The ``member`` parameter must represent a member and meet\n the :class:`abc.Snowflake` abc.\n\n .. versionchanged:: 2.0\n This function will now raise :exc:`TypeError` instead of\n ``InvalidArgument``.\n\n Parameters\n ------------\n emoji: Union[:class:`Emoji`, :class:`Reaction`, :class:`PartialEmoji`, :class:`str`]\n The emoji to remove.\n member: :class:`abc.Snowflake`\n The member for which to remove the reaction.\n\n Raises\n --------\n HTTPException\n Removing the reaction failed.\n Forbidden\n You do not have the proper permissions to remove the reaction.\n NotFound\n The member or emoji you specified was not found.\n TypeError\n The emoji parameter is invalid.\n \"\"\"\n\n emoji = convert_emoji_reaction(emoji)\n\n if member.id == self._state.self_id:\n await self._state.http.remove_own_reaction(self.channel.id, self.id, emoji)\n else:\n await self._state.http.remove_reaction(self.channel.id, self.id, emoji, member.id)\n\n async def clear_reaction(self, emoji: Union[EmojiInputType, Reaction]) -> None:\n \"\"\"|coro|\n\n Clears a specific reaction from the message.\n\n The emoji may be a unicode emoji or a custom guild :class:`Emoji`.\n\n You must have :attr:`~Permissions.manage_messages` to do this.\n\n .. versionadded:: 1.3\n\n .. 
versionchanged:: 2.0\n This function will now raise :exc:`TypeError` instead of\n ``InvalidArgument``.\n\n Parameters\n -----------\n emoji: Union[:class:`Emoji`, :class:`Reaction`, :class:`PartialEmoji`, :class:`str`]\n The emoji to clear.\n\n Raises\n --------\n HTTPException\n Clearing the reaction failed.\n Forbidden\n You do not have the proper permissions to clear the reaction.\n NotFound\n The emoji you specified was not found.\n TypeError\n The emoji parameter is invalid.\n \"\"\"\n\n emoji = convert_emoji_reaction(emoji)\n await self._state.http.clear_single_reaction(self.channel.id, self.id, emoji)\n\n async def clear_reactions(self) -> None:\n \"\"\"|coro|\n\n Removes all the reactions from the message.\n\n You must have :attr:`~Permissions.manage_messages` to do this.\n\n Raises\n --------\n HTTPException\n Removing the reactions failed.\n Forbidden\n You do not have the proper permissions to remove all the reactions.\n \"\"\"\n await self._state.http.clear_reactions(self.channel.id, self.id)\n\n async def create_thread(\n self,\n *,\n name: str,\n auto_archive_duration: ThreadArchiveDuration = MISSING,\n slowmode_delay: Optional[int] = None,\n reason: Optional[str] = None,\n ) -> Thread:\n \"\"\"|coro|\n\n Creates a public thread from this message.\n\n You must have :attr:`~discord.Permissions.create_public_threads` in order to\n create a public thread from a message.\n\n The channel this message belongs in must be a :class:`TextChannel`.\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n name: :class:`str`\n The name of the thread.\n auto_archive_duration: :class:`int`\n The duration in minutes before a thread is automatically hidden from the channel list.\n If not provided, the channel's default auto archive duration is used.\n\n Must be one of ``60``, ``1440``, ``4320``, or ``10080``, if provided.\n slowmode_delay: Optional[:class:`int`]\n Specifies the slowmode rate limit for user in this channel, in seconds.\n The maximum value possible is ``21600``. By default no slowmode rate limit\n if this is ``None``.\n reason: Optional[:class:`str`]\n The reason for creating a new thread. Shows up on the audit log.\n\n Raises\n -------\n Forbidden\n You do not have permissions to create a thread.\n HTTPException\n Creating the thread failed.\n ValueError\n This message does not have guild info attached.\n\n Returns\n --------\n :class:`.Thread`\n The created thread.\n \"\"\"\n if self.guild is None:\n raise ValueError('This message does not have guild info attached.')\n\n default_auto_archive_duration: ThreadArchiveDuration = getattr(self.channel, 'default_auto_archive_duration', 1440)\n data = await self._state.http.start_thread_with_message(\n self.channel.id,\n self.id,\n name=name,\n auto_archive_duration=auto_archive_duration or default_auto_archive_duration,\n rate_limit_per_user=slowmode_delay,\n reason=reason,\n )\n return Thread(guild=self.guild, state=self._state, data=data)\n\n async def fetch_thread(self) -> Thread:\n \"\"\"|coro|\n\n Retrieves the public thread attached to this message.\n\n .. note::\n\n This method is an API call. For general usage, consider :attr:`thread` instead.\n\n .. 
versionadded:: 2.4\n\n Raises\n -------\n InvalidData\n An unknown channel type was received from Discord\n or the guild the thread belongs to is not the same\n as the one in this object points to.\n HTTPException\n Retrieving the thread failed.\n NotFound\n There is no thread attached to this message.\n Forbidden\n You do not have permission to fetch this channel.\n\n Returns\n --------\n :class:`.Thread`\n The public thread attached to this message.\n \"\"\"\n if self.guild is None:\n raise ValueError('This message does not have guild info attached.')\n\n return await self.guild.fetch_channel(self.id) # type: ignore # Can only be Thread in this case\n\n @overload\n async def reply(\n self,\n content: Optional[str] = ...,\n *,\n tts: bool = ...,\n embed: Embed = ...,\n file: File = ...,\n stickers: Sequence[Union[GuildSticker, StickerItem]] = ...,\n delete_after: float = ...,\n nonce: Union[str, int] = ...,\n allowed_mentions: AllowedMentions = ...,\n reference: Union[Message, MessageReference, PartialMessage] = ...,\n mention_author: bool = ...,\n view: View = ...,\n suppress_embeds: bool = ...,\n silent: bool = ...,\n ) -> Message:\n ...\n\n @overload\n async def reply(\n self,\n content: Optional[str] = ...,\n *,\n tts: bool = ...,\n embed: Embed = ...,\n files: Sequence[File] = ...,\n stickers: Sequence[Union[GuildSticker, StickerItem]] = ...,\n delete_after: float = ...,\n nonce: Union[str, int] = ...,\n allowed_mentions: AllowedMentions = ...,\n reference: Union[Message, MessageReference, PartialMessage] = ...,\n mention_author: bool = ...,\n view: View = ...,\n suppress_embeds: bool = ...,\n silent: bool = ...,\n ) -> Message:\n ...\n\n @overload\n async def reply(\n self,\n content: Optional[str] = ...,\n *,\n tts: bool = ...,\n embeds: Sequence[Embed] = ...,\n file: File = ...,\n stickers: Sequence[Union[GuildSticker, StickerItem]] = ...,\n delete_after: float = ...,\n nonce: Union[str, int] = ...,\n allowed_mentions: AllowedMentions = ...,\n reference: Union[Message, MessageReference, PartialMessage] = ...,\n mention_author: bool = ...,\n view: View = ...,\n suppress_embeds: bool = ...,\n silent: bool = ...,\n ) -> Message:\n ...\n\n @overload\n async def reply(\n self,\n content: Optional[str] = ...,\n *,\n tts: bool = ...,\n embeds: Sequence[Embed] = ...,\n files: Sequence[File] = ...,\n stickers: Sequence[Union[GuildSticker, StickerItem]] = ...,\n delete_after: float = ...,\n nonce: Union[str, int] = ...,\n allowed_mentions: AllowedMentions = ...,\n reference: Union[Message, MessageReference, PartialMessage] = ...,\n mention_author: bool = ...,\n view: View = ...,\n suppress_embeds: bool = ...,\n silent: bool = ...,\n ) -> Message:\n ...\n\n async def reply(self, content: Optional[str] = None, **kwargs: Any) -> Message:\n \"\"\"|coro|\n\n A shortcut method to :meth:`.abc.Messageable.send` to reply to the\n :class:`.Message`.\n\n .. versionadded:: 1.6\n\n .. 
versionchanged:: 2.0\n This function will now raise :exc:`TypeError` or\n :exc:`ValueError` instead of ``InvalidArgument``.\n\n Raises\n --------\n ~discord.HTTPException\n Sending the message failed.\n ~discord.Forbidden\n You do not have the proper permissions to send the message.\n ValueError\n The ``files`` list is not of the appropriate size.\n TypeError\n You specified both ``file`` and ``files``.\n\n Returns\n ---------\n :class:`.Message`\n The message that was sent.\n \"\"\"\n\n return await self.channel.send(content, reference=self, **kwargs)\n\n def to_reference(self, *, fail_if_not_exists: bool = True) -> MessageReference:\n \"\"\"Creates a :class:`~discord.MessageReference` from the current message.\n\n .. versionadded:: 1.6\n\n Parameters\n ----------\n fail_if_not_exists: :class:`bool`\n Whether replying using the message reference should raise :class:`HTTPException`\n if the message no longer exists or Discord could not fetch the message.\n\n .. versionadded:: 1.7\n\n Returns\n ---------\n :class:`~discord.MessageReference`\n The reference to this message.\n \"\"\"\n\n return MessageReference.from_message(self, fail_if_not_exists=fail_if_not_exists)\n\n def to_message_reference_dict(self) -> MessageReferencePayload:\n data: MessageReferencePayload = {\n 'message_id': self.id,\n 'channel_id': self.channel.id,\n }\n\n if self.guild is not None:\n data['guild_id'] = self.guild.id\n\n return data\n\n\n@flatten_handlers\nclass Message(PartialMessage, Hashable):\n r\"\"\"Represents a message from Discord.\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two messages are equal.\n\n .. describe:: x != y\n\n Checks if two messages are not equal.\n\n .. describe:: hash(x)\n\n Returns the message's hash.\n\n Attributes\n -----------\n tts: :class:`bool`\n Specifies if the message was done with text-to-speech.\n This can only be accurately received in :func:`on_message` due to\n a Discord limitation.\n type: :class:`MessageType`\n The type of message. In most cases this should not be checked, but it is helpful\n in cases where it might be a system message for :attr:`system_content`.\n author: Union[:class:`Member`, :class:`abc.User`]\n A :class:`Member` that sent the message. If :attr:`channel` is a\n private channel or the user has left the guild, then it is a :class:`User` instead.\n content: :class:`str`\n The actual contents of the message.\n If :attr:`Intents.message_content` is not enabled this will always be an empty string\n unless the bot is mentioned or the message is a direct message.\n nonce: Optional[Union[:class:`str`, :class:`int`]]\n The value used by the Discord guild and the client to verify that the message is successfully sent.\n This is not stored long term within Discord's servers and is only used ephemerally.\n embeds: List[:class:`Embed`]\n A list of embeds the message has.\n If :attr:`Intents.message_content` is not enabled this will always be an empty list\n unless the bot is mentioned or the message is a direct message.\n channel: Union[:class:`TextChannel`, :class:`StageChannel`, :class:`VoiceChannel`, :class:`Thread`, :class:`DMChannel`, :class:`GroupChannel`, :class:`PartialMessageable`]\n The :class:`TextChannel` or :class:`Thread` that the message was sent from.\n Could be a :class:`DMChannel` or :class:`GroupChannel` if it's a private message.\n reference: Optional[:class:`~discord.MessageReference`]\n The message that this message references.
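A hedged sketch of resolving a reply's reference (``message`` is an assumed :class:`Message`):\n\n .. code-block:: python3\n\n ref = message.reference\n if ref is not None and isinstance(ref.resolved, discord.Message):\n print('Replying to:', ref.resolved.content)\n\n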
This is only applicable to messages of\n type :attr:`MessageType.pins_add`, crossposted messages created by a\n followed channel integration, or message replies.\n\n .. versionadded:: 1.5\n\n mention_everyone: :class:`bool`\n Specifies if the message mentions everyone.\n\n .. note::\n\n This does not check if the ``@everyone`` or the ``@here`` text is in the message itself.\n Rather this boolean indicates if either the ``@everyone`` or the ``@here`` text is in the message\n **and** it did end up mentioning.\n mentions: List[:class:`abc.User`]\n A list of :class:`Member` that were mentioned. If the message is in a private message\n then the list will be of :class:`User` instead. For messages that are not of type\n :attr:`MessageType.default`\\, this array can be used to aid in system messages.\n For more information, see :attr:`system_content`.\n\n .. warning::\n\n The order of the mentions list is not in any particular order so you should\n not rely on it. This is a Discord limitation, not one with the library.\n channel_mentions: List[Union[:class:`abc.GuildChannel`, :class:`Thread`]]\n A list of :class:`abc.GuildChannel` or :class:`Thread` that were mentioned. If the message is\n in a private message then the list is always empty.\n role_mentions: List[:class:`Role`]\n A list of :class:`Role` that were mentioned. If the message is in a private message\n then the list is always empty.\n id: :class:`int`\n The message ID.\n webhook_id: Optional[:class:`int`]\n If this message was sent by a webhook, then this is the webhook ID's that sent this\n message.\n attachments: List[:class:`Attachment`]\n A list of attachments given to a message.\n If :attr:`Intents.message_content` is not enabled this will always be an empty list\n unless the bot is mentioned or the message is a direct message.\n pinned: :class:`bool`\n Specifies if the message is currently pinned.\n flags: :class:`MessageFlags`\n Extra features of the message.\n\n .. versionadded:: 1.3\n\n reactions : List[:class:`Reaction`]\n Reactions to a message. Reactions can be either custom emoji or standard unicode emoji.\n activity: Optional[:class:`dict`]\n The activity associated with this message. Sent with Rich-Presence related messages that for\n example, request joining, spectating, or listening to or with another member.\n\n It is a dictionary with the following optional keys:\n\n - ``type``: An integer denoting the type of message activity being requested.\n - ``party_id``: The party ID associated with the party.\n application: Optional[:class:`~discord.MessageApplication`]\n The rich presence enabled application associated with this message.\n\n .. versionchanged:: 2.0\n Type is now :class:`MessageApplication` instead of :class:`dict`.\n\n stickers: List[:class:`StickerItem`]\n A list of sticker items given to the message.\n\n .. versionadded:: 1.6\n components: List[Union[:class:`ActionRow`, :class:`Button`, :class:`SelectMenu`]]\n A list of components in the message.\n If :attr:`Intents.message_content` is not enabled this will always be an empty list\n unless the bot is mentioned or the message is a direct message.\n\n .. versionadded:: 2.0\n interaction: Optional[:class:`MessageInteraction`]\n The interaction that this message is a response to.\n\n .. versionadded:: 2.0\n role_subscription: Optional[:class:`RoleSubscriptionInfo`]\n The data of the role subscription purchase or renewal that prompted this\n :attr:`MessageType.role_subscription_purchase` message.\n\n .. 
versionadded:: 2.2\n application_id: Optional[:class:`int`]\n The application ID of the application that created this message if this\n message was sent by an application-owned webhook or an interaction.\n\n .. versionadded:: 2.2\n position: Optional[:class:`int`]\n A generally increasing integer with potentially gaps or duplicates that represents\n the approximate position of the message in a thread.\n\n .. versionadded:: 2.2\n guild: Optional[:class:`Guild`]\n The guild that the message belongs to, if applicable.\n \"\"\"\n\n __slots__ = (\n '_edited_timestamp',\n '_cs_channel_mentions',\n '_cs_raw_mentions',\n '_cs_clean_content',\n '_cs_raw_channel_mentions',\n '_cs_raw_role_mentions',\n '_cs_system_content',\n '_thread',\n 'tts',\n 'content',\n 'webhook_id',\n 'mention_everyone',\n 'embeds',\n 'mentions',\n 'author',\n 'attachments',\n 'nonce',\n 'pinned',\n 'role_mentions',\n 'type',\n 'flags',\n 'reactions',\n 'reference',\n 'application',\n 'activity',\n 'stickers',\n 'components',\n 'interaction',\n 'role_subscription',\n 'application_id',\n 'position',\n )\n\n if TYPE_CHECKING:\n _HANDLERS: ClassVar[List[Tuple[str, Callable[..., None]]]]\n _CACHED_SLOTS: ClassVar[List[str]]\n # guild: Optional[Guild]\n reference: Optional[MessageReference]\n mentions: List[Union[User, Member]]\n author: Union[User, Member]\n role_mentions: List[Role]\n components: List[MessageComponentType]\n\n def __init__(\n self,\n *,\n state: ConnectionState,\n channel: MessageableChannel,\n data: MessagePayload,\n ) -> None:\n self.channel: MessageableChannel = channel\n self.id: int = int(data['id'])\n self._state: ConnectionState = state\n self.webhook_id: Optional[int] = utils._get_as_snowflake(data, 'webhook_id')\n self.reactions: List[Reaction] = [Reaction(message=self, data=d) for d in data.get('reactions', [])]\n self.attachments: List[Attachment] = [Attachment(data=a, state=self._state) for a in data['attachments']]\n self.embeds: List[Embed] = [Embed.from_dict(a) for a in data['embeds']]\n self.activity: Optional[MessageActivityPayload] = data.get('activity')\n self._edited_timestamp: Optional[datetime.datetime] = utils.parse_time(data['edited_timestamp'])\n self.type: MessageType = try_enum(MessageType, data['type'])\n self.pinned: bool = data['pinned']\n self.flags: MessageFlags = MessageFlags._from_value(data.get('flags', 0))\n self.mention_everyone: bool = data['mention_everyone']\n self.tts: bool = data['tts']\n self.content: str = data['content']\n self.nonce: Optional[Union[int, str]] = data.get('nonce')\n self.position: Optional[int] = data.get('position')\n self.application_id: Optional[int] = utils._get_as_snowflake(data, 'application_id')\n self.stickers: List[StickerItem] = [StickerItem(data=d, state=state) for d in data.get('sticker_items', [])]\n\n try:\n # if the channel doesn't have a guild attribute, we handle that\n self.guild = channel.guild\n except AttributeError:\n self.guild = state._get_guild(utils._get_as_snowflake(data, 'guild_id'))\n\n self._thread: Optional[Thread] = None\n\n if self.guild is not None:\n try:\n thread = data['thread']\n except KeyError:\n pass\n else:\n self._thread = self.guild.get_thread(int(thread['id']))\n\n if self._thread is not None:\n self._thread._update(thread)\n else:\n self._thread = Thread(guild=self.guild, state=state, data=thread)\n\n self.interaction: Optional[MessageInteraction] = None\n\n try:\n interaction = data['interaction']\n except KeyError:\n pass\n else:\n self.interaction = MessageInteraction(state=state, guild=self.guild, 
data=interaction)\n\n try:\n ref = data['message_reference']\n except KeyError:\n self.reference = None\n else:\n self.reference = ref = MessageReference.with_state(state, ref)\n try:\n resolved = data['referenced_message']\n except KeyError:\n pass\n else:\n if resolved is None:\n ref.resolved = DeletedReferencedMessage(ref)\n else:\n # Right now the channel IDs match but maybe in the future they won't.\n if ref.channel_id == channel.id:\n chan = channel\n elif isinstance(channel, Thread) and channel.parent_id == ref.channel_id:\n chan = channel\n else:\n chan, _ = state._get_guild_channel(resolved, ref.guild_id)\n\n # the channel will be the correct type here\n ref.resolved = self.__class__(channel=chan, data=resolved, state=state) # type: ignore\n\n self.application: Optional[MessageApplication] = None\n try:\n application = data['application']\n except KeyError:\n pass\n else:\n self.application = MessageApplication(state=self._state, data=application)\n\n self.role_subscription: Optional[RoleSubscriptionInfo] = None\n try:\n role_subscription = data['role_subscription_data']\n except KeyError:\n pass\n else:\n self.role_subscription = RoleSubscriptionInfo(role_subscription)\n\n for handler in ('author', 'member', 'mentions', 'mention_roles', 'components'):\n try:\n getattr(self, f'_handle_{handler}')(data[handler])\n except KeyError:\n continue\n\n def __repr__(self) -> str:\n name = self.__class__.__name__\n return (\n f'<{name} id={self.id} channel={self.channel!r} type={self.type!r} author={self.author!r} flags={self.flags!r}>'\n )\n\n def _try_patch(self, data, key, transform=None) -> None:\n try:\n value = data[key]\n except KeyError:\n pass\n else:\n if transform is None:\n setattr(self, key, value)\n else:\n setattr(self, key, transform(value))\n\n def _add_reaction(self, data, emoji, user_id) -> Reaction:\n reaction = utils.find(lambda r: r.emoji == emoji, self.reactions)\n is_me = data['me'] = user_id == self._state.self_id\n\n if reaction is None:\n reaction = Reaction(message=self, data=data, emoji=emoji)\n self.reactions.append(reaction)\n else:\n reaction.count += 1\n if is_me:\n reaction.me = is_me\n\n return reaction\n\n def _remove_reaction(self, data: MessageReactionRemoveEvent, emoji: EmojiInputType, user_id: int) -> Reaction:\n reaction = utils.find(lambda r: r.emoji == emoji, self.reactions)\n\n if reaction is None:\n # already removed?\n raise ValueError('Emoji already removed?')\n\n # if reaction isn't in the list, we crash. 
This means discord\n # sent bad data, or we stored improperly\n reaction.count -= 1\n\n if user_id == self._state.self_id:\n reaction.me = False\n if reaction.count == 0:\n # this raises ValueError if something went wrong as well.\n self.reactions.remove(reaction)\n\n return reaction\n\n def _clear_emoji(self, emoji: PartialEmoji) -> Optional[Reaction]:\n to_check = str(emoji)\n for index, reaction in enumerate(self.reactions):\n if str(reaction.emoji) == to_check:\n break\n else:\n # didn't find anything so just return\n return\n\n del self.reactions[index]\n return reaction\n\n def _update(self, data: MessageUpdateEvent) -> None:\n # In an update scheme, 'author' key has to be handled before 'member'\n # otherwise they overwrite each other which is undesirable.\n # Since there's no good way to do this we have to iterate over every\n # handler rather than iterating over the keys which is a little slower\n for key, handler in self._HANDLERS:\n try:\n value = data[key]\n except KeyError:\n continue\n else:\n handler(self, value)\n\n # clear the cached properties\n for attr in self._CACHED_SLOTS:\n try:\n delattr(self, attr)\n except AttributeError:\n pass\n\n def _handle_edited_timestamp(self, value: str) -> None:\n self._edited_timestamp = utils.parse_time(value)\n\n def _handle_pinned(self, value: bool) -> None:\n self.pinned = value\n\n def _handle_flags(self, value: int) -> None:\n self.flags = MessageFlags._from_value(value)\n\n def _handle_application(self, value: MessageApplicationPayload) -> None:\n application = MessageApplication(state=self._state, data=value)\n self.application = application\n\n def _handle_activity(self, value: MessageActivityPayload) -> None:\n self.activity = value\n\n def _handle_mention_everyone(self, value: bool) -> None:\n self.mention_everyone = value\n\n def _handle_tts(self, value: bool) -> None:\n self.tts = value\n\n def _handle_type(self, value: int) -> None:\n self.type = try_enum(MessageType, value)\n\n def _handle_content(self, value: str) -> None:\n self.content = value\n\n def _handle_attachments(self, value: List[AttachmentPayload]) -> None:\n self.attachments = [Attachment(data=a, state=self._state) for a in value]\n\n def _handle_embeds(self, value: List[EmbedPayload]) -> None:\n self.embeds = [Embed.from_dict(data) for data in value]\n\n def _handle_nonce(self, value: Union[str, int]) -> None:\n self.nonce = value\n\n def _handle_author(self, author: UserPayload) -> None:\n self.author = self._state.store_user(author, cache=self.webhook_id is None)\n if isinstance(self.guild, Guild):\n found = self.guild.get_member(self.author.id)\n if found is not None:\n self.author = found\n\n def _handle_member(self, member: MemberPayload) -> None:\n # The gateway now gives us full Member objects sometimes with the following keys\n # deaf, mute, joined_at, roles\n # For the sake of performance I'm going to assume that the only\n # field that needs *updating* would be the joined_at field.\n # If there is no Member object (for some strange reason), then we can upgrade\n # ourselves to a more \"partial\" member object.\n author = self.author\n try:\n # Update member reference\n author._update_from_message(member) # type: ignore\n except AttributeError:\n # It's a user here\n self.author = Member._from_message(message=self, data=member)\n\n def _handle_mentions(self, mentions: List[UserWithMemberPayload]) -> None:\n self.mentions = r = []\n guild = self.guild\n state = self._state\n if not isinstance(guild, Guild):\n self.mentions = [state.store_user(m) for 
m in mentions]\n return\n\n for mention in filter(None, mentions):\n id_search = int(mention['id'])\n member = guild.get_member(id_search)\n if member is not None:\n r.append(member)\n else:\n r.append(Member._try_upgrade(data=mention, guild=guild, state=state))\n\n def _handle_mention_roles(self, role_mentions: List[int]) -> None:\n self.role_mentions = []\n if isinstance(self.guild, Guild):\n for role_id in map(int, role_mentions):\n role = self.guild.get_role(role_id)\n if role is not None:\n self.role_mentions.append(role)\n\n def _handle_components(self, data: List[ComponentPayload]) -> None:\n self.components = []\n\n for component_data in data:\n component = _component_factory(component_data)\n\n if component is not None:\n self.components.append(component)\n\n def _handle_interaction(self, data: MessageInteractionPayload):\n self.interaction = MessageInteraction(state=self._state, guild=self.guild, data=data)\n\n def _rebind_cached_references(\n self,\n new_guild: Guild,\n new_channel: Union[GuildChannel, Thread, PartialMessageable],\n ) -> None:\n self.guild = new_guild\n self.channel = new_channel # type: ignore # Not all \"GuildChannel\" are messageable at the moment\n\n @utils.cached_slot_property('_cs_raw_mentions')\n def raw_mentions(self) -> List[int]:\n \"\"\"List[:class:`int`]: A property that returns an array of user IDs matched with\n the syntax of ``<@user_id>`` in the message content.\n\n This allows you to receive the user IDs of mentioned users\n even in a private message context.\n \"\"\"\n return [int(x) for x in re.findall(r'<@!?([0-9]{15,20})>', self.content)]\n\n @utils.cached_slot_property('_cs_raw_channel_mentions')\n def raw_channel_mentions(self) -> List[int]:\n \"\"\"List[:class:`int`]: A property that returns an array of channel IDs matched with\n the syntax of ``<#channel_id>`` in the message content.\n \"\"\"\n return [int(x) for x in re.findall(r'<#([0-9]{15,20})>', self.content)]\n\n @utils.cached_slot_property('_cs_raw_role_mentions')\n def raw_role_mentions(self) -> List[int]:\n \"\"\"List[:class:`int`]: A property that returns an array of role IDs matched with\n the syntax of ``<@&role_id>`` in the message content.\n \"\"\"\n return [int(x) for x in re.findall(r'<@&([0-9]{15,20})>', self.content)]\n\n @utils.cached_slot_property('_cs_channel_mentions')\n def channel_mentions(self) -> List[Union[GuildChannel, Thread]]:\n if self.guild is None:\n return []\n it = filter(None, map(self.guild._resolve_channel, self.raw_channel_mentions))\n return utils._unique(it)\n\n @utils.cached_slot_property('_cs_clean_content')\n def clean_content(self) -> str:\n \"\"\":class:`str`: A property that returns the content in a \"cleaned up\"\n manner. This basically means that mentions are transformed\n into the way the client shows it. e.g. ``<#id>`` will transform\n into ``#name``.\n\n This will also transform @everyone and @here mentions into\n non-mentions.\n\n .. note::\n\n This *does not* affect markdown. 
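As a hedged example of pairing it with markdown escaping:\n\n .. code-block:: python3\n\n text = discord.utils.escape_markdown(message.clean_content)\n\n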
If you want to escape\n or remove markdown then use :func:`utils.escape_markdown` or :func:`utils.remove_markdown`\n respectively, along with this function.\n \"\"\"\n\n if self.guild:\n\n def resolve_member(id: int) -> str:\n m = self.guild.get_member(id) or utils.get(self.mentions, id=id) # type: ignore\n return f'@{m.display_name}' if m else '@deleted-user'\n\n def resolve_role(id: int) -> str:\n r = self.guild.get_role(id) or utils.get(self.role_mentions, id=id) # type: ignore\n return f'@{r.name}' if r else '@deleted-role'\n\n def resolve_channel(id: int) -> str:\n c = self.guild._resolve_channel(id) # type: ignore\n return f'#{c.name}' if c else '#deleted-channel'\n\n else:\n\n def resolve_member(id: int) -> str:\n m = utils.get(self.mentions, id=id)\n return f'@{m.display_name}' if m else '@deleted-user'\n\n def resolve_role(id: int) -> str:\n return '@deleted-role'\n\n def resolve_channel(id: int) -> str:\n return '#deleted-channel'\n\n transforms = {\n '@': resolve_member,\n '@!': resolve_member,\n '#': resolve_channel,\n '@&': resolve_role,\n }\n\n def repl(match: re.Match) -> str:\n type = match[1]\n id = int(match[2])\n transformed = transforms[type](id)\n return transformed\n\n result = re.sub(r'<(@[!&]?|#)([0-9]{15,20})>', repl, self.content)\n\n return escape_mentions(result)\n\n @property\n def created_at(self) -> datetime.datetime:\n \"\"\":class:`datetime.datetime`: The message's creation time in UTC.\"\"\"\n return utils.snowflake_time(self.id)\n\n @property\n def edited_at(self) -> Optional[datetime.datetime]:\n \"\"\"Optional[:class:`datetime.datetime`]: An aware UTC datetime object containing the edited time of the message.\"\"\"\n return self._edited_timestamp\n\n @property\n def thread(self) -> Optional[Thread]:\n \"\"\"Optional[:class:`Thread`]: The public thread created from this message, if it exists.\n\n .. note::\n\n For messages received via the gateway this does not retrieve archived threads, as they\n are not retained in the internal cache. Use :meth:`fetch_thread` instead.\n\n .. versionadded:: 2.4\n \"\"\"\n if self.guild is not None:\n # Fall back to guild threads in case one was created after the message\n return self._thread or self.guild.get_thread(self.id)\n\n def is_system(self) -> bool:\n \"\"\":class:`bool`: Whether the message is a system message.\n\n A system message is a message that is constructed entirely by the Discord API\n in response to something.\n\n .. versionadded:: 1.3\n \"\"\"\n return self.type not in (\n MessageType.default,\n MessageType.reply,\n MessageType.chat_input_command,\n MessageType.context_menu_command,\n MessageType.thread_starter_message,\n )\n\n @utils.cached_slot_property('_cs_system_content')\n def system_content(self) -> str:\n r\"\"\":class:`str`: A property that returns the content that is rendered\n regardless of the :attr:`Message.type`.\n\n In the case of :attr:`MessageType.default` and :attr:`MessageType.reply`\\,\n this just returns the regular :attr:`Message.content`. 
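A hedged logging sketch (``message`` is an assumed :class:`Message`):\n\n .. code-block:: python3\n\n # Render any message, system or not, roughly the way a client would.\n print(f'{message.author}: {message.system_content}')\n\n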
Otherwise this\n returns an English message denoting the contents of the system message.\n \"\"\"\n\n if self.type is MessageType.default:\n return self.content\n\n if self.type is MessageType.recipient_add:\n if self.channel.type is ChannelType.group:\n return f'{self.author.name} added {self.mentions[0].name} to the group.'\n else:\n return f'{self.author.name} added {self.mentions[0].name} to the thread.'\n\n if self.type is MessageType.recipient_remove:\n if self.channel.type is ChannelType.group:\n return f'{self.author.name} removed {self.mentions[0].name} from the group.'\n else:\n return f'{self.author.name} removed {self.mentions[0].name} from the thread.'\n\n if self.type is MessageType.channel_name_change:\n if getattr(self.channel, 'parent', self.channel).type is ChannelType.forum:\n return f'{self.author.name} changed the post title: **{self.content}**'\n else:\n return f'{self.author.name} changed the channel name: **{self.content}**'\n\n if self.type is MessageType.channel_icon_change:\n return f'{self.author.name} changed the group icon.'\n\n if self.type is MessageType.pins_add:\n return f'{self.author.name} pinned a message to this channel.'\n\n if self.type is MessageType.new_member:\n formats = [\n \"{0} joined the party.\",\n \"{0} is here.\",\n \"Welcome, {0}. We hope you brought pizza.\",\n \"A wild {0} appeared.\",\n \"{0} just landed.\",\n \"{0} just slid into the server.\",\n \"{0} just showed up!\",\n \"Welcome {0}. Say hi!\",\n \"{0} hopped into the server.\",\n \"Everyone welcome {0}!\",\n \"Glad you're here, {0}.\",\n \"Good to see you, {0}.\",\n \"Yay you made it, {0}!\",\n ]\n\n created_at_ms = int(self.created_at.timestamp() * 1000)\n return formats[created_at_ms % len(formats)].format(self.author.name)\n\n if self.type is MessageType.premium_guild_subscription:\n if not self.content:\n return f'{self.author.name} just boosted the server!'\n else:\n return f'{self.author.name} just boosted the server **{self.content}** times!'\n\n if self.type is MessageType.premium_guild_tier_1:\n if not self.content:\n return f'{self.author.name} just boosted the server! {self.guild} has achieved **Level 1!**'\n else:\n return f'{self.author.name} just boosted the server **{self.content}** times! {self.guild} has achieved **Level 1!**'\n\n if self.type is MessageType.premium_guild_tier_2:\n if not self.content:\n return f'{self.author.name} just boosted the server! {self.guild} has achieved **Level 2!**'\n else:\n return f'{self.author.name} just boosted the server **{self.content}** times! {self.guild} has achieved **Level 2!**'\n\n if self.type is MessageType.premium_guild_tier_3:\n if not self.content:\n return f'{self.author.name} just boosted the server! {self.guild} has achieved **Level 3!**'\n else:\n return f'{self.author.name} just boosted the server **{self.content}** times! {self.guild} has achieved **Level 3!**'\n\n if self.type is MessageType.channel_follow_add:\n return (\n f'{self.author.name} has added {self.content} to this channel. Its most important updates will show up here.'\n )\n\n if self.type is MessageType.guild_stream:\n # the author will be a Member\n return f'{self.author.name} is live! Now streaming {self.author.activity.name}' # type: ignore\n\n if self.type is MessageType.guild_discovery_disqualified:\n return 'This server has been removed from Server Discovery because it no longer passes all the requirements. 
Check Server Settings for more details.'\n\n if self.type is MessageType.guild_discovery_requalified:\n return 'This server is eligible for Server Discovery again and has been automatically relisted!'\n\n if self.type is MessageType.guild_discovery_grace_period_initial_warning:\n return 'This server has failed Discovery activity requirements for 1 week. If this server fails for 4 weeks in a row, it will be automatically removed from Discovery.'\n\n if self.type is MessageType.guild_discovery_grace_period_final_warning:\n return 'This server has failed Discovery activity requirements for 3 weeks in a row. If this server fails for 1 more week, it will be removed from Discovery.'\n\n if self.type is MessageType.thread_created:\n return f'{self.author.name} started a thread: **{self.content}**. See all **threads**.'\n\n if self.type is MessageType.reply:\n return self.content\n\n if self.type is MessageType.thread_starter_message:\n if self.reference is None or self.reference.resolved is None:\n return 'Sorry, we couldn\\'t load the first message in this thread'\n\n # the resolved message for the reference will be a Message\n return self.reference.resolved.content # type: ignore\n\n if self.type is MessageType.guild_invite_reminder:\n return 'Wondering who to invite?\\nStart by inviting anyone who can help you build the server!'\n\n if self.type is MessageType.role_subscription_purchase and self.role_subscription is not None:\n # TODO: figure out how the message looks like for is_renewal: true\n total_months = self.role_subscription.total_months_subscribed\n months = '1 month' if total_months == 1 else f'{total_months} months'\n return f'{self.author.name} joined {self.role_subscription.tier_name} and has been a subscriber of {self.guild} for {months}!'\n\n if self.type is MessageType.stage_start:\n return f'{self.author.name} started **{self.content}**.'\n\n if self.type is MessageType.stage_end:\n return f'{self.author.name} ended **{self.content}**.'\n\n if self.type is MessageType.stage_speaker:\n return f'{self.author.name} is now a speaker.'\n\n if self.type is MessageType.stage_raise_hand:\n return f'{self.author.name} requested to speak.'\n\n if self.type is MessageType.stage_topic:\n return f'{self.author.name} changed Stage topic: **{self.content}**.'\n\n # Fallback for unknown message types\n return ''\n\n @overload\n async def edit(\n self,\n *,\n content: Optional[str] = ...,\n embed: Optional[Embed] = ...,\n attachments: Sequence[Union[Attachment, File]] = ...,\n suppress: bool = ...,\n delete_after: Optional[float] = ...,\n allowed_mentions: Optional[AllowedMentions] = ...,\n view: Optional[View] = ...,\n ) -> Message:\n ...\n\n @overload\n async def edit(\n self,\n *,\n content: Optional[str] = ...,\n embeds: Sequence[Embed] = ...,\n attachments: Sequence[Union[Attachment, File]] = ...,\n suppress: bool = ...,\n delete_after: Optional[float] = ...,\n allowed_mentions: Optional[AllowedMentions] = ...,\n view: Optional[View] = ...,\n ) -> Message:\n ...\n\n async def edit(\n self,\n *,\n content: Optional[str] = MISSING,\n embed: Optional[Embed] = MISSING,\n embeds: Sequence[Embed] = MISSING,\n attachments: Sequence[Union[Attachment, File]] = MISSING,\n suppress: bool = False,\n delete_after: Optional[float] = None,\n allowed_mentions: Optional[AllowedMentions] = MISSING,\n view: Optional[View] = MISSING,\n ) -> Message:\n \"\"\"|coro|\n\n Edits the message.\n\n The content must be able to be transformed into a string via ``str(content)``.\n\n .. 
versionchanged:: 1.3\n The ``suppress`` keyword-only parameter was added.\n\n .. versionchanged:: 2.0\n Edits are no longer in-place, the newly edited message is returned instead.\n\n .. versionchanged:: 2.0\n This function will now raise :exc:`TypeError` instead of\n ``InvalidArgument``.\n\n Parameters\n -----------\n content: Optional[:class:`str`]\n The new content to replace the message with.\n Could be ``None`` to remove the content.\n embed: Optional[:class:`Embed`]\n The new embed to replace the original with.\n Could be ``None`` to remove the embed.\n embeds: List[:class:`Embed`]\n The new embeds to replace the original with. Must be a maximum of 10.\n To remove all embeds ``[]`` should be passed.\n\n .. versionadded:: 2.0\n attachments: List[Union[:class:`Attachment`, :class:`File`]]\n A list of attachments to keep in the message as well as new files to upload. If ``[]`` is passed\n then all attachments are removed.\n\n .. note::\n\n New files will always appear after current attachments.\n\n .. versionadded:: 2.0\n suppress: :class:`bool`\n Whether to suppress embeds for the message. This removes\n all the embeds if set to ``True``. If set to ``False``\n this brings the embeds back if they were suppressed.\n Using this parameter requires :attr:`~.Permissions.manage_messages`.\n delete_after: Optional[:class:`float`]\n If provided, the number of seconds to wait in the background\n before deleting the message we just edited. If the deletion fails,\n then it is silently ignored.\n allowed_mentions: Optional[:class:`~discord.AllowedMentions`]\n Controls the mentions being processed in this message. If this is\n passed, then the object is merged with :attr:`~discord.Client.allowed_mentions`.\n The merging behaviour only overrides attributes that have been explicitly passed\n to the object, otherwise it uses the attributes set in :attr:`~discord.Client.allowed_mentions`.\n If no object is passed at all then the defaults given by :attr:`~discord.Client.allowed_mentions`\n are used instead.\n\n .. versionadded:: 1.4\n view: Optional[:class:`~discord.ui.View`]\n The updated view to update this message with. 
If ``None`` is passed then\n the view is removed.\n\n Raises\n -------\n HTTPException\n Editing the message failed.\n Forbidden\n Tried to suppress a message without permissions or\n edited a message's content or embed that isn't yours.\n TypeError\n You specified both ``embed`` and ``embeds``\n\n Returns\n --------\n :class:`Message`\n The newly edited message.\n \"\"\"\n\n if content is not MISSING:\n previous_allowed_mentions = self._state.allowed_mentions\n else:\n previous_allowed_mentions = None\n\n if suppress is not MISSING:\n flags = MessageFlags._from_value(self.flags.value)\n flags.suppress_embeds = suppress\n else:\n flags = MISSING\n\n if view is not MISSING:\n self._state.prevent_view_updates_for(self.id)\n\n with handle_message_parameters(\n content=content,\n flags=flags,\n embed=embed,\n embeds=embeds,\n attachments=attachments,\n view=view,\n allowed_mentions=allowed_mentions,\n previous_allowed_mentions=previous_allowed_mentions,\n ) as params:\n data = await self._state.http.edit_message(self.channel.id, self.id, params=params)\n message = Message(state=self._state, channel=self.channel, data=data)\n\n if view and not view.is_finished():\n self._state.store_view(view, self.id)\n\n if delete_after is not None:\n await self.delete(delay=delete_after)\n\n return message\n\n async def add_files(self, *files: File) -> Message:\n r\"\"\"|coro|\n\n Adds new files to the end of the message attachments.\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n \\*files: :class:`File`\n New files to add to the message.\n\n Raises\n -------\n HTTPException\n Editing the message failed.\n Forbidden\n Tried to edit a message that isn't yours.\n\n Returns\n --------\n :class:`Message`\n The newly edited message.\n \"\"\"\n return await self.edit(attachments=[*self.attachments, *files])\n\n async def remove_attachments(self, *attachments: Attachment) -> Message:\n r\"\"\"|coro|\n\n Removes attachments from the message.\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n \\*attachments: :class:`Attachment`\n Attachments to remove from the message.\n\n Raises\n -------\n HTTPException\n Editing the message failed.\n Forbidden\n Tried to edit a message that isn't yours.\n\n Returns\n --------\n :class:`Message`\n The newly edited message.\n \"\"\"\n return await self.edit(attachments=[a for a in self.attachments if a not in attachments])\n",
"path": "discord/message.py"
},
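A minimal sketch of how the ``Message.edit`` surface documented in the file above composes in practice. Everything here is hypothetical: the ``message`` object, the ``chart.png``/``appendix.txt`` paths, and the ``revise`` helper are placeholders, and the snippet assumes discord.py 2.x as reflected in the file contents above.

    # Sketch only: `message` is assumed to be a previously fetched Message,
    # and the file paths are placeholders.
    import discord

    async def revise(message: discord.Message) -> discord.Message:
        # Replace the text, drop every embed, and keep current attachments
        # while appending a new upload (new files appear after current ones).
        edited = await message.edit(
            content='updated report',
            embeds=[],  # an empty list removes all embeds
            attachments=[*message.attachments, discord.File('chart.png')],
        )
        # Since 2.0, edit() returns the newly edited Message instead of
        # mutating in place, so chain follow-ups off the return value.
        return await edited.add_files(discord.File('appendix.txt'))

Note that ``embed`` and ``embeds`` are mutually exclusive; per the docstring above, passing both raises ``TypeError``.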
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import List, Literal, Optional, TypedDict, Union\nfrom typing_extensions import NotRequired, Required\n\nfrom .snowflake import Snowflake, SnowflakeList\nfrom .member import Member, UserWithMember\nfrom .user import User\nfrom .emoji import PartialEmoji\nfrom .embed import Embed\nfrom .channel import ChannelType\nfrom .components import Component\nfrom .interactions import MessageInteraction\nfrom .sticker import StickerItem\nfrom .threads import Thread\n\n\nclass PartialMessage(TypedDict):\n channel_id: Snowflake\n guild_id: NotRequired[Snowflake]\n\n\nclass ChannelMention(TypedDict):\n id: Snowflake\n guild_id: Snowflake\n type: ChannelType\n name: str\n\n\nclass ReactionCountDetails(TypedDict):\n burst: int\n normal: int\n\n\nclass Reaction(TypedDict):\n count: int\n me: bool\n emoji: PartialEmoji\n me_burst: bool\n count_details: ReactionCountDetails\n burst_colors: List[str]\n\n\nclass Attachment(TypedDict):\n id: Snowflake\n filename: str\n size: int\n url: str\n proxy_url: str\n height: NotRequired[Optional[int]]\n width: NotRequired[Optional[int]]\n description: NotRequired[str]\n content_type: NotRequired[str]\n spoiler: NotRequired[bool]\n ephemeral: NotRequired[bool]\n duration_secs: NotRequired[float]\n waveform: NotRequired[str]\n flags: NotRequired[int]\n\n\nMessageActivityType = Literal[1, 2, 3, 5]\n\n\nclass MessageActivity(TypedDict):\n type: MessageActivityType\n party_id: str\n\n\nclass MessageApplication(TypedDict):\n id: Snowflake\n description: str\n icon: Optional[str]\n name: str\n cover_image: NotRequired[str]\n\n\nclass MessageReference(TypedDict, total=False):\n message_id: Snowflake\n channel_id: Required[Snowflake]\n guild_id: Snowflake\n fail_if_not_exists: bool\n\n\nclass RoleSubscriptionData(TypedDict):\n role_subscription_listing_id: Snowflake\n tier_name: str\n total_months_subscribed: int\n is_renewal: bool\n\n\nMessageType = Literal[\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 15, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32\n]\n\n\nclass Message(PartialMessage):\n id: Snowflake\n author: User\n content: str\n timestamp: str\n edited_timestamp: Optional[str]\n tts: bool\n mention_everyone: bool\n mentions: List[UserWithMember]\n mention_roles: SnowflakeList\n attachments: List[Attachment]\n embeds: List[Embed]\n pinned: bool\n type: MessageType\n member: 
NotRequired[Member]\n mention_channels: NotRequired[List[ChannelMention]]\n reactions: NotRequired[List[Reaction]]\n nonce: NotRequired[Union[int, str]]\n webhook_id: NotRequired[Snowflake]\n activity: NotRequired[MessageActivity]\n application: NotRequired[MessageApplication]\n application_id: NotRequired[Snowflake]\n message_reference: NotRequired[MessageReference]\n flags: NotRequired[int]\n sticker_items: NotRequired[List[StickerItem]]\n referenced_message: NotRequired[Optional[Message]]\n interaction: NotRequired[MessageInteraction]\n components: NotRequired[List[Component]]\n position: NotRequired[int]\n role_subscription_data: NotRequired[RoleSubscriptionData]\n thread: NotRequired[Thread]\n\n\nAllowedMentionType = Literal['roles', 'users', 'everyone']\n\n\nclass AllowedMentions(TypedDict):\n parse: List[AllowedMentionType]\n roles: SnowflakeList\n users: SnowflakeList\n replied_user: bool\n",
"path": "discord/types/message.py"
},
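A minimal sketch of how the typed payloads in the file above line up with the wire format. The snowflake value is a placeholder, and note these TypedDicts live in the internal ``discord.types`` package: they describe raw JSON shapes rather than the high-level models (the public ``discord.AllowedMentions`` model serializes to this same shape).

    # Sketch only: a hand-built allowed-mentions payload with a placeholder ID.
    from discord.types.message import AllowedMentions

    payload: AllowedMentions = {
        'parse': [],                      # no blanket mention parsing
        'roles': [],                      # no role pings
        'users': ['80528701850124288'],   # explicit allow-list of user snowflakes
        'replied_user': False,            # do not ping the replied-to author
    }

Because ``AllowedMentions`` is a total TypedDict, all four keys are required when building the payload by hand; an explicit ``users`` allow-list is paired with an empty ``parse`` list, since the Discord API rejects supplying both for the same mention type.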
{
"content": ".. currentmodule:: discord\n\nAPI Reference\n===============\n\nThe following section outlines the API of discord.py.\n\n.. note::\n\n This module uses the Python logging module to log diagnostic and errors\n in an output independent way. If the logging module is not configured,\n these logs will not be output anywhere. See :ref:`logging_setup` for\n more information on how to set up and use the logging module with\n discord.py.\n\nVersion Related Info\n---------------------\n\nThere are two main ways to query version information about the library. For guarantees, check :ref:`version_guarantees`.\n\n.. data:: version_info\n\n A named tuple that is similar to :obj:`py:sys.version_info`.\n\n Just like :obj:`py:sys.version_info` the valid values for ``releaselevel`` are\n 'alpha', 'beta', 'candidate' and 'final'.\n\n.. data:: __version__\n\n A string representation of the version. e.g. ``'1.0.0rc1'``. This is based\n off of :pep:`440`.\n\nClients\n--------\n\nClient\n~~~~~~~\n\n.. attributetable:: Client\n\n.. autoclass:: Client\n :members:\n :exclude-members: event\n\n .. automethod:: Client.event()\n :decorator:\n\nAutoShardedClient\n~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: AutoShardedClient\n\n.. autoclass:: AutoShardedClient\n :members:\n\nApplication Info\n------------------\n\nAppInfo\n~~~~~~~~\n\n.. attributetable:: AppInfo\n\n.. autoclass:: AppInfo()\n :members:\n\nPartialAppInfo\n~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialAppInfo\n\n.. autoclass:: PartialAppInfo()\n :members:\n\nAppInstallParams\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: AppInstallParams\n\n.. autoclass:: AppInstallParams()\n :members:\n\nTeam\n~~~~~\n\n.. attributetable:: Team\n\n.. autoclass:: Team()\n :members:\n\nTeamMember\n~~~~~~~~~~~\n\n.. attributetable:: TeamMember\n\n.. autoclass:: TeamMember()\n :members:\n :inherited-members:\n\nVoice Related\n---------------\n\nVoiceClient\n~~~~~~~~~~~~\n\n.. attributetable:: VoiceClient\n\n.. autoclass:: VoiceClient()\n :members:\n :exclude-members: connect, on_voice_state_update, on_voice_server_update\n\nVoiceProtocol\n~~~~~~~~~~~~~~~\n\n.. attributetable:: VoiceProtocol\n\n.. autoclass:: VoiceProtocol\n :members:\n\nAudioSource\n~~~~~~~~~~~~\n\n.. attributetable:: AudioSource\n\n.. autoclass:: AudioSource\n :members:\n\nPCMAudio\n~~~~~~~~~\n\n.. attributetable:: PCMAudio\n\n.. autoclass:: PCMAudio\n :members:\n\nFFmpegAudio\n~~~~~~~~~~~~\n\n.. attributetable:: FFmpegAudio\n\n.. autoclass:: FFmpegAudio\n :members:\n\nFFmpegPCMAudio\n~~~~~~~~~~~~~~~\n\n.. attributetable:: FFmpegPCMAudio\n\n.. autoclass:: FFmpegPCMAudio\n :members:\n\nFFmpegOpusAudio\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: FFmpegOpusAudio\n\n.. autoclass:: FFmpegOpusAudio\n :members:\n\nPCMVolumeTransformer\n~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PCMVolumeTransformer\n\n.. autoclass:: PCMVolumeTransformer\n :members:\n\nOpus Library\n~~~~~~~~~~~~~\n\n.. autofunction:: discord.opus.load_opus\n\n.. autofunction:: discord.opus.is_loaded\n\n.. _discord-api-events:\n\nEvent Reference\n---------------\n\nThis section outlines the different types of events listened by :class:`Client`.\n\nThere are two ways to register an event, the first way is through the use of\n:meth:`Client.event`. The second way is through subclassing :class:`Client` and\noverriding the specific events. 
For example: ::\n\n import discord\n\n class MyClient(discord.Client):\n async def on_message(self, message):\n if message.author == self.user:\n return\n\n if message.content.startswith('$hello'):\n await message.channel.send('Hello World!')\n\n\nIf an event handler raises an exception, :func:`on_error` will be called\nto handle it, which defaults to logging the traceback and ignoring the exception.\n\n.. warning::\n\n All the events must be a |coroutine_link|_. If they aren't, then you might get unexpected\n errors. In order to turn a function into a coroutine they must be ``async def``\n functions.\n\nApp Commands\n~~~~~~~~~~~~~\n\n.. function:: on_raw_app_command_permissions_update(payload)\n\n Called when application command permissions are updated.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawAppCommandPermissionsUpdateEvent`\n\n.. function:: on_app_command_completion(interaction, command)\n\n Called when a :class:`app_commands.Command` or :class:`app_commands.ContextMenu` has\n successfully completed without error.\n\n .. versionadded:: 2.0\n\n :param interaction: The interaction of the command.\n :type interaction: :class:`Interaction`\n :param command: The command that completed successfully\n :type command: Union[:class:`app_commands.Command`, :class:`app_commands.ContextMenu`]\n\nAutoMod\n~~~~~~~~\n\n.. function:: on_automod_rule_create(rule)\n\n Called when a :class:`AutoModRule` is created.\n You must have :attr:`~Permissions.manage_guild` to receive this.\n\n This requires :attr:`Intents.auto_moderation_configuration` to be enabled.\n\n .. versionadded:: 2.0\n\n :param rule: The rule that was created.\n :type rule: :class:`AutoModRule`\n\n.. function:: on_automod_rule_update(rule)\n\n Called when a :class:`AutoModRule` is updated.\n You must have :attr:`~Permissions.manage_guild` to receive this.\n\n This requires :attr:`Intents.auto_moderation_configuration` to be enabled.\n\n .. versionadded:: 2.0\n\n :param rule: The rule that was updated.\n :type rule: :class:`AutoModRule`\n\n.. function:: on_automod_rule_delete(rule)\n\n Called when a :class:`AutoModRule` is deleted.\n You must have :attr:`~Permissions.manage_guild` to receive this.\n\n This requires :attr:`Intents.auto_moderation_configuration` to be enabled.\n\n .. versionadded:: 2.0\n\n :param rule: The rule that was deleted.\n :type rule: :class:`AutoModRule`\n\n.. function:: on_automod_action(execution)\n\n Called when a :class:`AutoModAction` is created/performed.\n You must have :attr:`~Permissions.manage_guild` to receive this.\n\n This requires :attr:`Intents.auto_moderation_execution` to be enabled.\n\n .. versionadded:: 2.0\n\n :param execution: The rule execution that was performed.\n :type execution: :class:`AutoModAction`\n\nChannels\n~~~~~~~~~\n\n.. function:: on_guild_channel_delete(channel)\n on_guild_channel_create(channel)\n\n Called whenever a guild channel is deleted or created.\n\n Note that you can get the guild from :attr:`~abc.GuildChannel.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param channel: The guild channel that got created or deleted.\n :type channel: :class:`abc.GuildChannel`\n\n.. function:: on_guild_channel_update(before, after)\n\n Called whenever a guild channel is updated. e.g. 
changed name, topic, permissions.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param before: The updated guild channel's old info.\n :type before: :class:`abc.GuildChannel`\n :param after: The updated guild channel's new info.\n :type after: :class:`abc.GuildChannel`\n\n.. function:: on_guild_channel_pins_update(channel, last_pin)\n\n Called whenever a message is pinned or unpinned from a guild channel.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param channel: The guild channel that had its pins updated.\n :type channel: Union[:class:`abc.GuildChannel`, :class:`Thread`]\n :param last_pin: The latest message that was pinned as an aware datetime in UTC. Could be ``None``.\n :type last_pin: Optional[:class:`datetime.datetime`]\n\n.. function:: on_private_channel_update(before, after)\n\n Called whenever a private group DM is updated. e.g. changed name or topic.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param before: The updated group channel's old info.\n :type before: :class:`GroupChannel`\n :param after: The updated group channel's new info.\n :type after: :class:`GroupChannel`\n\n.. function:: on_private_channel_pins_update(channel, last_pin)\n\n Called whenever a message is pinned or unpinned from a private channel.\n\n :param channel: The private channel that had its pins updated.\n :type channel: :class:`abc.PrivateChannel`\n :param last_pin: The latest message that was pinned as an aware datetime in UTC. Could be ``None``.\n :type last_pin: Optional[:class:`datetime.datetime`]\n\n.. function:: on_typing(channel, user, when)\n\n Called when someone begins typing a message.\n\n The ``channel`` parameter can be a :class:`abc.Messageable` instance.\n Which could either be :class:`TextChannel`, :class:`GroupChannel`, or\n :class:`DMChannel`.\n\n If the ``channel`` is a :class:`TextChannel` then the ``user`` parameter\n is a :class:`Member`, otherwise it is a :class:`User`.\n\n If the channel or user could not be found in the internal cache this event\n will not be called, you may use :func:`on_raw_typing` instead.\n\n This requires :attr:`Intents.typing` to be enabled.\n\n :param channel: The location where the typing originated from.\n :type channel: :class:`abc.Messageable`\n :param user: The user that started typing.\n :type user: Union[:class:`User`, :class:`Member`]\n :param when: When the typing started as an aware datetime in UTC.\n :type when: :class:`datetime.datetime`\n\n.. function:: on_raw_typing(payload)\n\n Called when someone begins typing a message. Unlike :func:`on_typing` this\n is called regardless of the channel and user being in the internal cache.\n\n This requires :attr:`Intents.typing` to be enabled.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawTypingEvent`\n\nConnection\n~~~~~~~~~~~\n\n.. function:: on_connect()\n\n Called when the client has successfully connected to Discord. This is not\n the same as the client being fully prepared, see :func:`on_ready` for that.\n\n The warnings on :func:`on_ready` also apply.\n\n.. function:: on_disconnect()\n\n Called when the client has disconnected from Discord, or a connection attempt to Discord has failed.\n This could happen either through the internet being disconnected, explicit calls to close,\n or Discord terminating the connection one way or the other.\n\n This function can be called many times without a corresponding :func:`on_connect` call.\n\n.. 
function:: on_shard_connect(shard_id)\n\n Similar to :func:`on_connect` except used by :class:`AutoShardedClient`\n to denote when a particular shard ID has connected to Discord.\n\n .. versionadded:: 1.4\n\n :param shard_id: The shard ID that has connected.\n :type shard_id: :class:`int`\n\n\n.. function:: on_shard_disconnect(shard_id)\n\n Similar to :func:`on_disconnect` except used by :class:`AutoShardedClient`\n to denote when a particular shard ID has disconnected from Discord.\n\n .. versionadded:: 1.4\n\n :param shard_id: The shard ID that has disconnected.\n :type shard_id: :class:`int`\n\nDebug\n~~~~~~\n\n.. function:: on_error(event, *args, **kwargs)\n\n Usually when an event raises an uncaught exception, a traceback is\n logged to stderr and the exception is ignored. If you want to\n change this behaviour and handle the exception for whatever reason\n yourself, this event can be overridden. Which, when done, will\n suppress the default action of printing the traceback.\n\n The information of the exception raised and the exception itself can\n be retrieved with a standard call to :func:`sys.exc_info`.\n\n .. note::\n\n ``on_error`` will only be dispatched to :meth:`Client.event`.\n\n It will not be received by :meth:`Client.wait_for`, or, if used,\n :ref:`ext_commands_api_bot` listeners such as\n :meth:`~ext.commands.Bot.listen` or :meth:`~ext.commands.Cog.listener`.\n\n .. versionchanged:: 2.0\n\n The traceback is now logged rather than printed.\n\n :param event: The name of the event that raised the exception.\n :type event: :class:`str`\n\n :param args: The positional arguments for the event that raised the\n exception.\n :param kwargs: The keyword arguments for the event that raised the\n exception.\n\n.. function:: on_socket_event_type(event_type)\n\n Called whenever a websocket event is received from the WebSocket.\n\n This is mainly useful for logging how many events you are receiving\n from the Discord gateway.\n\n .. versionadded:: 2.0\n\n :param event_type: The event type from Discord that is received, e.g. ``'READY'``.\n :type event_type: :class:`str`\n\n.. function:: on_socket_raw_receive(msg)\n\n Called whenever a message is completely received from the WebSocket, before\n it's processed and parsed. This event is always dispatched when a\n complete message is received and the passed data is not parsed in any way.\n\n This is only really useful for grabbing the WebSocket stream and\n debugging purposes.\n\n This requires setting the ``enable_debug_events`` setting in the :class:`Client`.\n\n .. note::\n\n This is only for the messages received from the client\n WebSocket. The voice WebSocket will not trigger this event.\n\n :param msg: The message passed in from the WebSocket library.\n :type msg: :class:`str`\n\n.. function:: on_socket_raw_send(payload)\n\n Called whenever a send operation is done on the WebSocket before the\n message is sent. The passed parameter is the message that is being\n sent to the WebSocket.\n\n This is only really useful for grabbing the WebSocket stream and\n debugging purposes.\n\n This requires setting the ``enable_debug_events`` setting in the :class:`Client`.\n\n .. note::\n\n This is only for the messages sent from the client\n WebSocket. The voice WebSocket will not trigger this event.\n\n :param payload: The message that is about to be passed on to the\n WebSocket library. 
It can be :class:`bytes` to denote a binary\n message or :class:`str` to denote a regular text message.\n :type payload: Union[:class:`bytes`, :class:`str`]\n\n\nEntitlements\n~~~~~~~~~~~~\n\n.. function:: on_entitlement_create(entitlement)\n\n Called when a user subscribes to a SKU.\n\n .. versionadded:: 2.4\n\n :param entitlement: The entitlement that was created.\n :type entitlement: :class:`Entitlement`\n\n.. function:: on_entitlement_update(entitlement)\n\n Called when a user updates their subscription to a SKU. This is usually called when\n the user renews or cancels their subscription.\n\n .. versionadded:: 2.4\n\n :param entitlement: The entitlement that was updated.\n :type entitlement: :class:`Entitlement`\n\n.. function:: on_entitlement_delete(entitlement)\n\n Called when a users subscription to a SKU is cancelled. This is typically only called when:\n\n - Discord issues a refund for the subscription.\n - Discord removes an entitlement from a user.\n\n .. warning::\n\n This event won't be called if the user cancels their subscription manually, instead\n :func:`on_entitlement_update` will be called with :attr:`Entitlement.ends_at` set to the end of the\n current billing period.\n\n .. versionadded:: 2.4\n\n :param entitlement: The entitlement that was deleted.\n :type entitlement: :class:`Entitlement`\n\n\nGateway\n~~~~~~~~\n\n.. function:: on_ready()\n\n Called when the client is done preparing the data received from Discord. Usually after login is successful\n and the :attr:`Client.guilds` and co. are filled up.\n\n .. warning::\n\n This function is not guaranteed to be the first event called.\n Likewise, this function is **not** guaranteed to only be called\n once. This library implements reconnection logic and thus will\n end up calling this event whenever a RESUME request fails.\n\n.. function:: on_resumed()\n\n Called when the client has resumed a session.\n\n.. function:: on_shard_ready(shard_id)\n\n Similar to :func:`on_ready` except used by :class:`AutoShardedClient`\n to denote when a particular shard ID has become ready.\n\n :param shard_id: The shard ID that is ready.\n :type shard_id: :class:`int`\n\n\n.. function:: on_shard_resumed(shard_id)\n\n Similar to :func:`on_resumed` except used by :class:`AutoShardedClient`\n to denote when a particular shard ID has resumed a session.\n\n .. versionadded:: 1.4\n\n :param shard_id: The shard ID that has resumed.\n :type shard_id: :class:`int`\n\nGuilds\n~~~~~~~\n\n.. function:: on_guild_available(guild)\n on_guild_unavailable(guild)\n\n Called when a guild becomes available or unavailable. The guild must have\n existed in the :attr:`Client.guilds` cache.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param guild: The :class:`Guild` that has changed availability.\n\n.. function:: on_guild_join(guild)\n\n Called when a :class:`Guild` is either created by the :class:`Client` or when the\n :class:`Client` joins a guild.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param guild: The guild that was joined.\n :type guild: :class:`Guild`\n\n.. function:: on_guild_remove(guild)\n\n Called when a :class:`Guild` is removed from the :class:`Client`.\n\n This happens through, but not limited to, these circumstances:\n\n - The client got banned.\n - The client got kicked.\n - The client left the guild.\n - The client or the guild owner deleted the guild.\n\n In order for this event to be invoked then the :class:`Client` must have\n been part of the guild to begin with. (i.e. 
it is part of :attr:`Client.guilds`)\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param guild: The guild that got removed.\n :type guild: :class:`Guild`\n\n.. function:: on_guild_update(before, after)\n\n Called when a :class:`Guild` updates, for example:\n\n - Changed name\n - Changed AFK channel\n - Changed AFK timeout\n - etc\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param before: The guild prior to being updated.\n :type before: :class:`Guild`\n :param after: The guild after being updated.\n :type after: :class:`Guild`\n\n.. function:: on_guild_emojis_update(guild, before, after)\n\n Called when a :class:`Guild` adds or removes :class:`Emoji`.\n\n This requires :attr:`Intents.emojis_and_stickers` to be enabled.\n\n :param guild: The guild who got their emojis updated.\n :type guild: :class:`Guild`\n :param before: A list of emojis before the update.\n :type before: Sequence[:class:`Emoji`]\n :param after: A list of emojis after the update.\n :type after: Sequence[:class:`Emoji`]\n\n.. function:: on_guild_stickers_update(guild, before, after)\n\n Called when a :class:`Guild` updates its stickers.\n\n This requires :attr:`Intents.emojis_and_stickers` to be enabled.\n\n .. versionadded:: 2.0\n\n :param guild: The guild who got their stickers updated.\n :type guild: :class:`Guild`\n :param before: A list of stickers before the update.\n :type before: Sequence[:class:`GuildSticker`]\n :param after: A list of stickers after the update.\n :type after: Sequence[:class:`GuildSticker`]\n\n.. function:: on_audit_log_entry_create(entry)\n\n Called when a :class:`Guild` gets a new audit log entry.\n You must have :attr:`~Permissions.view_audit_log` to receive this.\n\n This requires :attr:`Intents.moderation` to be enabled.\n\n .. versionadded:: 2.2\n\n .. warning::\n\n Audit log entries received through the gateway are subject to data retrieval\n from cache rather than REST. This means that some data might not be present\n when you expect it to be. For example, the :attr:`AuditLogEntry.target`\n attribute will usually be a :class:`discord.Object` and the\n :attr:`AuditLogEntry.user` attribute will depend on user and member cache.\n\n To get the user ID of entry, :attr:`AuditLogEntry.user_id` can be used instead.\n\n :param entry: The audit log entry that was created.\n :type entry: :class:`AuditLogEntry`\n\n.. function:: on_invite_create(invite)\n\n Called when an :class:`Invite` is created.\n You must have :attr:`~Permissions.manage_channels` to receive this.\n\n .. versionadded:: 1.3\n\n .. note::\n\n There is a rare possibility that the :attr:`Invite.guild` and :attr:`Invite.channel`\n attributes will be of :class:`Object` rather than the respective models.\n\n This requires :attr:`Intents.invites` to be enabled.\n\n :param invite: The invite that was created.\n :type invite: :class:`Invite`\n\n.. function:: on_invite_delete(invite)\n\n Called when an :class:`Invite` is deleted.\n You must have :attr:`~Permissions.manage_channels` to receive this.\n\n .. versionadded:: 1.3\n\n .. 
note::\n\n There is a rare possibility that the :attr:`Invite.guild` and :attr:`Invite.channel`\n attributes will be of :class:`Object` rather than the respective models.\n\n Outside of those two attributes, the only other attribute guaranteed to be\n filled by the Discord gateway for this event is :attr:`Invite.code`.\n\n This requires :attr:`Intents.invites` to be enabled.\n\n :param invite: The invite that was deleted.\n :type invite: :class:`Invite`\n\n\nIntegrations\n~~~~~~~~~~~~~\n\n.. function:: on_integration_create(integration)\n\n Called when an integration is created.\n\n This requires :attr:`Intents.integrations` to be enabled.\n\n .. versionadded:: 2.0\n\n :param integration: The integration that was created.\n :type integration: :class:`Integration`\n\n.. function:: on_integration_update(integration)\n\n Called when an integration is updated.\n\n This requires :attr:`Intents.integrations` to be enabled.\n\n .. versionadded:: 2.0\n\n :param integration: The integration that was updated.\n :type integration: :class:`Integration`\n\n.. function:: on_guild_integrations_update(guild)\n\n Called whenever an integration is created, modified, or removed from a guild.\n\n This requires :attr:`Intents.integrations` to be enabled.\n\n .. versionadded:: 1.4\n\n :param guild: The guild that had its integrations updated.\n :type guild: :class:`Guild`\n\n.. function:: on_webhooks_update(channel)\n\n Called whenever a webhook is created, modified, or removed from a guild channel.\n\n This requires :attr:`Intents.webhooks` to be enabled.\n\n :param channel: The channel that had its webhooks updated.\n :type channel: :class:`abc.GuildChannel`\n\n.. function:: on_raw_integration_delete(payload)\n\n Called when an integration is deleted.\n\n This requires :attr:`Intents.integrations` to be enabled.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawIntegrationDeleteEvent`\n\nInteractions\n~~~~~~~~~~~~~\n\n.. function:: on_interaction(interaction)\n\n Called when an interaction happened.\n\n This currently happens due to slash command invocations or components being used.\n\n .. warning::\n\n This is a low level function that is not generally meant to be used.\n If you are working with components, consider using the callbacks associated\n with the :class:`~discord.ui.View` instead as it provides a nicer user experience.\n\n .. versionadded:: 2.0\n\n :param interaction: The interaction data.\n :type interaction: :class:`Interaction`\n\nMembers\n~~~~~~~~\n\n.. function:: on_member_join(member)\n\n Called when a :class:`Member` joins a :class:`Guild`.\n\n This requires :attr:`Intents.members` to be enabled.\n\n :param member: The member who joined.\n :type member: :class:`Member`\n\n.. function:: on_member_remove(member)\n\n Called when a :class:`Member` leaves a :class:`Guild`.\n\n If the guild or member could not be found in the internal cache this event\n will not be called, you may use :func:`on_raw_member_remove` instead.\n\n This requires :attr:`Intents.members` to be enabled.\n\n :param member: The member who left.\n :type member: :class:`Member`\n\n.. function:: on_raw_member_remove(payload)\n\n Called when a :class:`Member` leaves a :class:`Guild`.\n\n Unlike :func:`on_member_remove`\n this is called regardless of the guild or member being in the internal cache.\n\n This requires :attr:`Intents.members` to be enabled.\n\n .. 
versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawMemberRemoveEvent`\n\n.. function:: on_member_update(before, after)\n\n Called when a :class:`Member` updates their profile.\n\n This is called when one or more of the following things change:\n\n - nickname\n - roles\n - pending\n - timeout\n - guild avatar\n - flags\n\n Due to a Discord limitation, this event is not dispatched when a member's timeout expires.\n\n This requires :attr:`Intents.members` to be enabled.\n\n :param before: The updated member's old info.\n :type before: :class:`Member`\n :param after: The updated member's updated info.\n :type after: :class:`Member`\n\n.. function:: on_user_update(before, after)\n\n Called when a :class:`User` updates their profile.\n\n This is called when one or more of the following things change:\n\n - avatar\n - username\n - discriminator\n\n This requires :attr:`Intents.members` to be enabled.\n\n :param before: The updated user's old info.\n :type before: :class:`User`\n :param after: The updated user's updated info.\n :type after: :class:`User`\n\n.. function:: on_member_ban(guild, user)\n\n Called when a user gets banned from a :class:`Guild`.\n\n This requires :attr:`Intents.moderation` to be enabled.\n\n :param guild: The guild the user got banned from.\n :type guild: :class:`Guild`\n :param user: The user that got banned.\n Can be either :class:`User` or :class:`Member` depending if\n the user was in the guild or not at the time of removal.\n :type user: Union[:class:`User`, :class:`Member`]\n\n.. function:: on_member_unban(guild, user)\n\n Called when a :class:`User` gets unbanned from a :class:`Guild`.\n\n This requires :attr:`Intents.moderation` to be enabled.\n\n :param guild: The guild the user got unbanned from.\n :type guild: :class:`Guild`\n :param user: The user that got unbanned.\n :type user: :class:`User`\n\n.. function:: on_presence_update(before, after)\n\n Called when a :class:`Member` updates their presence.\n\n This is called when one or more of the following things change:\n\n - status\n - activity\n\n This requires :attr:`Intents.presences` and :attr:`Intents.members` to be enabled.\n\n .. versionadded:: 2.0\n\n :param before: The updated member's old info.\n :type before: :class:`Member`\n :param after: The updated member's updated info.\n :type after: :class:`Member`\n\nMessages\n~~~~~~~~~\n\n.. function:: on_message(message)\n\n Called when a :class:`Message` is created and sent.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n .. warning::\n\n Your bot's own messages and private messages are sent through this\n event. This can lead cases of 'recursion' depending on how your bot was\n programmed. If you want the bot to not reply to itself, consider\n checking the user IDs. Note that :class:`~ext.commands.Bot` does not\n have this problem.\n\n :param message: The current message.\n :type message: :class:`Message`\n\n.. function:: on_message_edit(before, after)\n\n Called when a :class:`Message` receives an update event. 
If the message is not found\n in the internal message cache, then these events will not be called.\n Messages might not be in cache if the message is too old\n or the client is participating in high traffic guilds.\n\n If this occurs increase the :class:`max_messages <Client>` parameter\n or use the :func:`on_raw_message_edit` event instead.\n\n The following non-exhaustive cases trigger this event:\n\n - A message has been pinned or unpinned.\n - The message content has been changed.\n - The message has received an embed.\n\n - For performance reasons, the embed server does not do this in a \"consistent\" manner.\n\n - The message's embeds were suppressed or unsuppressed.\n - A call message has received an update to its participants or ending time.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param before: The previous version of the message.\n :type before: :class:`Message`\n :param after: The current version of the message.\n :type after: :class:`Message`\n\n.. function:: on_message_delete(message)\n\n Called when a message is deleted. If the message is not found in the\n internal message cache, then this event will not be called.\n Messages might not be in cache if the message is too old\n or the client is participating in high traffic guilds.\n\n If this occurs increase the :class:`max_messages <Client>` parameter\n or use the :func:`on_raw_message_delete` event instead.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param message: The deleted message.\n :type message: :class:`Message`\n\n.. function:: on_bulk_message_delete(messages)\n\n Called when messages are bulk deleted. If none of the messages deleted\n are found in the internal message cache, then this event will not be called.\n If individual messages were not found in the internal message cache,\n this event will still be called, but the messages not found will not be included in\n the messages list. Messages might not be in cache if the message is too old\n or the client is participating in high traffic guilds.\n\n If this occurs increase the :class:`max_messages <Client>` parameter\n or use the :func:`on_raw_bulk_message_delete` event instead.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param messages: The messages that have been deleted.\n :type messages: List[:class:`Message`]\n\n.. function:: on_raw_message_edit(payload)\n\n Called when a message is edited. Unlike :func:`on_message_edit`, this is called\n regardless of the state of the internal message cache.\n\n If the message is found in the message cache,\n it can be accessed via :attr:`RawMessageUpdateEvent.cached_message`. The cached message represents\n the message before it has been edited. For example, if the content of a message is modified and\n triggers the :func:`on_raw_message_edit` coroutine, the :attr:`RawMessageUpdateEvent.cached_message`\n will return a :class:`Message` object that represents the message before the content was modified.\n\n Due to the inherently raw nature of this event, the data parameter coincides with\n the raw data given by the :ddocs:`gateway <topics/gateway#message-update>`.\n\n Since the data payload can be partial, care must be taken when accessing stuff in the dictionary.\n One example of a common case of partial data is when the ``'content'`` key is inaccessible. 
This\n denotes an \"embed\" only edit, which is an edit in which only the embeds are updated by the Discord\n embed server.\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawMessageUpdateEvent`\n\n\n.. function:: on_raw_message_delete(payload)\n\n Called when a message is deleted. Unlike :func:`on_message_delete`, this is\n called regardless of the message being in the internal message cache or not.\n\n If the message is found in the message cache,\n it can be accessed via :attr:`RawMessageDeleteEvent.cached_message`\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawMessageDeleteEvent`\n\n.. function:: on_raw_bulk_message_delete(payload)\n\n Called when a bulk delete is triggered. Unlike :func:`on_bulk_message_delete`, this is\n called regardless of the messages being in the internal message cache or not.\n\n If the messages are found in the message cache,\n they can be accessed via :attr:`RawBulkMessageDeleteEvent.cached_messages`\n\n This requires :attr:`Intents.messages` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawBulkMessageDeleteEvent`\n\nReactions\n~~~~~~~~~~\n\n.. function:: on_reaction_add(reaction, user)\n\n Called when a message has a reaction added to it. Similar to :func:`on_message_edit`,\n if the message is not found in the internal message cache, then this\n event will not be called. Consider using :func:`on_raw_reaction_add` instead.\n\n .. note::\n\n To get the :class:`Message` being reacted, access it via :attr:`Reaction.message`.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n .. note::\n\n This doesn't require :attr:`Intents.members` within a guild context,\n but due to Discord not providing updated user information in a direct message\n it's required for direct messages to receive this event.\n Consider using :func:`on_raw_reaction_add` if you need this and do not otherwise want\n to enable the members intent.\n\n .. warning::\n\n This event does not have a way of differentiating whether a reaction is a\n burst reaction (also known as \"super reaction\") or not. If you need this,\n consider using :func:`on_raw_reaction_add` instead.\n\n :param reaction: The current state of the reaction.\n :type reaction: :class:`Reaction`\n :param user: The user who added the reaction.\n :type user: Union[:class:`Member`, :class:`User`]\n\n.. function:: on_reaction_remove(reaction, user)\n\n Called when a message has a reaction removed from it. Similar to on_message_edit,\n if the message is not found in the internal message cache, then this event\n will not be called.\n\n .. note::\n\n To get the message being reacted, access it via :attr:`Reaction.message`.\n\n This requires both :attr:`Intents.reactions` and :attr:`Intents.members` to be enabled.\n\n .. note::\n\n Consider using :func:`on_raw_reaction_remove` if you need this and do not want\n to enable the members intent.\n\n .. warning::\n\n This event does not have a way of differentiating whether a reaction is a\n burst reaction (also known as \"super reaction\") or not. If you need this,\n consider using :func:`on_raw_reaction_remove` instead.\n\n :param reaction: The current state of the reaction.\n :type reaction: :class:`Reaction`\n :param user: The user whose reaction was removed.\n :type user: Union[:class:`Member`, :class:`User`]\n\n.. 
function:: on_reaction_clear(message, reactions)\n\n Called when a message has all its reactions removed from it. Similar to :func:`on_message_edit`,\n if the message is not found in the internal message cache, then this event\n will not be called. Consider using :func:`on_raw_reaction_clear` instead.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n :param message: The message that had its reactions cleared.\n :type message: :class:`Message`\n :param reactions: The reactions that were removed.\n :type reactions: List[:class:`Reaction`]\n\n.. function:: on_reaction_clear_emoji(reaction)\n\n Called when a message has a specific reaction removed from it. Similar to :func:`on_message_edit`,\n if the message is not found in the internal message cache, then this event\n will not be called. Consider using :func:`on_raw_reaction_clear_emoji` instead.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n .. versionadded:: 1.3\n\n :param reaction: The reaction that got cleared.\n :type reaction: :class:`Reaction`\n\n\n.. function:: on_raw_reaction_add(payload)\n\n Called when a message has a reaction added. Unlike :func:`on_reaction_add`, this is\n called regardless of the state of the internal message cache.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawReactionActionEvent`\n\n.. function:: on_raw_reaction_remove(payload)\n\n Called when a message has a reaction removed. Unlike :func:`on_reaction_remove`, this is\n called regardless of the state of the internal message cache.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawReactionActionEvent`\n\n.. function:: on_raw_reaction_clear(payload)\n\n Called when a message has all its reactions removed. Unlike :func:`on_reaction_clear`,\n this is called regardless of the state of the internal message cache.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawReactionClearEvent`\n\n.. function:: on_raw_reaction_clear_emoji(payload)\n\n Called when a message has a specific reaction removed from it. Unlike :func:`on_reaction_clear_emoji` this is called\n regardless of the state of the internal message cache.\n\n This requires :attr:`Intents.reactions` to be enabled.\n\n .. versionadded:: 1.3\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawReactionClearEmojiEvent`\n\n\nRoles\n~~~~~~\n\n.. function:: on_guild_role_create(role)\n on_guild_role_delete(role)\n\n Called when a :class:`Guild` creates or deletes a new :class:`Role`.\n\n To get the guild it belongs to, use :attr:`Role.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param role: The role that was created or deleted.\n :type role: :class:`Role`\n\n.. function:: on_guild_role_update(before, after)\n\n Called when a :class:`Role` is changed guild-wide.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n :param before: The updated role's old info.\n :type before: :class:`Role`\n :param after: The updated role's updated info.\n :type after: :class:`Role`\n\n\nScheduled Events\n~~~~~~~~~~~~~~~~~\n\n.. function:: on_scheduled_event_create(event)\n on_scheduled_event_delete(event)\n\n Called when a :class:`ScheduledEvent` is created or deleted.\n\n This requires :attr:`Intents.guild_scheduled_events` to be enabled.\n\n .. 
versionadded:: 2.0\n\n :param event: The scheduled event that was created or deleted.\n :type event: :class:`ScheduledEvent`\n\n.. function:: on_scheduled_event_update(before, after)\n\n Called when a :class:`ScheduledEvent` is updated.\n\n This requires :attr:`Intents.guild_scheduled_events` to be enabled.\n\n The following, but not limited to, examples illustrate when this event is called:\n\n - The scheduled start/end times are changed.\n - The channel is changed.\n - The description is changed.\n - The status is changed.\n - The image is changed.\n\n .. versionadded:: 2.0\n\n :param before: The scheduled event before the update.\n :type before: :class:`ScheduledEvent`\n :param after: The scheduled event after the update.\n :type after: :class:`ScheduledEvent`\n\n.. function:: on_scheduled_event_user_add(event, user)\n on_scheduled_event_user_remove(event, user)\n\n Called when a user is added or removed from a :class:`ScheduledEvent`.\n\n This requires :attr:`Intents.guild_scheduled_events` to be enabled.\n\n .. versionadded:: 2.0\n\n :param event: The scheduled event that the user was added or removed from.\n :type event: :class:`ScheduledEvent`\n :param user: The user that was added or removed.\n :type user: :class:`User`\n\n\nStages\n~~~~~~~\n\n.. function:: on_stage_instance_create(stage_instance)\n on_stage_instance_delete(stage_instance)\n\n Called when a :class:`StageInstance` is created or deleted for a :class:`StageChannel`.\n\n .. versionadded:: 2.0\n\n :param stage_instance: The stage instance that was created or deleted.\n :type stage_instance: :class:`StageInstance`\n\n.. function:: on_stage_instance_update(before, after)\n\n Called when a :class:`StageInstance` is updated.\n\n The following, but not limited to, examples illustrate when this event is called:\n\n - The topic is changed.\n - The privacy level is changed.\n\n .. versionadded:: 2.0\n\n :param before: The stage instance before the update.\n :type before: :class:`StageInstance`\n :param after: The stage instance after the update.\n :type after: :class:`StageInstance`\n\nThreads\n~~~~~~~~\n\n.. function:: on_thread_create(thread)\n\n Called whenever a thread is created.\n\n Note that you can get the guild from :attr:`Thread.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param thread: The thread that was created.\n :type thread: :class:`Thread`\n\n.. function:: on_thread_join(thread)\n\n Called whenever a thread is joined.\n\n Note that you can get the guild from :attr:`Thread.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param thread: The thread that got joined.\n :type thread: :class:`Thread`\n\n.. function:: on_thread_update(before, after)\n\n Called whenever a thread is updated. If the thread could\n not be found in the internal cache this event will not be called.\n Threads will not be in the cache if they are archived.\n\n If you need this information use :func:`on_raw_thread_update` instead.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param before: The updated thread's old info.\n :type before: :class:`Thread`\n :param after: The updated thread's new info.\n :type after: :class:`Thread`\n\n.. function:: on_thread_remove(thread)\n\n Called whenever a thread is removed. This is different from a thread being deleted.\n\n Note that you can get the guild from :attr:`Thread.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. 
warning::\n\n Due to technical limitations, this event might not be called\n as soon as one expects. Since the library tracks thread membership\n locally, the API only sends updated thread membership status upon being\n synced by joining a thread.\n\n .. versionadded:: 2.0\n\n :param thread: The thread that got removed.\n :type thread: :class:`Thread`\n\n.. function:: on_thread_delete(thread)\n\n Called whenever a thread is deleted. If the thread could\n not be found in the internal cache this event will not be called.\n Threads will not be in the cache if they are archived.\n\n If you need this information use :func:`on_raw_thread_delete` instead.\n\n Note that you can get the guild from :attr:`Thread.guild`.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param thread: The thread that got deleted.\n :type thread: :class:`Thread`\n\n.. function:: on_raw_thread_update(payload)\n\n Called whenever a thread is updated. Unlike :func:`on_thread_update` this\n is called regardless of the thread being in the internal thread cache or not.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawThreadUpdateEvent`\n\n.. function:: on_raw_thread_delete(payload)\n\n Called whenever a thread is deleted. Unlike :func:`on_thread_delete` this\n is called regardless of the thread being in the internal thread cache or not.\n\n This requires :attr:`Intents.guilds` to be enabled.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawThreadDeleteEvent`\n\n.. function:: on_thread_member_join(member)\n on_thread_member_remove(member)\n\n Called when a :class:`ThreadMember` leaves or joins a :class:`Thread`.\n\n You can get the thread a member belongs in by accessing :attr:`ThreadMember.thread`.\n\n This requires :attr:`Intents.members` to be enabled.\n\n .. versionadded:: 2.0\n\n :param member: The member who joined or left.\n :type member: :class:`ThreadMember`\n\n.. function:: on_raw_thread_member_remove(payload)\n\n Called when a :class:`ThreadMember` leaves a :class:`Thread`. Unlike :func:`on_thread_member_remove` this\n is called regardless of the member being in the internal thread's members cache or not.\n\n This requires :attr:`Intents.members` to be enabled.\n\n .. versionadded:: 2.0\n\n :param payload: The raw event payload data.\n :type payload: :class:`RawThreadMembersUpdate`\n\nVoice\n~~~~~~\n\n.. function:: on_voice_state_update(member, before, after)\n\n Called when a :class:`Member` changes their :class:`VoiceState`.\n\n The following, but not limited to, examples illustrate when this event is called:\n\n - A member joins a voice or stage channel.\n - A member leaves a voice or stage channel.\n - A member is muted or deafened by their own accord.\n - A member is muted or deafened by a guild administrator.\n\n This requires :attr:`Intents.voice_states` to be enabled.\n\n :param member: The member whose voice states changed.\n :type member: :class:`Member`\n :param before: The voice state prior to the changes.\n :type before: :class:`VoiceState`\n :param after: The voice state after the changes.\n :type after: :class:`VoiceState`\n\n.. _discord-api-utils:\n\nUtility Functions\n-----------------\n\n.. autofunction:: discord.utils.find\n\n.. autofunction:: discord.utils.get\n\n.. autofunction:: discord.utils.setup_logging\n\n.. autofunction:: discord.utils.maybe_coroutine\n\n.. 
autofunction:: discord.utils.snowflake_time\n\n.. autofunction:: discord.utils.time_snowflake\n\n.. autofunction:: discord.utils.oauth_url\n\n.. autofunction:: discord.utils.remove_markdown\n\n.. autofunction:: discord.utils.escape_markdown\n\n.. autofunction:: discord.utils.escape_mentions\n\n.. class:: ResolvedInvite\n\n A data class which represents a resolved invite returned from :func:`discord.utils.resolve_invite`.\n\n .. attribute:: code\n\n The invite code.\n\n :type: :class:`str`\n\n .. attribute:: event\n\n The id of the scheduled event that the invite refers to.\n\n :type: Optional[:class:`int`]\n\n.. autofunction:: discord.utils.resolve_invite\n\n.. autofunction:: discord.utils.resolve_template\n\n.. autofunction:: discord.utils.sleep_until\n\n.. autofunction:: discord.utils.utcnow\n\n.. autofunction:: discord.utils.format_dt\n\n.. autofunction:: discord.utils.as_chunks\n\n.. data:: MISSING\n :module: discord.utils\n\n A type safe sentinel used in the library to represent something as missing. Used to distinguish from ``None`` values.\n\n .. versionadded:: 2.0\n\n.. _discord-api-enums:\n\nEnumerations\n-------------\n\nThe API provides some enumerations for certain types of strings to avoid the API\nfrom being stringly typed in case the strings change in the future.\n\nAll enumerations are subclasses of an internal class which mimics the behaviour\nof :class:`enum.Enum`.\n\n.. class:: ChannelType\n\n Specifies the type of channel.\n\n .. attribute:: text\n\n A text channel.\n .. attribute:: voice\n\n A voice channel.\n .. attribute:: private\n\n A private text channel. Also called a direct message.\n .. attribute:: group\n\n A private group text channel.\n .. attribute:: category\n\n A category channel.\n .. attribute:: news\n\n A guild news channel.\n\n .. attribute:: stage_voice\n\n A guild stage voice channel.\n\n .. versionadded:: 1.7\n\n .. attribute:: news_thread\n\n A news thread\n\n .. versionadded:: 2.0\n\n .. attribute:: public_thread\n\n A public thread\n\n .. versionadded:: 2.0\n\n .. attribute:: private_thread\n\n A private thread\n\n .. versionadded:: 2.0\n\n .. attribute:: forum\n\n A forum channel.\n\n .. versionadded:: 2.0\n\n .. attribute:: media\n\n A media channel.\n\n .. versionadded:: 2.4\n\n.. class:: MessageType\n\n Specifies the type of :class:`Message`. This is used to denote if a message\n is to be interpreted as a system message or a regular message.\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two messages are equal.\n .. describe:: x != y\n\n Checks if two messages are not equal.\n\n .. attribute:: default\n\n The default message type. This is the same as regular messages.\n .. attribute:: recipient_add\n\n The system message when a user is added to a group private\n message or a thread.\n .. attribute:: recipient_remove\n\n The system message when a user is removed from a group private\n message or a thread.\n .. attribute:: call\n\n The system message denoting call state, e.g. missed call, started call,\n etc.\n .. attribute:: channel_name_change\n\n The system message denoting that a channel's name has been changed.\n .. attribute:: channel_icon_change\n\n The system message denoting that a channel's icon has been changed.\n .. attribute:: pins_add\n\n The system message denoting that a pinned message has been added to a channel.\n .. attribute:: new_member\n\n The system message denoting that a new member has joined a Guild.\n\n .. 
attribute:: premium_guild_subscription\n\n The system message denoting that a member has \"nitro boosted\" a guild.\n .. attribute:: premium_guild_tier_1\n\n The system message denoting that a member has \"nitro boosted\" a guild\n and it achieved level 1.\n .. attribute:: premium_guild_tier_2\n\n The system message denoting that a member has \"nitro boosted\" a guild\n and it achieved level 2.\n .. attribute:: premium_guild_tier_3\n\n The system message denoting that a member has \"nitro boosted\" a guild\n and it achieved level 3.\n .. attribute:: channel_follow_add\n\n The system message denoting that an announcement channel has been followed.\n\n .. versionadded:: 1.3\n .. attribute:: guild_stream\n\n The system message denoting that a member is streaming in the guild.\n\n .. versionadded:: 1.7\n .. attribute:: guild_discovery_disqualified\n\n The system message denoting that the guild is no longer eligible for Server\n Discovery.\n\n .. versionadded:: 1.7\n .. attribute:: guild_discovery_requalified\n\n The system message denoting that the guild has become eligible again for Server\n Discovery.\n\n .. versionadded:: 1.7\n .. attribute:: guild_discovery_grace_period_initial_warning\n\n The system message denoting that the guild has failed to meet the Server\n Discovery requirements for one week.\n\n .. versionadded:: 1.7\n .. attribute:: guild_discovery_grace_period_final_warning\n\n The system message denoting that the guild has failed to meet the Server\n Discovery requirements for 3 weeks in a row.\n\n .. versionadded:: 1.7\n .. attribute:: thread_created\n\n The system message denoting that a thread has been created. This is only\n sent if the thread has been created from an older message. The period of time\n required for a message to be considered old cannot be relied upon and is up to\n Discord.\n\n .. versionadded:: 2.0\n .. attribute:: reply\n\n The system message denoting that the author is replying to a message.\n\n .. versionadded:: 2.0\n .. attribute:: chat_input_command\n\n The system message denoting that a slash command was executed.\n\n .. versionadded:: 2.0\n .. attribute:: guild_invite_reminder\n\n The system message sent as a reminder to invite people to the guild.\n\n .. versionadded:: 2.0\n .. attribute:: thread_starter_message\n\n The system message denoting the message in the thread that is the one that started the\n thread's conversation topic.\n\n .. versionadded:: 2.0\n .. attribute:: context_menu_command\n\n The system message denoting that a context menu command was executed.\n\n .. versionadded:: 2.0\n .. attribute:: auto_moderation_action\n\n The system message sent when an AutoMod rule is triggered. This is only\n sent if the rule is configured to sent an alert when triggered.\n\n .. versionadded:: 2.0\n .. attribute:: role_subscription_purchase\n\n The system message sent when a user purchases or renews a role subscription.\n\n .. versionadded:: 2.2\n .. attribute:: interaction_premium_upsell\n\n The system message sent when a user is given an advertisement to purchase a premium tier for\n an application during an interaction.\n\n .. versionadded:: 2.2\n .. attribute:: stage_start\n\n The system message sent when the stage starts.\n\n .. versionadded:: 2.2\n .. attribute:: stage_end\n\n The system message sent when the stage ends.\n\n .. versionadded:: 2.2\n .. attribute:: stage_speaker\n\n The system message sent when the stage speaker changes.\n\n .. versionadded:: 2.2\n .. 
attribute:: stage_raise_hand\n\n The system message sent when a user is requesting to speak by raising their hands.\n\n .. versionadded:: 2.2\n .. attribute:: stage_topic\n\n The system message sent when the stage topic changes.\n\n .. versionadded:: 2.2\n .. attribute:: guild_application_premium_subscription\n\n The system message sent when an application's premium subscription is purchased for the guild.\n\n .. versionadded:: 2.2\n\n.. class:: UserFlags\n\n Represents Discord User flags.\n\n .. attribute:: staff\n\n The user is a Discord Employee.\n .. attribute:: partner\n\n The user is a Discord Partner.\n .. attribute:: hypesquad\n\n The user is a HypeSquad Events member.\n .. attribute:: bug_hunter\n\n The user is a Bug Hunter.\n .. attribute:: mfa_sms\n\n The user has SMS recovery for Multi Factor Authentication enabled.\n .. attribute:: premium_promo_dismissed\n\n The user has dismissed the Discord Nitro promotion.\n .. attribute:: hypesquad_bravery\n\n The user is a HypeSquad Bravery member.\n .. attribute:: hypesquad_brilliance\n\n The user is a HypeSquad Brilliance member.\n .. attribute:: hypesquad_balance\n\n The user is a HypeSquad Balance member.\n .. attribute:: early_supporter\n\n The user is an Early Supporter.\n .. attribute:: team_user\n\n The user is a Team User.\n .. attribute:: system\n\n The user is a system user (i.e. represents Discord officially).\n .. attribute:: has_unread_urgent_messages\n\n The user has an unread system message.\n .. attribute:: bug_hunter_level_2\n\n The user is a Bug Hunter Level 2.\n .. attribute:: verified_bot\n\n The user is a Verified Bot.\n .. attribute:: verified_bot_developer\n\n The user is an Early Verified Bot Developer.\n .. attribute:: discord_certified_moderator\n\n The user is a Moderator Programs Alumni.\n .. attribute:: bot_http_interactions\n\n The user is a bot that only uses HTTP interactions and is shown in the online member list.\n\n .. versionadded:: 2.0\n .. attribute:: spammer\n\n The user is flagged as a spammer by Discord.\n\n .. versionadded:: 2.0\n\n .. attribute:: active_developer\n\n The user is an active developer.\n\n .. versionadded:: 2.1\n\n.. class:: ActivityType\n\n Specifies the type of :class:`Activity`. This is used to check how to\n interpret the activity itself.\n\n .. attribute:: unknown\n\n An unknown activity type. This should generally not happen.\n .. attribute:: playing\n\n A \"Playing\" activity type.\n .. attribute:: streaming\n\n A \"Streaming\" activity type.\n .. attribute:: listening\n\n A \"Listening\" activity type.\n .. attribute:: watching\n\n A \"Watching\" activity type.\n .. attribute:: custom\n\n A custom activity type.\n .. attribute:: competing\n\n A competing activity type.\n\n .. versionadded:: 1.5\n\n.. class:: VerificationLevel\n\n Specifies a :class:`Guild`\\'s verification level, which is the criteria in\n which a member must meet before being able to send messages to the guild.\n\n .. container:: operations\n\n .. versionadded:: 2.0\n\n .. describe:: x == y\n\n Checks if two verification levels are equal.\n .. describe:: x != y\n\n Checks if two verification levels are not equal.\n .. describe:: x > y\n\n Checks if a verification level is higher than another.\n .. describe:: x < y\n\n Checks if a verification level is lower than another.\n .. describe:: x >= y\n\n Checks if a verification level is higher or equal to another.\n .. describe:: x <= y\n\n Checks if a verification level is lower or equal to another.\n\n .. attribute:: none\n\n No criteria set.\n .. 
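\n\n The comparison operations documented above make level checks concise. As a minimal, hedged sketch (``guild`` is assumed to be a :class:`Guild` your bot is connected to):\n\n .. code-block:: python\n\n     # Compare the guild's current level against a required minimum.\n     if guild.verification_level >= discord.VerificationLevel.medium:\n         print("Members need a verified email and a five-minute-old account.")\n\n .. 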
attribute:: low\n\n Member must have a verified email on their Discord account.\n .. attribute:: medium\n\n Member must have a verified email and be registered on Discord for more\n than five minutes.\n .. attribute:: high\n\n Member must have a verified email, be registered on Discord for more\n than five minutes, and be a member of the guild itself for more than\n ten minutes.\n .. attribute:: highest\n\n Member must have a verified phone on their Discord account.\n\n.. class:: NotificationLevel\n\n Specifies whether a :class:`Guild` has notifications on for all messages or mentions only by default.\n\n .. container:: operations\n\n .. versionadded:: 2.0\n\n .. describe:: x == y\n\n Checks if two notification levels are equal.\n .. describe:: x != y\n\n Checks if two notification levels are not equal.\n .. describe:: x > y\n\n Checks if a notification level is higher than another.\n .. describe:: x < y\n\n Checks if a notification level is lower than another.\n .. describe:: x >= y\n\n Checks if a notification level is higher or equal to another.\n .. describe:: x <= y\n\n Checks if a notification level is lower or equal to another.\n\n .. attribute:: all_messages\n\n Members receive notifications for every message regardless of them being mentioned.\n .. attribute:: only_mentions\n\n Members receive notifications for messages they are mentioned in.\n\n.. class:: ContentFilter\n\n Specifies a :class:`Guild`\\'s explicit content filter, which is the machine\n learning algorithms that Discord uses to detect if an image contains\n pornography or otherwise explicit content.\n\n .. container:: operations\n\n .. versionadded:: 2.0\n\n .. describe:: x == y\n\n Checks if two content filter levels are equal.\n .. describe:: x != y\n\n Checks if two content filter levels are not equal.\n .. describe:: x > y\n\n Checks if a content filter level is higher than another.\n .. describe:: x < y\n\n Checks if a content filter level is lower than another.\n .. describe:: x >= y\n\n Checks if a content filter level is higher or equal to another.\n .. describe:: x <= y\n\n Checks if a content filter level is lower or equal to another.\n\n .. attribute:: disabled\n\n The guild does not have the content filter enabled.\n .. attribute:: no_role\n\n The guild has the content filter enabled for members without a role.\n .. attribute:: all_members\n\n The guild has the content filter enabled for every member.\n\n.. class:: Status\n\n Specifies a :class:`Member` 's status.\n\n .. attribute:: online\n\n The member is online.\n .. attribute:: offline\n\n The member is offline.\n .. attribute:: idle\n\n The member is idle.\n .. attribute:: dnd\n\n The member is \"Do Not Disturb\".\n .. attribute:: do_not_disturb\n\n An alias for :attr:`dnd`.\n .. attribute:: invisible\n\n The member is \"invisible\". In reality, this is only used when sending\n a presence a la :meth:`Client.change_presence`. When you receive a\n user's presence this will be :attr:`offline` instead.\n\n\n.. class:: AuditLogAction\n\n Represents the type of action being done for a :class:`AuditLogEntry`\\,\n which is retrievable via :meth:`Guild.audit_logs`.\n\n .. attribute:: guild_update\n\n The guild has updated. 
Things that trigger this include:\n\n - Changing the guild vanity URL\n - Changing the guild invite splash\n - Changing the guild AFK channel or timeout\n - Changing the guild voice server region\n - Changing the guild icon, banner, or discovery splash\n - Changing the guild moderation settings\n - Changing things related to the guild widget\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Guild`.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.afk_channel`\n - :attr:`~AuditLogDiff.system_channel`\n - :attr:`~AuditLogDiff.afk_timeout`\n - :attr:`~AuditLogDiff.default_notifications`\n - :attr:`~AuditLogDiff.explicit_content_filter`\n - :attr:`~AuditLogDiff.mfa_level`\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.owner`\n - :attr:`~AuditLogDiff.splash`\n - :attr:`~AuditLogDiff.discovery_splash`\n - :attr:`~AuditLogDiff.icon`\n - :attr:`~AuditLogDiff.banner`\n - :attr:`~AuditLogDiff.vanity_url_code`\n - :attr:`~AuditLogDiff.description`\n - :attr:`~AuditLogDiff.preferred_locale`\n - :attr:`~AuditLogDiff.prune_delete_days`\n - :attr:`~AuditLogDiff.public_updates_channel`\n - :attr:`~AuditLogDiff.rules_channel`\n - :attr:`~AuditLogDiff.verification_level`\n - :attr:`~AuditLogDiff.widget_channel`\n - :attr:`~AuditLogDiff.widget_enabled`\n - :attr:`~AuditLogDiff.premium_progress_bar_enabled`\n - :attr:`~AuditLogDiff.system_channel_flags`\n\n .. attribute:: channel_create\n\n A new channel was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n either a :class:`abc.GuildChannel` or :class:`Object` with an ID.\n\n A more filled out object in the :class:`Object` case can be found\n by using :attr:`~AuditLogEntry.after`.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.type`\n - :attr:`~AuditLogDiff.overwrites`\n\n .. attribute:: channel_update\n\n A channel was updated. Things that trigger this include:\n\n - The channel name or topic was changed\n - The channel bitrate was changed\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`abc.GuildChannel` or :class:`Object` with an ID.\n\n A more filled out object in the :class:`Object` case can be found\n by using :attr:`~AuditLogEntry.after` or :attr:`~AuditLogEntry.before`.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.type`\n - :attr:`~AuditLogDiff.position`\n - :attr:`~AuditLogDiff.overwrites`\n - :attr:`~AuditLogDiff.topic`\n - :attr:`~AuditLogDiff.bitrate`\n - :attr:`~AuditLogDiff.rtc_region`\n - :attr:`~AuditLogDiff.video_quality_mode`\n - :attr:`~AuditLogDiff.default_auto_archive_duration`\n - :attr:`~AuditLogDiff.nsfw`\n - :attr:`~AuditLogDiff.slowmode_delay`\n - :attr:`~AuditLogDiff.user_limit`\n\n .. attribute:: channel_delete\n\n A channel was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n an :class:`Object` with an ID.\n\n A more filled out object can be found by using the\n :attr:`~AuditLogEntry.before` object.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.type`\n - :attr:`~AuditLogDiff.overwrites`\n - :attr:`~AuditLogDiff.flags`\n - :attr:`~AuditLogDiff.nsfw`\n - :attr:`~AuditLogDiff.slowmode_delay`\n\n .. 
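\n\n As a brief, illustrative sketch (not an exhaustive pattern), the channel actions above are typically consumed by iterating :meth:`Guild.audit_logs`; here ``guild`` is an assumed :class:`Guild` whose audit logs the bot may read:\n\n .. code-block:: python\n\n     async for entry in guild.audit_logs(limit=50, action=discord.AuditLogAction.channel_update):\n         # entry.before and entry.after are AuditLogDiff objects; only the\n         # attributes that actually changed are present on them.\n         print(entry.user, "updated", entry.target)\n\n .. 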
attribute:: overwrite_create\n\n A channel permission overwrite was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`abc.GuildChannel` or :class:`Object` with an ID.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n either a :class:`Role` or :class:`Member`. If the object is not found\n then it is a :class:`Object` with an ID being filled, a name, and a\n ``type`` attribute set to either ``'role'`` or ``'member'`` to help\n dictate what type of ID it is.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.deny`\n - :attr:`~AuditLogDiff.allow`\n - :attr:`~AuditLogDiff.id`\n - :attr:`~AuditLogDiff.type`\n\n .. attribute:: overwrite_update\n\n A channel permission overwrite was changed, this is typically\n when the permission values change.\n\n See :attr:`overwrite_create` for more information on how the\n :attr:`~AuditLogEntry.target` and :attr:`~AuditLogEntry.extra` fields\n are set.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.deny`\n - :attr:`~AuditLogDiff.allow`\n - :attr:`~AuditLogDiff.id`\n - :attr:`~AuditLogDiff.type`\n\n .. attribute:: overwrite_delete\n\n A channel permission overwrite was deleted.\n\n See :attr:`overwrite_create` for more information on how the\n :attr:`~AuditLogEntry.target` and :attr:`~AuditLogEntry.extra` fields\n are set.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.deny`\n - :attr:`~AuditLogDiff.allow`\n - :attr:`~AuditLogDiff.id`\n - :attr:`~AuditLogDiff.type`\n\n .. attribute:: kick\n\n A member was kicked.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`User` or :class:`Object` who got kicked.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with one attribute:\n\n - ``integration_type``: An optional string that denotes the type of integration that did the action.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. attribute:: member_prune\n\n A member prune was triggered.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n set to ``None``.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with two attributes:\n\n - ``delete_member_days``: An integer specifying how far the prune was.\n - ``members_removed``: An integer specifying how many members were removed.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. attribute:: ban\n\n A member was banned.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`User` or :class:`Object` who got banned.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. attribute:: unban\n\n A member was unbanned.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`User` or :class:`Object` who got unbanned.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. attribute:: member_update\n\n A member has updated. 
This triggers in the following situations:\n\n - A nickname was changed\n - They were server muted or deafened (or it was undone)\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` who got updated.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.nick`\n - :attr:`~AuditLogDiff.mute`\n - :attr:`~AuditLogDiff.deaf`\n - :attr:`~AuditLogDiff.timed_out_until`\n\n .. attribute:: member_role_update\n\n A member's role has been updated. This triggers when a member\n either gains a role or loses a role.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` who got the role.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with one attribute:\n\n - ``integration_type``: An optional string that denotes the type of integration that did the action.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.roles`\n\n .. attribute:: member_move\n\n A member's voice channel has been updated. This triggers when a\n member is moved to a different voice channel.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with two attributes:\n\n - ``channel``: A :class:`TextChannel` or :class:`Object` with the channel ID where the members were moved.\n - ``count``: An integer specifying how many members were moved.\n\n .. versionadded:: 1.3\n\n .. attribute:: member_disconnect\n\n A member's voice state has changed. This triggers when a\n member is force disconnected from voice.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with one attribute:\n\n - ``count``: An integer specifying how many members were disconnected.\n\n .. versionadded:: 1.3\n\n .. attribute:: bot_add\n\n A bot was added to the guild.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` which was added to the guild.\n\n .. versionadded:: 1.3\n\n .. attribute:: role_create\n\n A new role was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Role` or a :class:`Object` with the ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.colour`\n - :attr:`~AuditLogDiff.mentionable`\n - :attr:`~AuditLogDiff.hoist`\n - :attr:`~AuditLogDiff.icon`\n - :attr:`~AuditLogDiff.unicode_emoji`\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.permissions`\n\n .. attribute:: role_update\n\n A role was updated. This triggers in the following situations:\n\n - The name has changed\n - The permissions have changed\n - The colour has changed\n - The role icon (or unicode emoji) has changed\n - Its hoist/mentionable state has changed\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Role` or a :class:`Object` with the ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.colour`\n - :attr:`~AuditLogDiff.mentionable`\n - :attr:`~AuditLogDiff.hoist`\n - :attr:`~AuditLogDiff.icon`\n - :attr:`~AuditLogDiff.unicode_emoji`\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.permissions`\n\n .. 
attribute:: role_delete\n\n A role was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Role` or a :class:`Object` with the ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.colour`\n - :attr:`~AuditLogDiff.mentionable`\n - :attr:`~AuditLogDiff.hoist`\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.permissions`\n\n .. attribute:: invite_create\n\n An invite was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Invite` that was created.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.max_age`\n - :attr:`~AuditLogDiff.code`\n - :attr:`~AuditLogDiff.temporary`\n - :attr:`~AuditLogDiff.inviter`\n - :attr:`~AuditLogDiff.channel`\n - :attr:`~AuditLogDiff.uses`\n - :attr:`~AuditLogDiff.max_uses`\n\n .. attribute:: invite_update\n\n An invite was updated.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Invite` that was updated.\n\n .. attribute:: invite_delete\n\n An invite was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Invite` that was deleted.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.max_age`\n - :attr:`~AuditLogDiff.code`\n - :attr:`~AuditLogDiff.temporary`\n - :attr:`~AuditLogDiff.inviter`\n - :attr:`~AuditLogDiff.channel`\n - :attr:`~AuditLogDiff.uses`\n - :attr:`~AuditLogDiff.max_uses`\n\n .. attribute:: webhook_create\n\n A webhook was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Object` with the webhook ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.channel`\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.type` (always set to ``1`` if so)\n\n .. attribute:: webhook_update\n\n A webhook was updated. This triggers in the following situations:\n\n - The webhook name changed\n - The webhook channel changed\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Object` with the webhook ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.channel`\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.avatar`\n\n .. attribute:: webhook_delete\n\n A webhook was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Object` with the webhook ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.channel`\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.type` (always set to ``1`` if so)\n\n .. attribute:: emoji_create\n\n An emoji was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Emoji` or :class:`Object` with the emoji ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n\n .. attribute:: emoji_update\n\n An emoji was updated. This triggers when the name has changed.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Emoji` or :class:`Object` with the emoji ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n\n .. attribute:: emoji_delete\n\n An emoji was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Object` with the emoji ID.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n\n .. 
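\n\n Several of the actions above expose an ``extra`` proxy object instead of (or alongside) a diff. A hedged sketch of reading one, assuming a ``guild`` obtained elsewhere and using :attr:`member_prune` as the example action:\n\n .. code-block:: python\n\n     async for entry in guild.audit_logs(action=discord.AuditLogAction.member_prune):\n         # ``extra`` carries action-specific attributes, per the listings above.\n         print(entry.extra.delete_member_days, entry.extra.members_removed)\n\n .. 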
attribute:: message_delete\n\n A message was deleted by a moderator. Note that this\n only triggers if the message was deleted by someone other than the author.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` who had their message deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with two attributes:\n\n - ``count``: An integer specifying how many messages were deleted.\n - ``channel``: A :class:`TextChannel` or :class:`Object` with the channel ID where the message got deleted.\n\n .. attribute:: message_bulk_delete\n\n Messages were bulk deleted by a moderator.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`TextChannel` or :class:`Object` with the ID of the channel that was purged.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with one attribute:\n\n - ``count``: An integer specifying how many messages were deleted.\n\n .. versionadded:: 1.3\n\n .. attribute:: message_pin\n\n A message was pinned in a channel.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` who had their message pinned.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with two attributes:\n\n - ``channel``: A :class:`TextChannel` or :class:`Object` with the channel ID where the message was pinned.\n - ``message_id``: the ID of the message which was pinned.\n\n .. versionadded:: 1.3\n\n .. attribute:: message_unpin\n\n A message was unpinned in a channel.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Member`, :class:`User`, or :class:`Object` who had their message unpinned.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with two attributes:\n\n - ``channel``: A :class:`TextChannel` or :class:`Object` with the channel ID where the message was unpinned.\n - ``message_id``: the ID of the message which was unpinned.\n\n .. versionadded:: 1.3\n\n .. attribute:: integration_create\n\n A guild integration was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n a :class:`PartialIntegration` or :class:`Object` with the\n integration ID of the integration which was created.\n\n .. versionadded:: 1.3\n\n .. attribute:: integration_update\n\n A guild integration was updated.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n a :class:`PartialIntegration` or :class:`Object` with the\n integration ID of the integration which was updated.\n\n .. versionadded:: 1.3\n\n .. attribute:: integration_delete\n\n A guild integration was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n a :class:`PartialIntegration` or :class:`Object` with the\n integration ID of the integration which was deleted.\n\n .. versionadded:: 1.3\n\n .. attribute:: stage_instance_create\n\n A stage instance was started.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`StageInstance` or :class:`Object` with the ID of the stage\n instance which was created.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.topic`\n - :attr:`~AuditLogDiff.privacy_level`\n\n .. versionadded:: 2.0\n\n .. 
attribute:: stage_instance_update\n\n A stage instance was updated.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`StageInstance` or :class:`Object` with the ID of the stage\n instance which was updated.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.topic`\n - :attr:`~AuditLogDiff.privacy_level`\n\n .. versionadded:: 2.0\n\n .. attribute:: stage_instance_delete\n\n A stage instance was ended.\n\n .. versionadded:: 2.0\n\n .. attribute:: sticker_create\n\n A sticker was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`GuildSticker` or :class:`Object` with the ID of the sticker\n which was created.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.emoji`\n - :attr:`~AuditLogDiff.type`\n - :attr:`~AuditLogDiff.format_type`\n - :attr:`~AuditLogDiff.description`\n - :attr:`~AuditLogDiff.available`\n\n .. versionadded:: 2.0\n\n .. attribute:: sticker_update\n\n A sticker was updated.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`GuildSticker` or :class:`Object` with the ID of the sticker\n which was updated.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.emoji`\n - :attr:`~AuditLogDiff.type`\n - :attr:`~AuditLogDiff.format_type`\n - :attr:`~AuditLogDiff.description`\n - :attr:`~AuditLogDiff.available`\n\n .. versionadded:: 2.0\n\n .. attribute:: sticker_delete\n\n A sticker was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`GuildSticker` or :class:`Object` with the ID of the sticker\n which was deleted.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.emoji`\n - :attr:`~AuditLogDiff.type`\n - :attr:`~AuditLogDiff.format_type`\n - :attr:`~AuditLogDiff.description`\n - :attr:`~AuditLogDiff.available`\n\n .. versionadded:: 2.0\n\n .. attribute:: scheduled_event_create\n\n A scheduled event was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`ScheduledEvent` or :class:`Object` with the ID of the event\n which was created.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.channel`\n - :attr:`~AuditLogDiff.description`\n - :attr:`~AuditLogDiff.privacy_level`\n - :attr:`~AuditLogDiff.status`\n - :attr:`~AuditLogDiff.entity_type`\n - :attr:`~AuditLogDiff.cover_image`\n\n .. versionadded:: 2.0\n\n .. attribute:: scheduled_event_update\n\n A scheduled event was updated.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`ScheduledEvent` or :class:`Object` with the ID of the event\n which was updated.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.channel`\n - :attr:`~AuditLogDiff.description`\n - :attr:`~AuditLogDiff.privacy_level`\n - :attr:`~AuditLogDiff.status`\n - :attr:`~AuditLogDiff.entity_type`\n - :attr:`~AuditLogDiff.cover_image`\n\n .. versionadded:: 2.0\n\n .. 
attribute:: scheduled_event_delete\n\n A scheduled event was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`ScheduledEvent` or :class:`Object` with the ID of the event\n which was deleted.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.channel`\n - :attr:`~AuditLogDiff.description`\n - :attr:`~AuditLogDiff.privacy_level`\n - :attr:`~AuditLogDiff.status`\n - :attr:`~AuditLogDiff.entity_type`\n - :attr:`~AuditLogDiff.cover_image`\n\n .. versionadded:: 2.0\n\n .. attribute:: thread_create\n\n A thread was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Thread` or :class:`Object` with the ID of the thread which\n was created.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.archived`\n - :attr:`~AuditLogDiff.locked`\n - :attr:`~AuditLogDiff.auto_archive_duration`\n - :attr:`~AuditLogDiff.invitable`\n\n .. versionadded:: 2.0\n\n .. attribute:: thread_update\n\n A thread was updated.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Thread` or :class:`Object` with the ID of the thread which\n was updated.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.archived`\n - :attr:`~AuditLogDiff.locked`\n - :attr:`~AuditLogDiff.auto_archive_duration`\n - :attr:`~AuditLogDiff.invitable`\n\n .. versionadded:: 2.0\n\n .. attribute:: thread_delete\n\n A thread was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n the :class:`Thread` or :class:`Object` with the ID of the thread which\n was deleted.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.archived`\n - :attr:`~AuditLogDiff.locked`\n - :attr:`~AuditLogDiff.auto_archive_duration`\n - :attr:`~AuditLogDiff.invitable`\n\n .. versionadded:: 2.0\n\n .. attribute:: app_command_permission_update\n\n An application command's or integration's application command permissions\n were updated.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n a :class:`PartialIntegration` for an integration's general permissions,\n :class:`~discord.app_commands.AppCommand` for a specific command's permissions,\n or :class:`Object` with the ID of the command or integration which\n was updated.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to a :class:`PartialIntegration` or :class:`Object` with the ID of\n the application that the command or integration belongs to.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.app_command_permissions`\n\n .. versionadded:: 2.0\n\n .. attribute:: automod_rule_create\n\n An automod rule was created.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n an :class:`AutoModRule` or :class:`Object` with the ID of the automod\n rule that was created.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.enabled`\n - :attr:`~AuditLogDiff.event_type`\n - :attr:`~AuditLogDiff.trigger_type`\n - :attr:`~AuditLogDiff.trigger`\n - :attr:`~AuditLogDiff.actions`\n - :attr:`~AuditLogDiff.exempt_roles`\n - :attr:`~AuditLogDiff.exempt_channels`\n\n .. versionadded:: 2.0\n\n .. 
attribute:: automod_rule_update\n\n An automod rule was updated.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n an :class:`AutoModRule` or :class:`Object` with the ID of the automod\n rule that was updated.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.enabled`\n - :attr:`~AuditLogDiff.event_type`\n - :attr:`~AuditLogDiff.trigger_type`\n - :attr:`~AuditLogDiff.trigger`\n - :attr:`~AuditLogDiff.actions`\n - :attr:`~AuditLogDiff.exempt_roles`\n - :attr:`~AuditLogDiff.exempt_channels`\n\n .. versionadded:: 2.0\n\n .. attribute:: automod_rule_delete\n\n An automod rule was deleted.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n an :class:`AutoModRule` or :class:`Object` with the ID of the automod\n rule that was deleted.\n\n Possible attributes for :class:`AuditLogDiff`:\n\n - :attr:`~AuditLogDiff.name`\n - :attr:`~AuditLogDiff.enabled`\n - :attr:`~AuditLogDiff.event_type`\n - :attr:`~AuditLogDiff.trigger_type`\n - :attr:`~AuditLogDiff.trigger`\n - :attr:`~AuditLogDiff.actions`\n - :attr:`~AuditLogDiff.exempt_roles`\n - :attr:`~AuditLogDiff.exempt_channels`\n\n .. versionadded:: 2.0\n\n .. attribute:: automod_block_message\n\n An automod rule blocked a message from being sent.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n a :class:`Member` with the ID of the person who triggered the automod rule.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with three attributes:\n\n - ``automod_rule_name``: The name of the automod rule that was triggered.\n - ``automod_rule_trigger_type``: An :class:`AutoModRuleTriggerType` representation of the rule type that was triggered.\n - ``channel``: The channel in which the automod rule was triggered.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. versionadded:: 2.0\n\n .. attribute:: automod_flag_message\n\n An automod rule flagged a message.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n a :class:`Member` with the ID of the person who triggered the automod rule.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with three attributes:\n\n - ``automod_rule_name``: The name of the automod rule that was triggered.\n - ``automod_rule_trigger_type``: An :class:`AutoModRuleTriggerType` representation of the rule type that was triggered.\n - ``channel``: The channel in which the automod rule was triggered.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. versionadded:: 2.1\n\n .. attribute:: automod_timeout_member\n\n An automod rule timed out a member.\n\n When this is the action, the type of :attr:`~AuditLogEntry.target` is\n a :class:`Member` with the ID of the person who triggered the automod rule.\n\n When this is the action, the type of :attr:`~AuditLogEntry.extra` is\n set to an unspecified proxy object with three attributes:\n\n - ``automod_rule_name``: The name of the automod rule that was triggered.\n - ``automod_rule_trigger_type``: An :class:`AutoModRuleTriggerType` representation of the rule type that was triggered.\n - ``channel``: The channel in which the automod rule was triggered.\n\n When this is the action, :attr:`~AuditLogEntry.changes` is empty.\n\n .. versionadded:: 2.1\n\n .. attribute:: creator_monetization_request_created\n\n A request to monetize the server was created.\n\n .. versionadded:: 2.4\n\n .. 
attribute:: creator_monetization_terms_accepted\n\n The terms and conditions for creator monetization were accepted.\n\n .. versionadded:: 2.4\n\n.. class:: AuditLogActionCategory\n\n Represents the category that the :class:`AuditLogAction` belongs to.\n\n This can be retrieved via :attr:`AuditLogEntry.category`.\n\n .. attribute:: create\n\n The action is the creation of something.\n\n .. attribute:: delete\n\n The action is the deletion of something.\n\n .. attribute:: update\n\n The action is the update of something.\n\n.. class:: TeamMembershipState\n\n Represents the membership state of a team member retrieved through :func:`Client.application_info`.\n\n .. versionadded:: 1.3\n\n .. attribute:: invited\n\n Represents an invited member.\n\n .. attribute:: accepted\n\n Represents a member currently in the team.\n\n.. class:: TeamMemberRole\n\n Represents the type of role of a team member retrieved through :func:`Client.application_info`.\n\n .. versionadded:: 2.4\n\n .. attribute:: admin\n\n The team member is an admin. This allows them to invite members to the team, access credentials, edit the application,\n and do most things the owner can do. However they cannot do destructive actions.\n\n .. attribute:: developer\n\n The team member is a developer. This allows them to access information, like the client secret or public key.\n They can also configure interaction endpoints or reset the bot token. Developers cannot invite anyone to the team\n nor can they do destructive actions.\n\n .. attribute:: read_only\n\n The team member is a read-only member. This allows them to access information, but not edit anything.\n\n.. class:: WebhookType\n\n Represents the type of webhook that can be received.\n\n .. versionadded:: 1.3\n\n .. attribute:: incoming\n\n Represents a webhook that can post messages to channels with a token.\n\n .. attribute:: channel_follower\n\n Represents a webhook that is internally managed by Discord, used for following channels.\n\n .. attribute:: application\n\n Represents a webhook that is used for interactions or applications.\n\n .. versionadded:: 2.0\n\n.. class:: ExpireBehaviour\n\n Represents the behaviour the :class:`Integration` should perform\n when a user's subscription has finished.\n\n There is an alias for this called ``ExpireBehavior``.\n\n .. versionadded:: 1.4\n\n .. attribute:: remove_role\n\n This will remove the :attr:`StreamIntegration.role` from the user\n when their subscription is finished.\n\n .. attribute:: kick\n\n This will kick the user when their subscription is finished.\n\n.. class:: DefaultAvatar\n\n Represents the default avatar of a Discord :class:`User`\n\n .. attribute:: blurple\n\n Represents the default avatar with the colour blurple.\n See also :attr:`Colour.blurple`\n .. attribute:: grey\n\n Represents the default avatar with the colour grey.\n See also :attr:`Colour.greyple`\n .. attribute:: gray\n\n An alias for :attr:`grey`.\n .. attribute:: green\n\n Represents the default avatar with the colour green.\n See also :attr:`Colour.green`\n .. attribute:: orange\n\n Represents the default avatar with the colour orange.\n See also :attr:`Colour.orange`\n .. attribute:: red\n\n Represents the default avatar with the colour red.\n See also :attr:`Colour.red`\n .. attribute:: pink\n\n Represents the default avatar with the colour pink.\n See also :attr:`Colour.pink`\n\n .. versionadded:: 2.3\n\n.. class:: StickerType\n\n Represents the type of sticker.\n\n .. versionadded:: 2.0\n\n .. 
attribute:: standard\n\n Represents a standard sticker that all Nitro users can use.\n\n .. attribute:: guild\n\n Represents a custom sticker created in a guild.\n\n.. class:: StickerFormatType\n\n Represents the type of sticker images.\n\n .. versionadded:: 1.6\n\n .. attribute:: png\n\n Represents a sticker with a png image.\n\n .. attribute:: apng\n\n Represents a sticker with an apng image.\n\n .. attribute:: lottie\n\n Represents a sticker with a lottie image.\n\n .. attribute:: gif\n\n Represents a sticker with a gif image.\n\n .. versionadded:: 2.2\n\n.. class:: InviteTarget\n\n Represents the invite type for voice channel invites.\n\n .. versionadded:: 2.0\n\n .. attribute:: unknown\n\n The invite doesn't target anyone or anything.\n\n .. attribute:: stream\n\n A stream invite that targets a user.\n\n .. attribute:: embedded_application\n\n A stream invite that targets an embedded application.\n\n.. class:: VideoQualityMode\n\n Represents the camera video quality mode for voice channel participants.\n\n .. versionadded:: 2.0\n\n .. attribute:: auto\n\n Represents auto camera video quality.\n\n .. attribute:: full\n\n Represents full camera video quality.\n\n.. class:: PrivacyLevel\n\n Represents the privacy level of a stage instance or scheduled event.\n\n .. versionadded:: 2.0\n\n .. attribute:: guild_only\n\n The stage instance or scheduled event is only accessible within the guild.\n\n.. class:: NSFWLevel\n\n Represents the NSFW level of a guild.\n\n .. versionadded:: 2.0\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two NSFW levels are equal.\n .. describe:: x != y\n\n Checks if two NSFW levels are not equal.\n .. describe:: x > y\n\n Checks if a NSFW level is higher than another.\n .. describe:: x < y\n\n Checks if a NSFW level is lower than another.\n .. describe:: x >= y\n\n Checks if a NSFW level is higher or equal to another.\n .. describe:: x <= y\n\n Checks if a NSFW level is lower or equal to another.\n\n .. attribute:: default\n\n The guild has not been categorised yet.\n\n .. attribute:: explicit\n\n The guild contains NSFW content.\n\n .. attribute:: safe\n\n The guild does not contain any NSFW content.\n\n .. attribute:: age_restricted\n\n The guild may contain NSFW content.\n\n.. class:: Locale\n\n Supported locales by Discord. Mainly used for application command localisation.\n\n .. versionadded:: 2.0\n\n .. attribute:: american_english\n\n The ``en-US`` locale.\n\n .. attribute:: british_english\n\n The ``en-GB`` locale.\n\n .. attribute:: bulgarian\n\n The ``bg`` locale.\n\n .. attribute:: chinese\n\n The ``zh-CN`` locale.\n\n .. attribute:: taiwan_chinese\n\n The ``zh-TW`` locale.\n\n .. attribute:: croatian\n\n The ``hr`` locale.\n\n .. attribute:: czech\n\n The ``cs`` locale.\n\n .. attribute:: indonesian\n\n The ``id`` locale.\n\n .. versionadded:: 2.2\n\n .. attribute:: danish\n\n The ``da`` locale.\n\n .. attribute:: dutch\n\n The ``nl`` locale.\n\n .. attribute:: finnish\n\n The ``fi`` locale.\n\n .. attribute:: french\n\n The ``fr`` locale.\n\n .. attribute:: german\n\n The ``de`` locale.\n\n .. attribute:: greek\n\n The ``el`` locale.\n\n .. attribute:: hindi\n\n The ``hi`` locale.\n\n .. attribute:: hungarian\n\n The ``hu`` locale.\n\n .. attribute:: italian\n\n The ``it`` locale.\n\n .. attribute:: japanese\n\n The ``ja`` locale.\n\n .. attribute:: korean\n\n The ``ko`` locale.\n\n .. attribute:: lithuanian\n\n The ``lt`` locale.\n\n .. attribute:: norwegian\n\n The ``no`` locale.\n\n .. 
attribute:: polish\n\n The ``pl`` locale.\n\n .. attribute:: brazil_portuguese\n\n The ``pt-BR`` locale.\n\n .. attribute:: romanian\n\n The ``ro`` locale.\n\n .. attribute:: russian\n\n The ``ru`` locale.\n\n .. attribute:: spain_spanish\n\n The ``es-ES`` locale.\n\n .. attribute:: swedish\n\n The ``sv-SE`` locale.\n\n .. attribute:: thai\n\n The ``th`` locale.\n\n .. attribute:: turkish\n\n The ``tr`` locale.\n\n .. attribute:: ukrainian\n\n The ``uk`` locale.\n\n .. attribute:: vietnamese\n\n The ``vi`` locale.\n\n\n.. class:: MFALevel\n\n Represents the Multi-Factor Authentication requirement level of a guild.\n\n .. versionadded:: 2.0\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two MFA levels are equal.\n .. describe:: x != y\n\n Checks if two MFA levels are not equal.\n .. describe:: x > y\n\n Checks if an MFA level is higher than another.\n .. describe:: x < y\n\n Checks if an MFA level is lower than another.\n .. describe:: x >= y\n\n Checks if an MFA level is higher or equal to another.\n .. describe:: x <= y\n\n Checks if an MFA level is lower or equal to another.\n\n .. attribute:: disabled\n\n The guild has no MFA requirement.\n\n .. attribute:: require_2fa\n\n The guild requires two-factor authentication.\n\n.. class:: EntityType\n\n Represents the type of entity that a scheduled event is for.\n\n .. versionadded:: 2.0\n\n .. attribute:: stage_instance\n\n The scheduled event will occur in a stage instance.\n\n .. attribute:: voice\n\n The scheduled event will occur in a voice channel.\n\n .. attribute:: external\n\n The scheduled event will occur externally.\n\n.. class:: EventStatus\n\n Represents the status of an event.\n\n .. versionadded:: 2.0\n\n .. attribute:: scheduled\n\n The event is scheduled.\n\n .. attribute:: active\n\n The event is active.\n\n .. attribute:: completed\n\n The event has ended.\n\n .. attribute:: cancelled\n\n The event has been cancelled.\n\n .. attribute:: canceled\n\n An alias for :attr:`cancelled`.\n\n .. attribute:: ended\n\n An alias for :attr:`completed`.\n\n.. class:: AutoModRuleTriggerType\n\n Represents the trigger type of an automod rule.\n\n .. versionadded:: 2.0\n\n .. attribute:: keyword\n\n The rule will trigger when a keyword is mentioned.\n\n .. attribute:: harmful_link\n\n The rule will trigger when a harmful link is posted.\n\n .. attribute:: spam\n\n The rule will trigger when a spam message is posted.\n\n .. attribute:: keyword_preset\n\n The rule will trigger when something triggers based on the set keyword preset types.\n\n .. attribute:: mention_spam\n\n The rule will trigger when the combined number of role and user mentions\n is greater than the set limit.\n\n .. attribute:: member_profile\n\n The rule will trigger when a user's profile contains a keyword.\n\n .. versionadded:: 2.4\n\n.. class:: AutoModRuleEventType\n\n Represents the event type of an automod rule.\n\n .. versionadded:: 2.0\n\n .. attribute:: message_send\n\n The rule will trigger when a message is sent.\n\n .. attribute:: member_update\n\n The rule will trigger when a member's profile is updated.\n\n .. versionadded:: 2.4\n\n.. class:: AutoModRuleActionType\n\n Represents the action type of an automod rule.\n\n .. versionadded:: 2.0\n\n .. attribute:: block_message\n\n The rule will block a message from being sent.\n\n .. attribute:: send_alert_message\n\n The rule will send an alert message to a predefined channel.\n\n .. attribute:: timeout\n\n The rule will time out a user.\n\n .. 
attribute:: block_member_interactions\n\n Similar to :attr:`timeout`, except the user will be timed out indefinitely.\n This will request the user to edit their profile.\n\n .. versionadded:: 2.4\n\n.. class:: ForumLayoutType\n\n Represents how a forum's posts are laid out in the client.\n\n .. versionadded:: 2.2\n\n .. attribute:: not_set\n\n No default has been set, so it is up to the client to know how to lay it out.\n\n .. attribute:: list_view\n\n Displays posts as a list.\n\n .. attribute:: gallery_view\n\n Displays posts as a collection of tiles.\n\n\n.. class:: ForumOrderType\n\n Represents how a forum's posts are sorted in the client.\n\n .. versionadded:: 2.3\n\n .. attribute:: latest_activity\n\n Sort forum posts by activity.\n\n .. attribute:: creation_date\n\n Sort forum posts by creation time (from most recent to oldest).\n\n.. class:: SelectDefaultValueType\n\n Represents the default value of a select menu.\n\n .. versionadded:: 2.4\n\n .. attribute:: user\n\n The underlying type of the ID is a user.\n\n .. attribute:: role\n\n The underlying type of the ID is a role.\n\n .. attribute:: channel\n\n The underlying type of the ID is a channel or thread.\n\n\n.. class:: SKUType\n\n Represents the type of a SKU.\n\n .. versionadded:: 2.4\n\n .. attribute:: subscription\n\n The SKU is a recurring subscription.\n\n .. attribute:: subscription_group\n\n The SKU is a system-generated group which is created for each :attr:`SKUType.subscription`.\n\n\n.. class:: EntitlementType\n\n Represents the type of an entitlement.\n\n .. versionadded:: 2.4\n\n .. attribute:: application_subscription\n\n The entitlement was purchased as an app subscription.\n\n\n.. class:: EntitlementOwnerType\n\n Represents the type of an entitlement owner.\n\n .. versionadded:: 2.4\n\n .. attribute:: guild\n\n The entitlement owner is a guild.\n\n .. attribute:: user\n\n The entitlement owner is a user.\n\n\n.. _discord-api-audit-logs:\n\nAudit Log Data\n----------------\n\nWorking with :meth:`Guild.audit_logs` is a complicated process with a lot of machinery\ninvolved. The library attempts to make it easy to use and friendly. In order to accomplish\nthis goal, it makes use of a couple of data classes.\n\nAuditLogEntry\n~~~~~~~~~~~~~~~\n\n.. attributetable:: AuditLogEntry\n\n.. autoclass:: AuditLogEntry\n :members:\n\nAuditLogChanges\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: AuditLogChanges\n\n.. class:: AuditLogChanges\n\n An audit log change set.\n\n .. attribute:: before\n\n The old value. The attribute has the type of :class:`AuditLogDiff`.\n\n Depending on the :class:`AuditLogActionCategory` retrieved by\n :attr:`~AuditLogEntry.category`\\, the data retrieved by this\n attribute differs:\n\n +----------------------------------------+------------------------------------------------------+\n |                Category                |                     Description                      |\n +----------------------------------------+------------------------------------------------------+\n | :attr:`~AuditLogActionCategory.create` | All attributes are set to ``None``.                  |\n +----------------------------------------+------------------------------------------------------+\n | :attr:`~AuditLogActionCategory.delete` | All attributes are set to the value before deletion. |\n +----------------------------------------+------------------------------------------------------+\n | :attr:`~AuditLogActionCategory.update` | All attributes are set to the value before updating. 
|\n +----------------------------------------+------------------------------------------------------+\n | ``None``                               | No attributes are set.                               |\n +----------------------------------------+------------------------------------------------------+\n\n .. attribute:: after\n\n The new value. The attribute has the type of :class:`AuditLogDiff`.\n\n Depending on the :class:`AuditLogActionCategory` retrieved by\n :attr:`~AuditLogEntry.category`\\, the data retrieved by this\n attribute differs:\n\n +----------------------------------------+-----------------------------------------------------+\n |                Category                |                     Description                     |\n +----------------------------------------+-----------------------------------------------------+\n | :attr:`~AuditLogActionCategory.create` | All attributes are set to the created value.        |\n +----------------------------------------+-----------------------------------------------------+\n | :attr:`~AuditLogActionCategory.delete` | All attributes are set to ``None``.                 |\n +----------------------------------------+-----------------------------------------------------+\n | :attr:`~AuditLogActionCategory.update` | All attributes are set to the value after updating. |\n +----------------------------------------+-----------------------------------------------------+\n | ``None``                               | No attributes are set.                              |\n +----------------------------------------+-----------------------------------------------------+\n\nAuditLogDiff\n~~~~~~~~~~~~~\n\n.. attributetable:: AuditLogDiff\n\n.. class:: AuditLogDiff\n\n Represents an audit log "change" object. A change object has dynamic\n attributes that depend on the type of action being done. Certain actions\n map to certain attributes being set.\n\n Note that accessing an attribute that does not match the specified action\n will lead to an attribute error.\n\n To get a list of attributes that have been set, you can iterate over\n them. To see a list of all possible attributes that could be set based\n on the action being done, check the documentation for :class:`AuditLogAction`,\n otherwise check the documentation below for all attributes that are possible.\n\n .. container:: operations\n\n .. describe:: iter(diff)\n\n Returns an iterator over (attribute, value) tuples of this diff.\n\n .. attribute:: name\n\n A name of something.\n\n :type: :class:`str`\n\n .. attribute:: guild\n\n The guild of something.\n\n :type: :class:`Guild`\n\n .. attribute:: icon\n\n A guild's or role's icon. See also :attr:`Guild.icon` or :attr:`Role.icon`.\n\n :type: :class:`Asset`\n\n .. attribute:: splash\n\n The guild's invite splash. See also :attr:`Guild.splash`.\n\n :type: :class:`Asset`\n\n .. attribute:: discovery_splash\n\n The guild's discovery splash. See also :attr:`Guild.discovery_splash`.\n\n :type: :class:`Asset`\n\n .. attribute:: banner\n\n The guild's banner. See also :attr:`Guild.banner`.\n\n :type: :class:`Asset`\n\n .. attribute:: owner\n\n The guild's owner. See also :attr:`Guild.owner`\n\n :type: Union[:class:`Member`, :class:`User`]\n\n .. attribute:: afk_channel\n\n The guild's AFK channel.\n\n If this could not be found, then it falls back to a :class:`Object`\n with the ID being set.\n\n See :attr:`Guild.afk_channel`.\n\n :type: Union[:class:`VoiceChannel`, :class:`Object`]\n\n .. attribute:: system_channel\n\n The guild's system channel.\n\n If this could not be found, then it falls back to a :class:`Object`\n with the ID being set.\n\n See :attr:`Guild.system_channel`.\n\n :type: Union[:class:`TextChannel`, :class:`Object`]\n\n\n .. 
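\n\n (A hedged aside: because a diff only carries the attributes that actually changed, iterating it via the ``iter(diff)`` operation described above is often the safest way to inspect one. ``entry`` below is an assumed :class:`AuditLogEntry`.)\n\n .. code-block:: python\n\n     for attr, value in entry.after:\n         # Each item is an (attribute, value) tuple of the diff.\n         print(attr, value)\n\n .. 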
attribute:: rules_channel\n\n The guild's rules channel.\n\n If this could not be found then it falls back to a :class:`Object`\n with the ID being set.\n\n See :attr:`Guild.rules_channel`.\n\n :type: Union[:class:`TextChannel`, :class:`Object`]\n\n\n .. attribute:: public_updates_channel\n\n The guild's public updates channel.\n\n If this could not be found then it falls back to a :class:`Object`\n with the ID being set.\n\n See :attr:`Guild.public_updates_channel`.\n\n :type: Union[:class:`TextChannel`, :class:`Object`]\n\n .. attribute:: afk_timeout\n\n The guild's AFK timeout. See :attr:`Guild.afk_timeout`.\n\n :type: :class:`int`\n\n .. attribute:: mfa_level\n\n The guild's MFA level. See :attr:`Guild.mfa_level`.\n\n :type: :class:`MFALevel`\n\n .. attribute:: widget_enabled\n\n The guild's widget has been enabled or disabled.\n\n :type: :class:`bool`\n\n .. attribute:: widget_channel\n\n The widget's channel.\n\n If this could not be found then it falls back to a :class:`Object`\n with the ID being set.\n\n :type: Union[:class:`TextChannel`, :class:`Object`]\n\n .. attribute:: verification_level\n\n The guild's verification level.\n\n See also :attr:`Guild.verification_level`.\n\n :type: :class:`VerificationLevel`\n\n .. attribute:: default_notifications\n\n The guild's default notification level.\n\n See also :attr:`Guild.default_notifications`.\n\n :type: :class:`NotificationLevel`\n\n .. attribute:: explicit_content_filter\n\n The guild's content filter.\n\n See also :attr:`Guild.explicit_content_filter`.\n\n :type: :class:`ContentFilter`\n\n .. attribute:: vanity_url_code\n\n The guild's vanity URL.\n\n See also :meth:`Guild.vanity_invite` and :meth:`Guild.edit`.\n\n :type: :class:`str`\n\n .. attribute:: position\n\n The position of a :class:`Role` or :class:`abc.GuildChannel`.\n\n :type: :class:`int`\n\n .. attribute:: type\n\n The type of channel, sticker, webhook or integration.\n\n :type: Union[:class:`ChannelType`, :class:`StickerType`, :class:`WebhookType`, :class:`str`]\n\n .. attribute:: topic\n\n The topic of a :class:`TextChannel` or :class:`StageChannel`.\n\n See also :attr:`TextChannel.topic` or :attr:`StageChannel.topic`.\n\n :type: :class:`str`\n\n .. attribute:: bitrate\n\n The bitrate of a :class:`VoiceChannel`.\n\n See also :attr:`VoiceChannel.bitrate`.\n\n :type: :class:`int`\n\n .. attribute:: overwrites\n\n A list of permission overwrite tuples that represents a target and a\n :class:`PermissionOverwrite` for said target.\n\n The first element is the object being targeted, which can either\n be a :class:`Member` or :class:`User` or :class:`Role`. If this object\n is not found then it is a :class:`Object` with an ID being filled and\n a ``type`` attribute set to either ``'role'`` or ``'member'`` to help\n decide what type of ID it is.\n\n :type: List[Tuple[target, :class:`PermissionOverwrite`]]\n\n .. attribute:: privacy_level\n\n The privacy level of the stage instance or scheduled event\n\n :type: :class:`PrivacyLevel`\n\n .. attribute:: roles\n\n A list of roles being added or removed from a member.\n\n If a role is not found then it is a :class:`Object` with the ID and name being\n filled in.\n\n :type: List[Union[:class:`Role`, :class:`Object`]]\n\n .. attribute:: nick\n\n The nickname of a member.\n\n See also :attr:`Member.nick`\n\n :type: Optional[:class:`str`]\n\n .. attribute:: deaf\n\n Whether the member is being server deafened.\n\n See also :attr:`VoiceState.deaf`.\n\n :type: :class:`bool`\n\n .. 
attribute:: mute\n\n Whether the member is being server muted.\n\n See also :attr:`VoiceState.mute`.\n\n :type: :class:`bool`\n\n .. attribute:: permissions\n\n The permissions of a role.\n\n See also :attr:`Role.permissions`.\n\n :type: :class:`Permissions`\n\n .. attribute:: colour\n color\n\n The colour of a role.\n\n See also :attr:`Role.colour`\n\n :type: :class:`Colour`\n\n .. attribute:: hoist\n\n Whether the role is being hoisted or not.\n\n See also :attr:`Role.hoist`\n\n :type: :class:`bool`\n\n .. attribute:: mentionable\n\n Whether the role is mentionable or not.\n\n See also :attr:`Role.mentionable`\n\n :type: :class:`bool`\n\n .. attribute:: code\n\n The invite's code.\n\n See also :attr:`Invite.code`\n\n :type: :class:`str`\n\n .. attribute:: channel\n\n A guild channel.\n\n If the channel is not found then it is a :class:`Object` with the ID\n being set. In some cases the channel name is also set.\n\n :type: Union[:class:`abc.GuildChannel`, :class:`Object`]\n\n .. attribute:: inviter\n\n The user who created the invite.\n\n See also :attr:`Invite.inviter`.\n\n :type: Optional[:class:`User`]\n\n .. attribute:: max_uses\n\n The invite's max uses.\n\n See also :attr:`Invite.max_uses`.\n\n :type: :class:`int`\n\n .. attribute:: uses\n\n The invite's current uses.\n\n See also :attr:`Invite.uses`.\n\n :type: :class:`int`\n\n .. attribute:: max_age\n\n The invite's max age in seconds.\n\n See also :attr:`Invite.max_age`.\n\n :type: :class:`int`\n\n .. attribute:: temporary\n\n If the invite is a temporary invite.\n\n See also :attr:`Invite.temporary`.\n\n :type: :class:`bool`\n\n .. attribute:: allow\n deny\n\n The permissions being allowed or denied.\n\n :type: :class:`Permissions`\n\n .. attribute:: id\n\n The ID of the object being changed.\n\n :type: :class:`int`\n\n .. attribute:: avatar\n\n The avatar of a member.\n\n See also :attr:`User.avatar`.\n\n :type: :class:`Asset`\n\n .. attribute:: slowmode_delay\n\n The number of seconds members have to wait before\n sending another message in the channel.\n\n See also :attr:`TextChannel.slowmode_delay`.\n\n :type: :class:`int`\n\n .. attribute:: rtc_region\n\n The region for the voice channel’s voice communication.\n A value of ``None`` indicates automatic voice region detection.\n\n See also :attr:`VoiceChannel.rtc_region`.\n\n :type: :class:`str`\n\n .. attribute:: video_quality_mode\n\n The camera video quality for the voice channel's participants.\n\n See also :attr:`VoiceChannel.video_quality_mode`.\n\n :type: :class:`VideoQualityMode`\n\n .. attribute:: format_type\n\n The format type of a sticker being changed.\n\n See also :attr:`GuildSticker.format`\n\n :type: :class:`StickerFormatType`\n\n .. attribute:: emoji\n\n The name of the emoji that represents a sticker being changed.\n\n See also :attr:`GuildSticker.emoji`.\n\n :type: :class:`str`\n\n .. attribute:: unicode_emoji\n\n The unicode emoji that is used as an icon for the role being changed.\n\n See also :attr:`Role.unicode_emoji`.\n\n :type: :class:`str`\n\n .. attribute:: description\n\n The description of a guild, a sticker, or a scheduled event.\n\n See also :attr:`Guild.description`, :attr:`GuildSticker.description`, or\n :attr:`ScheduledEvent.description`.\n\n :type: :class:`str`\n\n .. attribute:: available\n\n The availability of a sticker being changed.\n\n See also :attr:`GuildSticker.available`\n\n :type: :class:`bool`\n\n .. attribute:: archived\n\n The thread is now archived.\n\n :type: :class:`bool`\n\n .. 
attribute:: locked\n\n The thread is being locked or unlocked.\n\n :type: :class:`bool`\n\n .. attribute:: auto_archive_duration\n\n The thread's auto archive duration being changed.\n\n See also :attr:`Thread.auto_archive_duration`\n\n :type: :class:`int`\n\n .. attribute:: default_auto_archive_duration\n\n The default auto archive duration for newly created threads being changed.\n\n :type: :class:`int`\n\n .. attribute:: invitable\n\n Whether non-moderators can add users to this private thread.\n\n :type: :class:`bool`\n\n .. attribute:: timed_out_until\n\n Whether the user is timed out, and if so until when.\n\n :type: Optional[:class:`datetime.datetime`]\n\n .. attribute:: enable_emoticons\n\n Integration emoticons were enabled or disabled.\n\n See also :attr:`StreamIntegration.enable_emoticons`\n\n :type: :class:`bool`\n\n .. attribute:: expire_behaviour\n expire_behavior\n\n The behaviour of expiring subscribers changed.\n\n See also :attr:`StreamIntegration.expire_behaviour`\n\n :type: :class:`ExpireBehaviour`\n\n .. attribute:: expire_grace_period\n\n The grace period before expiring subscribers changed.\n\n See also :attr:`StreamIntegration.expire_grace_period`\n\n :type: :class:`int`\n\n .. attribute:: preferred_locale\n\n The preferred locale for the guild changed.\n\n See also :attr:`Guild.preferred_locale`\n\n :type: :class:`Locale`\n\n .. attribute:: prune_delete_days\n\n The number of days after which inactive and role-unassigned members are kicked has been changed.\n\n :type: :class:`int`\n\n .. attribute:: status\n\n The status of the scheduled event.\n\n :type: :class:`EventStatus`\n\n .. attribute:: entity_type\n\n The type of entity this scheduled event is for.\n\n :type: :class:`EntityType`\n\n .. attribute:: cover_image\n\n The scheduled event's cover image.\n\n See also :attr:`ScheduledEvent.cover_image`.\n\n :type: :class:`Asset`\n\n .. attribute:: app_command_permissions\n\n List of permissions for the app command.\n\n :type: List[:class:`~discord.app_commands.AppCommandPermissions`]\n\n .. attribute:: enabled\n\n Whether the automod rule is active or not.\n\n :type: :class:`bool`\n\n .. attribute:: event_type\n\n The event type for triggering the automod rule.\n\n :type: :class:`AutoModRuleEventType`\n\n .. attribute:: trigger_type\n\n The trigger type for the automod rule.\n\n :type: :class:`AutoModRuleTriggerType`\n\n .. attribute:: trigger\n\n The trigger for the automod rule.\n\n .. note ::\n\n The :attr:`~AutoModTrigger.type` of the trigger may be incorrect.\n Some attributes such as :attr:`~AutoModTrigger.keyword_filter`, :attr:`~AutoModTrigger.regex_patterns`,\n and :attr:`~AutoModTrigger.allow_list` will only have the added or removed values.\n\n :type: :class:`AutoModTrigger`\n\n .. attribute:: actions\n\n The actions to take when an automod rule is triggered.\n\n :type: List[AutoModRuleAction]\n\n .. attribute:: exempt_roles\n\n The list of roles that are exempt from the automod rule.\n\n :type: List[Union[:class:`Role`, :class:`Object`]]\n\n .. attribute:: exempt_channels\n\n The list of channels or threads that are exempt from the automod rule.\n\n :type: List[:class:`abc.GuildChannel`, :class:`Thread`, :class:`Object`]\n\n .. attribute:: premium_progress_bar_enabled\n\n The guild’s display setting to show boost progress bar.\n\n :type: :class:`bool`\n\n .. attribute:: system_channel_flags\n\n The guild’s system channel settings.\n\n See also :attr:`Guild.system_channel_flags`\n\n :type: :class:`SystemChannelFlags`\n\n .. 
attribute:: nsfw\n\n Whether the channel is marked as “not safe for work” or “age restricted”.\n\n :type: :class:`bool`\n\n .. attribute:: user_limit\n\n The channel’s limit for number of members that can be in a voice or stage channel.\n\n See also :attr:`VoiceChannel.user_limit` and :attr:`StageChannel.user_limit`\n\n :type: :class:`int`\n\n .. attribute:: flags\n\n The channel flags associated with this thread or forum post.\n\n See also :attr:`ForumChannel.flags` and :attr:`Thread.flags`\n\n :type: :class:`ChannelFlags`\n\n .. attribute:: default_thread_slowmode_delay\n\n The default slowmode delay for threads created in this text channel or forum.\n\n See also :attr:`TextChannel.default_thread_slowmode_delay` and :attr:`ForumChannel.default_thread_slowmode_delay`\n\n :type: :class:`int`\n\n .. attribute:: applied_tags\n\n The applied tags of a forum post.\n\n See also :attr:`Thread.applied_tags`\n\n :type: List[Union[:class:`ForumTag`, :class:`Object`]]\n\n .. attribute:: available_tags\n\n The available tags of a forum.\n\n See also :attr:`ForumChannel.available_tags`\n\n :type: Sequence[:class:`ForumTag`]\n\n .. attribute:: default_reaction_emoji\n\n The default_reaction_emoji for forum posts.\n\n See also :attr:`ForumChannel.default_reaction_emoji`\n\n :type: Optional[:class:`PartialEmoji`]\n\n.. this is currently missing the following keys: reason and application_id\n I'm not sure how to port these\n\nWebhook Support\n------------------\n\ndiscord.py offers support for creating, editing, and executing webhooks through the :class:`Webhook` class.\n\nWebhook\n~~~~~~~~~\n\n.. attributetable:: Webhook\n\n.. autoclass:: Webhook()\n :members:\n :inherited-members:\n\nWebhookMessage\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: WebhookMessage\n\n.. autoclass:: WebhookMessage()\n :members:\n :inherited-members:\n\nSyncWebhook\n~~~~~~~~~~~~\n\n.. attributetable:: SyncWebhook\n\n.. autoclass:: SyncWebhook()\n :members:\n :inherited-members:\n\nSyncWebhookMessage\n~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: SyncWebhookMessage\n\n.. autoclass:: SyncWebhookMessage()\n :members:\n\n.. _discord_api_abcs:\n\nAbstract Base Classes\n-----------------------\n\nAn :term:`abstract base class` (also known as an ``abc``) is a class that models can inherit\nto get their behaviour. **Abstract base classes should not be instantiated**.\nThey are mainly there for usage with :func:`isinstance` and :func:`issubclass`\\.\n\nThis library has a module related to abstract base classes, in which all the ABCs are subclasses of\n:class:`typing.Protocol`.\n\nSnowflake\n~~~~~~~~~~\n\n.. attributetable:: discord.abc.Snowflake\n\n.. autoclass:: discord.abc.Snowflake()\n :members:\n\nUser\n~~~~~\n\n.. attributetable:: discord.abc.User\n\n.. autoclass:: discord.abc.User()\n :members:\n\nPrivateChannel\n~~~~~~~~~~~~~~~\n\n.. attributetable:: discord.abc.PrivateChannel\n\n.. autoclass:: discord.abc.PrivateChannel()\n :members:\n\nGuildChannel\n~~~~~~~~~~~~~\n\n.. attributetable:: discord.abc.GuildChannel\n\n.. autoclass:: discord.abc.GuildChannel()\n :members:\n\nMessageable\n~~~~~~~~~~~~\n\n.. attributetable:: discord.abc.Messageable\n\n.. autoclass:: discord.abc.Messageable()\n :members:\n :exclude-members: typing\n\n .. automethod:: discord.abc.Messageable.typing\n :async-with:\n\nConnectable\n~~~~~~~~~~~~\n\n.. attributetable:: discord.abc.Connectable\n\n.. autoclass:: discord.abc.Connectable()\n :members:\n\n.. 
_discord_api_models:\n\nDiscord Models\n---------------\n\nModels are classes that are received from Discord and are not meant to be created by\nthe user of the library.\n\n.. danger::\n\n The classes listed below are **not intended to be created by users** and are also\n **read-only**.\n\n For example, this means that you should not make your own :class:`User` instances\n nor should you modify the :class:`User` instance yourself.\n\n If you want to get one of these model classes instances they'd have to be through\n the cache, and a common way of doing so is through the :func:`utils.find` function\n or attributes of model classes that you receive from the events specified in the\n :ref:`discord-api-events`.\n\n.. note::\n\n Nearly all classes here have :ref:`py:slots` defined which means that it is\n impossible to have dynamic attributes to the data classes.\n\n\nClientUser\n~~~~~~~~~~~~\n\n.. attributetable:: ClientUser\n\n.. autoclass:: ClientUser()\n :members:\n :inherited-members:\n\nUser\n~~~~~\n\n.. attributetable:: User\n\n.. autoclass:: User()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nAutoMod\n~~~~~~~\n\n.. attributetable:: AutoModRule\n\n.. autoclass:: AutoModRule()\n :members:\n\n.. attributetable:: AutoModAction\n\n.. autoclass:: AutoModAction()\n :members:\n\nAttachment\n~~~~~~~~~~~\n\n.. attributetable:: Attachment\n\n.. autoclass:: Attachment()\n :members:\n\nAsset\n~~~~~\n\n.. attributetable:: Asset\n\n.. autoclass:: Asset()\n :members:\n :inherited-members:\n\nMessage\n~~~~~~~\n\n.. attributetable:: Message\n\n.. autoclass:: Message()\n :members:\n :inherited-members:\n\nDeletedReferencedMessage\n~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: DeletedReferencedMessage\n\n.. autoclass:: DeletedReferencedMessage()\n :members:\n\n\nReaction\n~~~~~~~~~\n\n.. attributetable:: Reaction\n\n.. autoclass:: Reaction()\n :members:\n\nGuild\n~~~~~~\n\n.. attributetable:: Guild\n\n.. autoclass:: Guild()\n :members:\n\n.. class:: BanEntry\n\n A namedtuple which represents a ban returned from :meth:`~Guild.bans`.\n\n .. attribute:: reason\n\n The reason this user was banned.\n\n :type: Optional[:class:`str`]\n .. attribute:: user\n\n The :class:`User` that was banned.\n\n :type: :class:`User`\n\n\nScheduledEvent\n~~~~~~~~~~~~~~\n\n.. attributetable:: ScheduledEvent\n\n.. autoclass:: ScheduledEvent()\n :members:\n\n\nIntegration\n~~~~~~~~~~~~\n\n.. attributetable:: Integration\n\n.. autoclass:: Integration()\n :members:\n\n.. attributetable:: IntegrationAccount\n\n.. autoclass:: IntegrationAccount()\n :members:\n\n.. attributetable:: BotIntegration\n\n.. autoclass:: BotIntegration()\n :members:\n\n.. attributetable:: IntegrationApplication\n\n.. autoclass:: IntegrationApplication()\n :members:\n\n.. attributetable:: StreamIntegration\n\n.. autoclass:: StreamIntegration()\n :members:\n\n.. attributetable:: PartialIntegration\n\n.. autoclass:: PartialIntegration()\n :members:\n\nMember\n~~~~~~\n\n.. attributetable:: Member\n\n.. autoclass:: Member()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nSpotify\n~~~~~~~~\n\n.. attributetable:: Spotify\n\n.. autoclass:: Spotify()\n :members:\n\nVoiceState\n~~~~~~~~~~~\n\n.. attributetable:: VoiceState\n\n.. autoclass:: VoiceState()\n :members:\n\nEmoji\n~~~~~\n\n.. attributetable:: Emoji\n\n.. autoclass:: Emoji()\n :members:\n :inherited-members:\n\nPartialEmoji\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialEmoji\n\n.. 
autoclass:: PartialEmoji()\n :members:\n :inherited-members:\n\nRole\n~~~~~\n\n.. attributetable:: Role\n\n.. autoclass:: Role()\n :members:\n\nRoleTags\n~~~~~~~~~~\n\n.. attributetable:: RoleTags\n\n.. autoclass:: RoleTags()\n :members:\n\nPartialMessageable\n~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialMessageable\n\n.. autoclass:: PartialMessageable()\n :members:\n :inherited-members:\n\nTextChannel\n~~~~~~~~~~~~\n\n.. attributetable:: TextChannel\n\n.. autoclass:: TextChannel()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nForumChannel\n~~~~~~~~~~~~~\n\n.. attributetable:: ForumChannel\n\n.. autoclass:: ForumChannel()\n :members:\n :inherited-members:\n\nThread\n~~~~~~~~\n\n.. attributetable:: Thread\n\n.. autoclass:: Thread()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nThreadMember\n~~~~~~~~~~~~~\n\n.. attributetable:: ThreadMember\n\n.. autoclass:: ThreadMember()\n :members:\n\nVoiceChannel\n~~~~~~~~~~~~~\n\n.. attributetable:: VoiceChannel\n\n.. autoclass:: VoiceChannel()\n :members:\n :inherited-members:\n\nStageChannel\n~~~~~~~~~~~~~\n\n.. attributetable:: StageChannel\n\n.. autoclass:: StageChannel()\n :members:\n :inherited-members:\n\n\nStageInstance\n~~~~~~~~~~~~~~\n\n.. attributetable:: StageInstance\n\n.. autoclass:: StageInstance()\n :members:\n\nCategoryChannel\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: CategoryChannel\n\n.. autoclass:: CategoryChannel()\n :members:\n :inherited-members:\n\nDMChannel\n~~~~~~~~~\n\n.. attributetable:: DMChannel\n\n.. autoclass:: DMChannel()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nGroupChannel\n~~~~~~~~~~~~\n\n.. attributetable:: GroupChannel\n\n.. autoclass:: GroupChannel()\n :members:\n :inherited-members:\n :exclude-members: typing\n\n .. automethod:: typing\n :async-with:\n\nPartialInviteGuild\n~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialInviteGuild\n\n.. autoclass:: PartialInviteGuild()\n :members:\n\nPartialInviteChannel\n~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialInviteChannel\n\n.. autoclass:: PartialInviteChannel()\n :members:\n\nInvite\n~~~~~~~\n\n.. attributetable:: Invite\n\n.. autoclass:: Invite()\n :members:\n\nTemplate\n~~~~~~~~~\n\n.. attributetable:: Template\n\n.. autoclass:: Template()\n :members:\n\nWelcomeScreen\n~~~~~~~~~~~~~~~\n\n.. attributetable:: WelcomeScreen\n\n.. autoclass:: WelcomeScreen()\n :members:\n\nWelcomeChannel\n~~~~~~~~~~~~~~~\n\n.. attributetable:: WelcomeChannel\n\n.. autoclass:: WelcomeChannel()\n :members:\n\nWidgetChannel\n~~~~~~~~~~~~~~~\n\n.. attributetable:: WidgetChannel\n\n.. autoclass:: WidgetChannel()\n :members:\n\nWidgetMember\n~~~~~~~~~~~~~\n\n.. attributetable:: WidgetMember\n\n.. autoclass:: WidgetMember()\n :members:\n :inherited-members:\n\nWidget\n~~~~~~~\n\n.. attributetable:: Widget\n\n.. autoclass:: Widget()\n :members:\n\nStickerPack\n~~~~~~~~~~~~~\n\n.. attributetable:: StickerPack\n\n.. autoclass:: StickerPack()\n :members:\n\nStickerItem\n~~~~~~~~~~~~~\n\n.. attributetable:: StickerItem\n\n.. autoclass:: StickerItem()\n :members:\n\nSticker\n~~~~~~~~~~~~~~~\n\n.. attributetable:: Sticker\n\n.. autoclass:: Sticker()\n :members:\n\nStandardSticker\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: StandardSticker\n\n.. autoclass:: StandardSticker()\n :members:\n\nGuildSticker\n~~~~~~~~~~~~~\n\n.. attributetable:: GuildSticker\n\n.. 
autoclass:: GuildSticker()\n :members:\n\nShardInfo\n~~~~~~~~~~~\n\n.. attributetable:: ShardInfo\n\n.. autoclass:: ShardInfo()\n :members:\n\nSKU\n~~~~~~~~~~~\n\n.. attributetable:: SKU\n\n.. autoclass:: SKU()\n :members:\n\nEntitlement\n~~~~~~~~~~~\n\n.. attributetable:: Entitlement\n\n.. autoclass:: Entitlement()\n :members:\n\nRawMessageDeleteEvent\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawMessageDeleteEvent\n\n.. autoclass:: RawMessageDeleteEvent()\n :members:\n\nRawBulkMessageDeleteEvent\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawBulkMessageDeleteEvent\n\n.. autoclass:: RawBulkMessageDeleteEvent()\n :members:\n\nRawMessageUpdateEvent\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawMessageUpdateEvent\n\n.. autoclass:: RawMessageUpdateEvent()\n :members:\n\nRawReactionActionEvent\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawReactionActionEvent\n\n.. autoclass:: RawReactionActionEvent()\n :members:\n\nRawReactionClearEvent\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawReactionClearEvent\n\n.. autoclass:: RawReactionClearEvent()\n :members:\n\nRawReactionClearEmojiEvent\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawReactionClearEmojiEvent\n\n.. autoclass:: RawReactionClearEmojiEvent()\n :members:\n\nRawIntegrationDeleteEvent\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawIntegrationDeleteEvent\n\n.. autoclass:: RawIntegrationDeleteEvent()\n :members:\n\nRawThreadUpdateEvent\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawThreadUpdateEvent\n\n.. autoclass:: RawThreadUpdateEvent()\n :members:\n\nRawThreadMembersUpdate\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawThreadMembersUpdate\n\n.. autoclass:: RawThreadMembersUpdate()\n :members:\n\nRawThreadDeleteEvent\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawThreadDeleteEvent\n\n.. autoclass:: RawThreadDeleteEvent()\n :members:\n\nRawTypingEvent\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawTypingEvent\n\n.. autoclass:: RawTypingEvent()\n :members:\n\nRawMemberRemoveEvent\n~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawMemberRemoveEvent\n\n.. autoclass:: RawMemberRemoveEvent()\n :members:\n\nRawAppCommandPermissionsUpdateEvent\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RawAppCommandPermissionsUpdateEvent\n\n.. autoclass:: RawAppCommandPermissionsUpdateEvent()\n :members:\n\nPartialWebhookGuild\n~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialWebhookGuild\n\n.. autoclass:: PartialWebhookGuild()\n :members:\n\nPartialWebhookChannel\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialWebhookChannel\n\n.. autoclass:: PartialWebhookChannel()\n :members:\n\n.. _discord_api_data:\n\nData Classes\n--------------\n\nSome classes are just there to be data containers, this lists them.\n\nUnlike :ref:`models <discord_api_models>` you are allowed to create\nmost of these yourself, even if they can also be used to hold attributes.\n\nNearly all classes here have :ref:`py:slots` defined which means that it is\nimpossible to have dynamic attributes to the data classes.\n\nThe only exception to this rule is :class:`Object`, which is made with\ndynamic attributes in mind.\n\n\nObject\n~~~~~~~\n\n.. attributetable:: Object\n\n.. autoclass:: Object\n :members:\n\nEmbed\n~~~~~~\n\n.. attributetable:: Embed\n\n.. autoclass:: Embed\n :members:\n\nAllowedMentions\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: AllowedMentions\n\n.. autoclass:: AllowedMentions\n :members:\n\nMessageReference\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: MessageReference\n\n.. 
autoclass:: MessageReference\n :members:\n\nPartialMessage\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PartialMessage\n\n.. autoclass:: PartialMessage\n :members:\n\nMessageApplication\n~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: MessageApplication\n\n.. autoclass:: MessageApplication\n :members:\n\nRoleSubscriptionInfo\n~~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: RoleSubscriptionInfo\n\n.. autoclass:: RoleSubscriptionInfo\n :members:\n\nIntents\n~~~~~~~~~~\n\n.. attributetable:: Intents\n\n.. autoclass:: Intents\n :members:\n\nMemberCacheFlags\n~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: MemberCacheFlags\n\n.. autoclass:: MemberCacheFlags\n :members:\n\nApplicationFlags\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: ApplicationFlags\n\n.. autoclass:: ApplicationFlags\n :members:\n\nChannelFlags\n~~~~~~~~~~~~~~\n\n.. attributetable:: ChannelFlags\n\n.. autoclass:: ChannelFlags\n :members:\n\nAutoModPresets\n~~~~~~~~~~~~~~\n\n.. attributetable:: AutoModPresets\n\n.. autoclass:: AutoModPresets\n :members:\n\nAutoModRuleAction\n~~~~~~~~~~~~~~~~~\n\n.. attributetable:: AutoModRuleAction\n\n.. autoclass:: AutoModRuleAction\n :members:\n\nAutoModTrigger\n~~~~~~~~~~~~~~\n\n.. attributetable:: AutoModTrigger\n\n.. autoclass:: AutoModTrigger\n :members:\n\nFile\n~~~~~\n\n.. attributetable:: File\n\n.. autoclass:: File\n :members:\n\nColour\n~~~~~~\n\n.. attributetable:: Colour\n\n.. autoclass:: Colour\n :members:\n\nBaseActivity\n~~~~~~~~~~~~~~\n\n.. attributetable:: BaseActivity\n\n.. autoclass:: BaseActivity\n :members:\n\nActivity\n~~~~~~~~~\n\n.. attributetable:: Activity\n\n.. autoclass:: Activity\n :members:\n\nGame\n~~~~~\n\n.. attributetable:: Game\n\n.. autoclass:: Game\n :members:\n\nStreaming\n~~~~~~~~~~~\n\n.. attributetable:: Streaming\n\n.. autoclass:: Streaming\n :members:\n\nCustomActivity\n~~~~~~~~~~~~~~~\n\n.. attributetable:: CustomActivity\n\n.. autoclass:: CustomActivity\n :members:\n\nPermissions\n~~~~~~~~~~~~\n\n.. attributetable:: Permissions\n\n.. autoclass:: Permissions\n :members:\n\nPermissionOverwrite\n~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: PermissionOverwrite\n\n.. autoclass:: PermissionOverwrite\n :members:\n\nSystemChannelFlags\n~~~~~~~~~~~~~~~~~~~~\n\n.. attributetable:: SystemChannelFlags\n\n.. autoclass:: SystemChannelFlags\n :members:\n\nMessageFlags\n~~~~~~~~~~~~\n\n.. attributetable:: MessageFlags\n\n.. autoclass:: MessageFlags\n :members:\n\nPublicUserFlags\n~~~~~~~~~~~~~~~\n\n.. attributetable:: PublicUserFlags\n\n.. autoclass:: PublicUserFlags\n :members:\n\nMemberFlags\n~~~~~~~~~~~~\n\n.. attributetable:: MemberFlags\n\n.. autoclass:: MemberFlags\n :members:\n\nAttachmentFlags\n~~~~~~~~~~~~~~~~\n\n.. attributetable:: AttachmentFlags\n\n.. autoclass:: AttachmentFlags\n :members:\n\nRoleFlags\n~~~~~~~~~~\n\n.. attributetable:: RoleFlags\n\n.. autoclass:: RoleFlags\n :members:\n\nSKUFlags\n~~~~~~~~~~~\n\n.. attributetable:: SKUFlags\n\n.. autoclass:: SKUFlags()\n :members:\n\nForumTag\n~~~~~~~~~\n\n.. attributetable:: ForumTag\n\n.. autoclass:: ForumTag\n :members:\n\n\nExceptions\n------------\n\nThe following exceptions are thrown by the library.\n\n.. autoexception:: DiscordException\n\n.. autoexception:: ClientException\n\n.. autoexception:: LoginFailure\n\n.. autoexception:: HTTPException\n :members:\n\n.. autoexception:: RateLimited\n :members:\n\n.. autoexception:: Forbidden\n\n.. autoexception:: NotFound\n\n.. autoexception:: DiscordServerError\n\n.. autoexception:: InvalidData\n\n.. autoexception:: GatewayNotFound\n\n.. 
autoexception:: ConnectionClosed\n\n.. autoexception:: PrivilegedIntentsRequired\n\n.. autoexception:: InteractionResponded\n\n.. autoexception:: discord.opus.OpusError\n\n.. autoexception:: discord.opus.OpusNotLoaded\n\nException Hierarchy\n~~~~~~~~~~~~~~~~~~~~~\n\n.. exception_hierarchy::\n\n - :exc:`Exception`\n - :exc:`DiscordException`\n - :exc:`ClientException`\n - :exc:`InvalidData`\n - :exc:`LoginFailure`\n - :exc:`ConnectionClosed`\n - :exc:`PrivilegedIntentsRequired`\n - :exc:`InteractionResponded`\n - :exc:`GatewayNotFound`\n - :exc:`HTTPException`\n - :exc:`Forbidden`\n - :exc:`NotFound`\n - :exc:`DiscordServerError`\n - :exc:`app_commands.CommandSyncFailure`\n - :exc:`RateLimited`\n",
"path": "docs/api.rst"
}
] | 10_1 | python | import unittest
import sys
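# Sanity checks: the four guild incident MessageType members should exist with values 36-39.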
class TestNewIncidentMessageTypes(unittest.TestCase):
def setUp(self):
from unittest.mock import Mock
# Mock data for testing
self.mock_state = Mock()
self.mock_channel = Mock()
        self.mock_data = {
            'type': 0,
            'content': '',
            'author': {'id': 123, 'username': 'TestUser', 'avatar': None, 'discriminator': '0001'},
            'channel_id': 123456789,
        }
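        # These fixtures sketch a minimal message payload; neither test below actually uses them.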
def test_incident_message_types(self):
from discord import MessageType
# Check if new incident message types are present in the MessageType enumeration
new_types = [
'guild_incident_alert_mode_enabled',
'guild_incident_alert_mode_disabled',
'guild_incident_report_raid',
'guild_incident_report_false_alarm'
]
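        # Names must match the MessageType members exactly; their numeric values (36-39) are checked separately below.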
# Verify the presence of each new message type
for type_name in new_types:
with self.subTest(type_name=type_name):
self.assertTrue(any(mt.name == type_name for mt in MessageType), f"{type_name} is missing")
def test_incident_message_type_values(self):
from discord import MessageType
        # Check that the new incident message type values were added to the MessageType enumeration
new_type_values = [36, 37, 38, 39]
# Verify the presence of each new message type value
for value in new_type_values:
with self.subTest(value=value):
self.assertIn(value, [mt.value for mt in MessageType], f"Message type value {value} is missing")
def main():
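    # Run only this TestCase and exit with a nonzero status on failure so a harness can key off the return code.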
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestNewIncidentMessageTypes))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/discord.py | In the `discord/state.py` file, locate the `parse_entitlement_delete` method within the `ConnectionState` class and change the event name in the dispatch call from 'entitlement_update' to 'entitlement_delete'. This ensures the correct event is dispatched when `parse_entitlement_delete` is invoked. | 99618c8 | discord | python3.9 | 7d159920 | diff --git a/discord/state.py b/discord/state.py
--- a/discord/state.py
+++ b/discord/state.py
@@ -1595,7 +1595,7 @@ class ConnectionState(Generic[ClientT]):
def parse_entitlement_delete(self, data: gw.EntitlementDeleteEvent) -> None:
entitlement = Entitlement(data=data, state=self)
- self.dispatch('entitlement_update', entitlement)
+ self.dispatch('entitlement_delete', entitlement)
def _get_reaction_user(self, channel: MessageableChannel, user_id: int) -> Optional[Union[User, Member]]:
if isinstance(channel, (TextChannel, Thread, VoiceChannel)):
| [
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nfrom collections import deque, OrderedDict\nimport copy\nimport logging\nfrom typing import (\n Dict,\n Optional,\n TYPE_CHECKING,\n Type,\n Union,\n Callable,\n Any,\n List,\n TypeVar,\n Coroutine,\n Sequence,\n Generic,\n Tuple,\n Deque,\n Literal,\n overload,\n)\nimport weakref\nimport inspect\n\nimport os\n\nfrom .guild import Guild\nfrom .activity import BaseActivity\nfrom .sku import Entitlement\nfrom .user import User, ClientUser\nfrom .emoji import Emoji\nfrom .mentions import AllowedMentions\nfrom .partial_emoji import PartialEmoji\nfrom .message import Message\nfrom .channel import *\nfrom .channel import _channel_factory\nfrom .raw_models import *\nfrom .member import Member\nfrom .role import Role\nfrom .enums import ChannelType, try_enum, Status\nfrom . 
import utils\nfrom .flags import ApplicationFlags, Intents, MemberCacheFlags\nfrom .invite import Invite\nfrom .integrations import _integration_factory\nfrom .interactions import Interaction\nfrom .ui.view import ViewStore, View\nfrom .scheduled_event import ScheduledEvent\nfrom .stage_instance import StageInstance\nfrom .threads import Thread, ThreadMember\nfrom .sticker import GuildSticker\nfrom .automod import AutoModRule, AutoModAction\nfrom .audit_logs import AuditLogEntry\nfrom ._types import ClientT\n\nif TYPE_CHECKING:\n from .abc import PrivateChannel\n from .message import MessageableChannel\n from .guild import GuildChannel\n from .http import HTTPClient\n from .voice_client import VoiceProtocol\n from .gateway import DiscordWebSocket\n from .ui.item import Item\n from .ui.dynamic import DynamicItem\n from .app_commands import CommandTree, Translator\n\n from .types.automod import AutoModerationRule, AutoModerationActionExecution\n from .types.snowflake import Snowflake\n from .types.activity import Activity as ActivityPayload\n from .types.channel import DMChannel as DMChannelPayload\n from .types.user import User as UserPayload, PartialUser as PartialUserPayload\n from .types.emoji import Emoji as EmojiPayload, PartialEmoji as PartialEmojiPayload\n from .types.sticker import GuildSticker as GuildStickerPayload\n from .types.guild import Guild as GuildPayload\n from .types.message import Message as MessagePayload, PartialMessage as PartialMessagePayload\n from .types import gateway as gw\n from .types.command import GuildApplicationCommandPermissions as GuildApplicationCommandPermissionsPayload\n\n T = TypeVar('T')\n Channel = Union[GuildChannel, PrivateChannel, PartialMessageable]\n\n\nclass ChunkRequest:\n def __init__(\n self,\n guild_id: int,\n loop: asyncio.AbstractEventLoop,\n resolver: Callable[[int], Any],\n *,\n cache: bool = True,\n ) -> None:\n self.guild_id: int = guild_id\n self.resolver: Callable[[int], Any] = resolver\n self.loop: asyncio.AbstractEventLoop = loop\n self.cache: bool = cache\n self.nonce: str = os.urandom(16).hex()\n self.buffer: List[Member] = []\n self.waiters: List[asyncio.Future[List[Member]]] = []\n\n def add_members(self, members: List[Member]) -> None:\n self.buffer.extend(members)\n if self.cache:\n guild = self.resolver(self.guild_id)\n if guild is None:\n return\n\n for member in members:\n existing = guild.get_member(member.id)\n if existing is None or existing.joined_at is None:\n guild._add_member(member)\n\n async def wait(self) -> List[Member]:\n future = self.loop.create_future()\n self.waiters.append(future)\n try:\n return await future\n finally:\n self.waiters.remove(future)\n\n def get_future(self) -> asyncio.Future[List[Member]]:\n future = self.loop.create_future()\n self.waiters.append(future)\n return future\n\n def done(self) -> None:\n for future in self.waiters:\n if not future.done():\n future.set_result(self.buffer)\n\n\n_log = logging.getLogger(__name__)\n\n\nasync def logging_coroutine(coroutine: Coroutine[Any, Any, T], *, info: str) -> Optional[T]:\n try:\n await coroutine\n except Exception:\n _log.exception('Exception occurred during %s', info)\n\n\nclass ConnectionState(Generic[ClientT]):\n if TYPE_CHECKING:\n _get_websocket: Callable[..., DiscordWebSocket]\n _get_client: Callable[..., ClientT]\n _parsers: Dict[str, Callable[[Dict[str, Any]], None]]\n\n def __init__(\n self,\n *,\n dispatch: Callable[..., Any],\n handlers: Dict[str, Callable[..., Any]],\n hooks: Dict[str, Callable[..., Coroutine[Any, Any, 
Any]]],\n http: HTTPClient,\n **options: Any,\n ) -> None:\n # Set later, after Client.login\n self.loop: asyncio.AbstractEventLoop = utils.MISSING\n self.http: HTTPClient = http\n self.max_messages: Optional[int] = options.get('max_messages', 1000)\n if self.max_messages is not None and self.max_messages <= 0:\n self.max_messages = 1000\n\n self.dispatch: Callable[..., Any] = dispatch\n self.handlers: Dict[str, Callable[..., Any]] = handlers\n self.hooks: Dict[str, Callable[..., Coroutine[Any, Any, Any]]] = hooks\n self.shard_count: Optional[int] = None\n self._ready_task: Optional[asyncio.Task] = None\n self.application_id: Optional[int] = utils._get_as_snowflake(options, 'application_id')\n self.application_flags: ApplicationFlags = utils.MISSING\n self.heartbeat_timeout: float = options.get('heartbeat_timeout', 60.0)\n self.guild_ready_timeout: float = options.get('guild_ready_timeout', 2.0)\n if self.guild_ready_timeout < 0:\n raise ValueError('guild_ready_timeout cannot be negative')\n\n allowed_mentions = options.get('allowed_mentions')\n\n if allowed_mentions is not None and not isinstance(allowed_mentions, AllowedMentions):\n raise TypeError('allowed_mentions parameter must be AllowedMentions')\n\n self.allowed_mentions: Optional[AllowedMentions] = allowed_mentions\n self._chunk_requests: Dict[Union[int, str], ChunkRequest] = {}\n\n activity = options.get('activity', None)\n if activity:\n if not isinstance(activity, BaseActivity):\n raise TypeError('activity parameter must derive from BaseActivity.')\n\n activity = activity.to_dict()\n\n status = options.get('status', None)\n if status:\n if status is Status.offline:\n status = 'invisible'\n else:\n status = str(status)\n\n intents = options.get('intents', None)\n if intents is not None:\n if not isinstance(intents, Intents):\n raise TypeError(f'intents parameter must be Intent not {type(intents)!r}')\n else:\n intents = Intents.default()\n\n if not intents.guilds:\n _log.warning('Guilds intent seems to be disabled. 
This may cause state related issues.')\n\n self._chunk_guilds: bool = options.get('chunk_guilds_at_startup', intents.members)\n\n # Ensure these two are set properly\n if not intents.members and self._chunk_guilds:\n raise ValueError('Intents.members must be enabled to chunk guilds at startup.')\n\n cache_flags = options.get('member_cache_flags', None)\n if cache_flags is None:\n cache_flags = MemberCacheFlags.from_intents(intents)\n else:\n if not isinstance(cache_flags, MemberCacheFlags):\n raise TypeError(f'member_cache_flags parameter must be MemberCacheFlags not {type(cache_flags)!r}')\n\n cache_flags._verify_intents(intents)\n\n self.member_cache_flags: MemberCacheFlags = cache_flags\n self._activity: Optional[ActivityPayload] = activity\n self._status: Optional[str] = status\n self._intents: Intents = intents\n self._command_tree: Optional[CommandTree] = None\n self._translator: Optional[Translator] = None\n\n if not intents.members or cache_flags._empty:\n self.store_user = self.store_user_no_intents\n\n self.parsers: Dict[str, Callable[[Any], None]]\n self.parsers = parsers = {}\n for attr, func in inspect.getmembers(self):\n if attr.startswith('parse_'):\n parsers[attr[6:].upper()] = func\n\n self.clear()\n\n # For some reason Discord still sends emoji/sticker data in payloads\n # This makes it hard to actually swap out the appropriate store methods\n # So this is checked instead, it's a small penalty to pay\n @property\n def cache_guild_expressions(self) -> bool:\n return self._intents.emojis_and_stickers\n\n async def close(self) -> None:\n for voice in self.voice_clients:\n try:\n await voice.disconnect(force=True)\n except Exception:\n # if an error happens during disconnects, disregard it.\n pass\n\n if self._translator:\n await self._translator.unload()\n\n # Purposefully don't call `clear` because users rely on cache being available post-close\n\n def clear(self, *, views: bool = True) -> None:\n self.user: Optional[ClientUser] = None\n self._users: weakref.WeakValueDictionary[int, User] = weakref.WeakValueDictionary()\n self._emojis: Dict[int, Emoji] = {}\n self._stickers: Dict[int, GuildSticker] = {}\n self._guilds: Dict[int, Guild] = {}\n if views:\n self._view_store: ViewStore = ViewStore(self)\n\n self._voice_clients: Dict[int, VoiceProtocol] = {}\n\n # LRU of max size 128\n self._private_channels: OrderedDict[int, PrivateChannel] = OrderedDict()\n # extra dict to look up private channels by user id\n self._private_channels_by_user: Dict[int, DMChannel] = {}\n if self.max_messages is not None:\n self._messages: Optional[Deque[Message]] = deque(maxlen=self.max_messages)\n else:\n self._messages: Optional[Deque[Message]] = None\n\n def process_chunk_requests(self, guild_id: int, nonce: Optional[str], members: List[Member], complete: bool) -> None:\n removed = []\n for key, request in self._chunk_requests.items():\n if request.guild_id == guild_id and request.nonce == nonce:\n request.add_members(members)\n if complete:\n request.done()\n removed.append(key)\n\n for key in removed:\n del self._chunk_requests[key]\n\n def call_handlers(self, key: str, *args: Any, **kwargs: Any) -> None:\n try:\n func = self.handlers[key]\n except KeyError:\n pass\n else:\n func(*args, **kwargs)\n\n async def call_hooks(self, key: str, *args: Any, **kwargs: Any) -> None:\n try:\n coro = self.hooks[key]\n except KeyError:\n pass\n else:\n await coro(*args, **kwargs)\n\n @property\n def self_id(self) -> Optional[int]:\n u = self.user\n return u.id if u else None\n\n @property\n def 
intents(self) -> Intents:\n ret = Intents.none()\n ret.value = self._intents.value\n return ret\n\n @property\n def voice_clients(self) -> List[VoiceProtocol]:\n return list(self._voice_clients.values())\n\n def _get_voice_client(self, guild_id: Optional[int]) -> Optional[VoiceProtocol]:\n # the keys of self._voice_clients are ints\n return self._voice_clients.get(guild_id) # type: ignore\n\n def _add_voice_client(self, guild_id: int, voice: VoiceProtocol) -> None:\n self._voice_clients[guild_id] = voice\n\n def _remove_voice_client(self, guild_id: int) -> None:\n self._voice_clients.pop(guild_id, None)\n\n def _update_references(self, ws: DiscordWebSocket) -> None:\n for vc in self.voice_clients:\n vc.main_ws = ws # type: ignore # Silencing the unknown attribute (ok at runtime).\n\n def store_user(self, data: Union[UserPayload, PartialUserPayload], *, cache: bool = True) -> User:\n # this way is 300% faster than `dict.setdefault`.\n user_id = int(data['id'])\n try:\n return self._users[user_id]\n except KeyError:\n user = User(state=self, data=data)\n if cache:\n self._users[user_id] = user\n return user\n\n def store_user_no_intents(self, data: Union[UserPayload, PartialUserPayload], *, cache: bool = True) -> User:\n return User(state=self, data=data)\n\n def create_user(self, data: Union[UserPayload, PartialUserPayload]) -> User:\n return User(state=self, data=data)\n\n def get_user(self, id: int) -> Optional[User]:\n return self._users.get(id)\n\n def store_emoji(self, guild: Guild, data: EmojiPayload) -> Emoji:\n # the id will be present here\n emoji_id = int(data['id']) # type: ignore\n self._emojis[emoji_id] = emoji = Emoji(guild=guild, state=self, data=data)\n return emoji\n\n def store_sticker(self, guild: Guild, data: GuildStickerPayload) -> GuildSticker:\n sticker_id = int(data['id'])\n self._stickers[sticker_id] = sticker = GuildSticker(state=self, data=data)\n return sticker\n\n def store_view(self, view: View, message_id: Optional[int] = None, interaction_id: Optional[int] = None) -> None:\n if interaction_id is not None:\n self._view_store.remove_interaction_mapping(interaction_id)\n self._view_store.add_view(view, message_id)\n\n def prevent_view_updates_for(self, message_id: int) -> Optional[View]:\n return self._view_store.remove_message_tracking(message_id)\n\n def store_dynamic_items(self, *items: Type[DynamicItem[Item[Any]]]) -> None:\n self._view_store.add_dynamic_items(*items)\n\n def remove_dynamic_items(self, *items: Type[DynamicItem[Item[Any]]]) -> None:\n self._view_store.remove_dynamic_items(*items)\n\n @property\n def persistent_views(self) -> Sequence[View]:\n return self._view_store.persistent_views\n\n @property\n def guilds(self) -> Sequence[Guild]:\n return utils.SequenceProxy(self._guilds.values())\n\n def _get_guild(self, guild_id: Optional[int]) -> Optional[Guild]:\n # the keys of self._guilds are ints\n return self._guilds.get(guild_id) # type: ignore\n\n def _get_or_create_unavailable_guild(self, guild_id: int) -> Guild:\n return self._guilds.get(guild_id) or Guild._create_unavailable(state=self, guild_id=guild_id)\n\n def _add_guild(self, guild: Guild) -> None:\n self._guilds[guild.id] = guild\n\n def _remove_guild(self, guild: Guild) -> None:\n self._guilds.pop(guild.id, None)\n\n for emoji in guild.emojis:\n self._emojis.pop(emoji.id, None)\n\n for sticker in guild.stickers:\n self._stickers.pop(sticker.id, None)\n\n del guild\n\n @property\n def emojis(self) -> Sequence[Emoji]:\n return utils.SequenceProxy(self._emojis.values())\n\n @property\n 
def stickers(self) -> Sequence[GuildSticker]:\n return utils.SequenceProxy(self._stickers.values())\n\n def get_emoji(self, emoji_id: Optional[int]) -> Optional[Emoji]:\n # the keys of self._emojis are ints\n return self._emojis.get(emoji_id) # type: ignore\n\n def get_sticker(self, sticker_id: Optional[int]) -> Optional[GuildSticker]:\n # the keys of self._stickers are ints\n return self._stickers.get(sticker_id) # type: ignore\n\n @property\n def private_channels(self) -> Sequence[PrivateChannel]:\n return utils.SequenceProxy(self._private_channels.values())\n\n def _get_private_channel(self, channel_id: Optional[int]) -> Optional[PrivateChannel]:\n try:\n # the keys of self._private_channels are ints\n value = self._private_channels[channel_id] # type: ignore\n except KeyError:\n return None\n else:\n # Type narrowing can't figure out that channel_id isn't None here\n self._private_channels.move_to_end(channel_id) # type: ignore\n return value\n\n def _get_private_channel_by_user(self, user_id: Optional[int]) -> Optional[DMChannel]:\n # the keys of self._private_channels are ints\n return self._private_channels_by_user.get(user_id) # type: ignore\n\n def _add_private_channel(self, channel: PrivateChannel) -> None:\n channel_id = channel.id\n self._private_channels[channel_id] = channel\n\n if len(self._private_channels) > 128:\n _, to_remove = self._private_channels.popitem(last=False)\n if isinstance(to_remove, DMChannel) and to_remove.recipient:\n self._private_channels_by_user.pop(to_remove.recipient.id, None)\n\n if isinstance(channel, DMChannel) and channel.recipient:\n self._private_channels_by_user[channel.recipient.id] = channel\n\n def add_dm_channel(self, data: DMChannelPayload) -> DMChannel:\n # self.user is *always* cached when this is called\n channel = DMChannel(me=self.user, state=self, data=data) # type: ignore\n self._add_private_channel(channel)\n return channel\n\n def _remove_private_channel(self, channel: PrivateChannel) -> None:\n self._private_channels.pop(channel.id, None)\n if isinstance(channel, DMChannel):\n recipient = channel.recipient\n if recipient is not None:\n self._private_channels_by_user.pop(recipient.id, None)\n\n def _get_message(self, msg_id: Optional[int]) -> Optional[Message]:\n return utils.find(lambda m: m.id == msg_id, reversed(self._messages)) if self._messages else None\n\n def _add_guild_from_data(self, data: GuildPayload) -> Guild:\n guild = Guild(data=data, state=self)\n self._add_guild(guild)\n return guild\n\n def _guild_needs_chunking(self, guild: Guild) -> bool:\n # If presences are enabled then we get back the old guild.large behaviour\n return self._chunk_guilds and not guild.chunked and not (self._intents.presences and not guild.large)\n\n def _get_guild_channel(\n self, data: PartialMessagePayload, guild_id: Optional[int] = None\n ) -> Tuple[Union[Channel, Thread], Optional[Guild]]:\n channel_id = int(data['channel_id'])\n try:\n guild_id = guild_id or int(data['guild_id'])\n guild = self._get_guild(guild_id)\n except KeyError:\n channel = DMChannel._from_message(self, channel_id)\n guild = None\n else:\n channel = guild and guild._resolve_channel(channel_id)\n\n return channel or PartialMessageable(state=self, guild_id=guild_id, id=channel_id), guild\n\n async def chunker(\n self, guild_id: int, query: str = '', limit: int = 0, presences: bool = False, *, nonce: Optional[str] = None\n ) -> None:\n ws = self._get_websocket(guild_id) # This is ignored upstream\n await ws.request_chunks(guild_id, query=query, limit=limit, 
presences=presences, nonce=nonce)\n\n async def query_members(\n self, guild: Guild, query: Optional[str], limit: int, user_ids: Optional[List[int]], cache: bool, presences: bool\n ) -> List[Member]:\n guild_id = guild.id\n ws = self._get_websocket(guild_id)\n if ws is None:\n raise RuntimeError('Somehow do not have a websocket for this guild_id')\n\n request = ChunkRequest(guild.id, self.loop, self._get_guild, cache=cache)\n self._chunk_requests[request.nonce] = request\n\n try:\n # start the query operation\n await ws.request_chunks(\n guild_id, query=query, limit=limit, user_ids=user_ids, presences=presences, nonce=request.nonce\n )\n return await asyncio.wait_for(request.wait(), timeout=30.0)\n except asyncio.TimeoutError:\n _log.warning('Timed out waiting for chunks with query %r and limit %d for guild_id %d', query, limit, guild_id)\n raise\n\n async def _delay_ready(self) -> None:\n try:\n states = []\n while True:\n # this snippet of code is basically waiting N seconds\n # until the last GUILD_CREATE was sent\n try:\n guild = await asyncio.wait_for(self._ready_state.get(), timeout=self.guild_ready_timeout)\n except asyncio.TimeoutError:\n break\n else:\n if self._guild_needs_chunking(guild):\n future = await self.chunk_guild(guild, wait=False)\n states.append((guild, future))\n else:\n if guild.unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n for guild, future in states:\n timeout = self._chunk_timeout(guild)\n\n try:\n await asyncio.wait_for(future, timeout=timeout)\n except asyncio.TimeoutError:\n _log.warning('Shard ID %s timed out waiting for chunks for guild_id %s.', guild.shard_id, guild.id)\n\n if guild.unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n # remove the state\n try:\n del self._ready_state\n except AttributeError:\n pass # already been deleted somehow\n\n except asyncio.CancelledError:\n pass\n else:\n # dispatch the event\n self.call_handlers('ready')\n self.dispatch('ready')\n finally:\n self._ready_task = None\n\n def parse_ready(self, data: gw.ReadyEvent) -> None:\n if self._ready_task is not None:\n self._ready_task.cancel()\n\n self._ready_state: asyncio.Queue[Guild] = asyncio.Queue()\n self.clear(views=False)\n self.user = user = ClientUser(state=self, data=data['user'])\n self._users[user.id] = user # type: ignore\n\n if self.application_id is None:\n try:\n application = data['application']\n except KeyError:\n pass\n else:\n self.application_id = utils._get_as_snowflake(application, 'id')\n self.application_flags: ApplicationFlags = ApplicationFlags._from_value(application['flags'])\n\n for guild_data in data['guilds']:\n self._add_guild_from_data(guild_data) # type: ignore\n\n self.dispatch('connect')\n self._ready_task = asyncio.create_task(self._delay_ready())\n\n def parse_resumed(self, data: gw.ResumedEvent) -> None:\n self.dispatch('resumed')\n\n def parse_message_create(self, data: gw.MessageCreateEvent) -> None:\n channel, _ = self._get_guild_channel(data)\n # channel would be the correct type here\n message = Message(channel=channel, data=data, state=self) # type: ignore\n self.dispatch('message', message)\n if self._messages is not None:\n self._messages.append(message)\n # we ensure that the channel is either a TextChannel, VoiceChannel, or Thread\n if channel and channel.__class__ in (TextChannel, VoiceChannel, Thread, StageChannel):\n channel.last_message_id = message.id # type: ignore\n\n def 
parse_message_delete(self, data: gw.MessageDeleteEvent) -> None:\n raw = RawMessageDeleteEvent(data)\n found = self._get_message(raw.message_id)\n raw.cached_message = found\n self.dispatch('raw_message_delete', raw)\n if self._messages is not None and found is not None:\n self.dispatch('message_delete', found)\n self._messages.remove(found)\n\n def parse_message_delete_bulk(self, data: gw.MessageDeleteBulkEvent) -> None:\n raw = RawBulkMessageDeleteEvent(data)\n if self._messages:\n found_messages = [message for message in self._messages if message.id in raw.message_ids]\n else:\n found_messages = []\n raw.cached_messages = found_messages\n self.dispatch('raw_bulk_message_delete', raw)\n if found_messages:\n self.dispatch('bulk_message_delete', found_messages)\n for msg in found_messages:\n # self._messages won't be None here\n self._messages.remove(msg) # type: ignore\n\n def parse_message_update(self, data: gw.MessageUpdateEvent) -> None:\n raw = RawMessageUpdateEvent(data)\n message = self._get_message(raw.message_id)\n if message is not None:\n older_message = copy.copy(message)\n raw.cached_message = older_message\n self.dispatch('raw_message_edit', raw)\n message._update(data)\n # Coerce the `after` parameter to take the new updated Member\n # ref: #5999\n older_message.author = message.author\n self.dispatch('message_edit', older_message, message)\n else:\n self.dispatch('raw_message_edit', raw)\n\n if 'components' in data:\n try:\n entity_id = int(data['interaction']['id'])\n except (KeyError, ValueError):\n entity_id = raw.message_id\n\n if self._view_store.is_message_tracked(entity_id):\n self._view_store.update_from_message(entity_id, data['components'])\n\n def parse_message_reaction_add(self, data: gw.MessageReactionAddEvent) -> None:\n emoji = PartialEmoji.from_dict(data['emoji'])\n emoji._state = self\n raw = RawReactionActionEvent(data, emoji, 'REACTION_ADD')\n\n member_data = data.get('member')\n if member_data:\n guild = self._get_guild(raw.guild_id)\n if guild is not None:\n raw.member = Member(data=member_data, guild=guild, state=self)\n else:\n raw.member = None\n else:\n raw.member = None\n self.dispatch('raw_reaction_add', raw)\n\n # rich interface here\n message = self._get_message(raw.message_id)\n if message is not None:\n emoji = self._upgrade_partial_emoji(emoji)\n reaction = message._add_reaction(data, emoji, raw.user_id)\n user = raw.member or self._get_reaction_user(message.channel, raw.user_id)\n\n if user:\n self.dispatch('reaction_add', reaction, user)\n\n def parse_message_reaction_remove_all(self, data: gw.MessageReactionRemoveAllEvent) -> None:\n raw = RawReactionClearEvent(data)\n self.dispatch('raw_reaction_clear', raw)\n\n message = self._get_message(raw.message_id)\n if message is not None:\n old_reactions = message.reactions.copy()\n message.reactions.clear()\n self.dispatch('reaction_clear', message, old_reactions)\n\n def parse_message_reaction_remove(self, data: gw.MessageReactionRemoveEvent) -> None:\n emoji = PartialEmoji.from_dict(data['emoji'])\n emoji._state = self\n raw = RawReactionActionEvent(data, emoji, 'REACTION_REMOVE')\n self.dispatch('raw_reaction_remove', raw)\n\n message = self._get_message(raw.message_id)\n if message is not None:\n emoji = self._upgrade_partial_emoji(emoji)\n try:\n reaction = message._remove_reaction(data, emoji, raw.user_id)\n except (AttributeError, ValueError): # eventual consistency lol\n pass\n else:\n user = self._get_reaction_user(message.channel, raw.user_id)\n if user:\n 
self.dispatch('reaction_remove', reaction, user)\n\n def parse_message_reaction_remove_emoji(self, data: gw.MessageReactionRemoveEmojiEvent) -> None:\n emoji = PartialEmoji.from_dict(data['emoji'])\n emoji._state = self\n raw = RawReactionClearEmojiEvent(data, emoji)\n self.dispatch('raw_reaction_clear_emoji', raw)\n\n message = self._get_message(raw.message_id)\n if message is not None:\n try:\n reaction = message._clear_emoji(emoji)\n except (AttributeError, ValueError): # eventual consistency lol\n pass\n else:\n if reaction:\n self.dispatch('reaction_clear_emoji', reaction)\n\n def parse_interaction_create(self, data: gw.InteractionCreateEvent) -> None:\n interaction = Interaction(data=data, state=self)\n if data['type'] in (2, 4) and self._command_tree: # application command and auto complete\n self._command_tree._from_interaction(interaction)\n elif data['type'] == 3: # interaction component\n # These keys are always there for this interaction type\n inner_data = data['data']\n custom_id = inner_data['custom_id']\n component_type = inner_data['component_type']\n self._view_store.dispatch_view(component_type, custom_id, interaction)\n elif data['type'] == 5: # modal submit\n # These keys are always there for this interaction type\n inner_data = data['data']\n custom_id = inner_data['custom_id']\n components = inner_data['components']\n self._view_store.dispatch_modal(custom_id, interaction, components)\n self.dispatch('interaction', interaction)\n\n def parse_presence_update(self, data: gw.PresenceUpdateEvent) -> None:\n guild_id = utils._get_as_snowflake(data, 'guild_id')\n # guild_id won't be None here\n guild = self._get_guild(guild_id)\n if guild is None:\n _log.debug('PRESENCE_UPDATE referencing an unknown guild ID: %s. Discarding.', guild_id)\n return\n\n user = data['user']\n member_id = int(user['id'])\n member = guild.get_member(member_id)\n if member is None:\n _log.debug('PRESENCE_UPDATE referencing an unknown member ID: %s. 
Discarding', member_id)\n return\n\n old_member = Member._copy(member)\n user_update = member._presence_update(data=data, user=user)\n if user_update:\n self.dispatch('user_update', user_update[0], user_update[1])\n\n self.dispatch('presence_update', old_member, member)\n\n def parse_user_update(self, data: gw.UserUpdateEvent) -> None:\n if self.user:\n self.user._update(data)\n\n def parse_invite_create(self, data: gw.InviteCreateEvent) -> None:\n invite = Invite.from_gateway(state=self, data=data)\n self.dispatch('invite_create', invite)\n\n def parse_invite_delete(self, data: gw.InviteDeleteEvent) -> None:\n invite = Invite.from_gateway(state=self, data=data)\n self.dispatch('invite_delete', invite)\n\n def parse_channel_delete(self, data: gw.ChannelDeleteEvent) -> None:\n guild = self._get_guild(utils._get_as_snowflake(data, 'guild_id'))\n channel_id = int(data['id'])\n if guild is not None:\n channel = guild.get_channel(channel_id)\n if channel is not None:\n guild._remove_channel(channel)\n self.dispatch('guild_channel_delete', channel)\n\n if channel.type in (ChannelType.voice, ChannelType.stage_voice):\n for s in guild.scheduled_events:\n if s.channel_id == channel.id:\n guild._scheduled_events.pop(s.id)\n self.dispatch('scheduled_event_delete', s)\n\n def parse_channel_update(self, data: gw.ChannelUpdateEvent) -> None:\n channel_type = try_enum(ChannelType, data.get('type'))\n channel_id = int(data['id'])\n if channel_type is ChannelType.group:\n channel = self._get_private_channel(channel_id)\n if channel is not None:\n old_channel = copy.copy(channel)\n # the channel is a GroupChannel rather than PrivateChannel\n channel._update_group(data) # type: ignore\n self.dispatch('private_channel_update', old_channel, channel)\n return\n else:\n _log.debug('CHANNEL_UPDATE referencing an unknown channel ID: %s. Discarding.', channel_id)\n\n guild_id = utils._get_as_snowflake(data, 'guild_id')\n guild = self._get_guild(guild_id)\n if guild is not None:\n channel = guild.get_channel(channel_id)\n if channel is not None:\n old_channel = copy.copy(channel)\n channel._update(guild, data) # type: ignore # the data payload varies based on the channel type.\n self.dispatch('guild_channel_update', old_channel, channel)\n else:\n _log.debug('CHANNEL_UPDATE referencing an unknown channel ID: %s. Discarding.', channel_id)\n else:\n _log.debug('CHANNEL_UPDATE referencing an unknown guild ID: %s. Discarding.', guild_id)\n\n def parse_channel_create(self, data: gw.ChannelCreateEvent) -> None:\n factory, ch_type = _channel_factory(data['type'])\n if factory is None:\n _log.debug('CHANNEL_CREATE referencing an unknown channel type %s. Discarding.', data['type'])\n return\n\n guild_id = utils._get_as_snowflake(data, 'guild_id')\n guild = self._get_guild(guild_id)\n if guild is not None:\n # the factory can't be a DMChannel or GroupChannel here\n channel = factory(guild=guild, state=self, data=data) # type: ignore\n guild._add_channel(channel) # type: ignore\n self.dispatch('guild_channel_create', channel)\n else:\n _log.debug('CHANNEL_CREATE referencing an unknown guild ID: %s. 
Discarding.', guild_id)\n return\n\n def parse_channel_pins_update(self, data: gw.ChannelPinsUpdateEvent) -> None:\n channel_id = int(data['channel_id'])\n try:\n guild = self._get_guild(int(data['guild_id']))\n except KeyError:\n guild = None\n channel = self._get_private_channel(channel_id)\n else:\n channel = guild and guild._resolve_channel(channel_id)\n\n if channel is None:\n _log.debug('CHANNEL_PINS_UPDATE referencing an unknown channel ID: %s. Discarding.', channel_id)\n return\n\n last_pin = utils.parse_time(data.get('last_pin_timestamp'))\n\n if guild is None:\n self.dispatch('private_channel_pins_update', channel, last_pin)\n else:\n self.dispatch('guild_channel_pins_update', channel, last_pin)\n\n def parse_thread_create(self, data: gw.ThreadCreateEvent) -> None:\n guild_id = int(data['guild_id'])\n guild: Optional[Guild] = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_CREATE referencing an unknown guild ID: %s. Discarding', guild_id)\n return\n\n thread = Thread(guild=guild, state=guild._state, data=data)\n has_thread = guild.get_thread(thread.id)\n guild._add_thread(thread)\n if not has_thread:\n if data.get('newly_created'):\n if thread.parent.__class__ is ForumChannel:\n thread.parent.last_message_id = thread.id # type: ignore\n\n self.dispatch('thread_create', thread)\n else:\n self.dispatch('thread_join', thread)\n\n def parse_thread_update(self, data: gw.ThreadUpdateEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_UPDATE referencing an unknown guild ID: %s. Discarding', guild_id)\n return\n\n raw = RawThreadUpdateEvent(data)\n raw.thread = thread = guild.get_thread(raw.thread_id)\n self.dispatch('raw_thread_update', raw)\n if thread is not None:\n old = copy.copy(thread)\n thread._update(data)\n if thread.archived:\n guild._remove_thread(thread)\n self.dispatch('thread_update', old, thread)\n else:\n thread = Thread(guild=guild, state=guild._state, data=data)\n if not thread.archived:\n guild._add_thread(thread)\n self.dispatch('thread_join', thread)\n\n def parse_thread_delete(self, data: gw.ThreadDeleteEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_DELETE referencing an unknown guild ID: %s. Discarding', guild_id)\n return\n\n raw = RawThreadDeleteEvent(data)\n raw.thread = thread = guild.get_thread(raw.thread_id)\n self.dispatch('raw_thread_delete', raw)\n\n if thread is not None:\n guild._remove_thread(thread)\n self.dispatch('thread_delete', thread)\n\n def parse_thread_list_sync(self, data: gw.ThreadListSyncEvent) -> None:\n guild_id = int(data['guild_id'])\n guild: Optional[Guild] = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_LIST_SYNC referencing an unknown guild ID: %s. 
Discarding', guild_id)\n return\n\n try:\n channel_ids = {int(i) for i in data['channel_ids']}\n except KeyError:\n # If not provided, then the entire guild is being synced\n # So all previous thread data should be overwritten\n previous_threads = guild._threads.copy()\n guild._clear_threads()\n else:\n previous_threads = guild._filter_threads(channel_ids)\n\n threads = {d['id']: guild._store_thread(d) for d in data.get('threads', [])}\n\n for member in data.get('members', []):\n try:\n # note: member['id'] is the thread_id\n thread = threads[member['id']]\n except KeyError:\n continue\n else:\n thread._add_member(ThreadMember(thread, member))\n\n for thread in threads.values():\n old = previous_threads.pop(thread.id, None)\n if old is None:\n self.dispatch('thread_join', thread)\n\n for thread in previous_threads.values():\n self.dispatch('thread_remove', thread)\n\n def parse_thread_member_update(self, data: gw.ThreadMemberUpdate) -> None:\n guild_id = int(data['guild_id'])\n guild: Optional[Guild] = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_MEMBER_UPDATE referencing an unknown guild ID: %s. Discarding', guild_id)\n return\n\n thread_id = int(data['id'])\n thread: Optional[Thread] = guild.get_thread(thread_id)\n if thread is None:\n _log.debug('THREAD_MEMBER_UPDATE referencing an unknown thread ID: %s. Discarding', thread_id)\n return\n\n member = ThreadMember(thread, data)\n thread.me = member\n\n def parse_thread_members_update(self, data: gw.ThreadMembersUpdate) -> None:\n guild_id = int(data['guild_id'])\n guild: Optional[Guild] = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_MEMBERS_UPDATE referencing an unknown guild ID: %s. Discarding', guild_id)\n return\n\n thread_id = int(data['id'])\n thread: Optional[Thread] = guild.get_thread(thread_id)\n raw = RawThreadMembersUpdate(data)\n if thread is None:\n _log.debug('THREAD_MEMBERS_UPDATE referencing an unknown thread ID: %s. Discarding', thread_id)\n return\n\n added_members = [ThreadMember(thread, d) for d in data.get('added_members', [])]\n removed_member_ids = [int(x) for x in data.get('removed_member_ids', [])]\n self_id = self.self_id\n for member in added_members:\n if member.id != self_id:\n thread._add_member(member)\n self.dispatch('thread_member_join', member)\n else:\n thread.me = member\n self.dispatch('thread_join', thread)\n\n for member_id in removed_member_ids:\n if member_id != self_id:\n member = thread._pop_member(member_id)\n self.dispatch('raw_thread_member_remove', raw)\n if member is not None:\n self.dispatch('thread_member_remove', member)\n else:\n self.dispatch('thread_remove', thread)\n\n def parse_guild_member_add(self, data: gw.GuildMemberAddEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('GUILD_MEMBER_ADD referencing an unknown guild ID: %s. 
Discarding.', data['guild_id'])\n return\n\n member = Member(guild=guild, data=data, state=self)\n if self.member_cache_flags.joined:\n guild._add_member(member)\n\n if guild._member_count is not None:\n guild._member_count += 1\n\n self.dispatch('member_join', member)\n\n def parse_guild_member_remove(self, data: gw.GuildMemberRemoveEvent) -> None:\n user = self.store_user(data['user'])\n raw = RawMemberRemoveEvent(data, user)\n\n guild = self._get_guild(raw.guild_id)\n if guild is not None:\n if guild._member_count is not None:\n guild._member_count -= 1\n\n member = guild.get_member(user.id)\n if member is not None:\n raw.user = member\n guild._remove_member(member)\n self.dispatch('member_remove', member)\n else:\n _log.debug('GUILD_MEMBER_REMOVE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n\n self.dispatch('raw_member_remove', raw)\n\n def parse_guild_member_update(self, data: gw.GuildMemberUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n user = data['user']\n user_id = int(user['id'])\n if guild is None:\n _log.debug('GUILD_MEMBER_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n member = guild.get_member(user_id)\n if member is not None:\n old_member = Member._copy(member)\n member._update(data)\n user_update = member._update_inner_user(user)\n if user_update:\n self.dispatch('user_update', user_update[0], user_update[1])\n\n self.dispatch('member_update', old_member, member)\n else:\n if self.member_cache_flags.joined:\n member = Member(data=data, guild=guild, state=self) # type: ignore # the data is not complete, contains a delta of values\n\n # Force an update on the inner user if necessary\n user_update = member._update_inner_user(user)\n if user_update:\n self.dispatch('user_update', user_update[0], user_update[1])\n\n guild._add_member(member)\n _log.debug('GUILD_MEMBER_UPDATE referencing an unknown member ID: %s. Discarding.', user_id)\n\n def parse_guild_emojis_update(self, data: gw.GuildEmojisUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('GUILD_EMOJIS_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n before_emojis = guild.emojis\n for emoji in before_emojis:\n self._emojis.pop(emoji.id, None)\n # guild won't be None here\n guild.emojis = tuple(map(lambda d: self.store_emoji(guild, d), data['emojis']))\n self.dispatch('guild_emojis_update', guild, before_emojis, guild.emojis)\n\n def parse_guild_stickers_update(self, data: gw.GuildStickersUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('GUILD_STICKERS_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n before_stickers = guild.stickers\n for emoji in before_stickers:\n self._stickers.pop(emoji.id, None)\n\n guild.stickers = tuple(map(lambda d: self.store_sticker(guild, d), data['stickers']))\n self.dispatch('guild_stickers_update', guild, before_stickers, guild.stickers)\n\n def parse_guild_audit_log_entry_create(self, data: gw.GuildAuditLogEntryCreate) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('GUILD_AUDIT_LOG_ENTRY_CREATE referencing an unknown guild ID: %s. 
Discarding.', data['guild_id'])\n return\n\n entry = AuditLogEntry(\n users=self._users,\n integrations={},\n app_commands={},\n automod_rules={},\n webhooks={},\n data=data,\n guild=guild,\n )\n\n self.dispatch('audit_log_entry_create', entry)\n\n def parse_auto_moderation_rule_create(self, data: AutoModerationRule) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('AUTO_MODERATION_RULE_CREATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n rule = AutoModRule(data=data, guild=guild, state=self)\n\n self.dispatch('automod_rule_create', rule)\n\n def parse_auto_moderation_rule_update(self, data: AutoModerationRule) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('AUTO_MODERATION_RULE_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n rule = AutoModRule(data=data, guild=guild, state=self)\n\n self.dispatch('automod_rule_update', rule)\n\n def parse_auto_moderation_rule_delete(self, data: AutoModerationRule) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('AUTO_MODERATION_RULE_DELETE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n rule = AutoModRule(data=data, guild=guild, state=self)\n\n self.dispatch('automod_rule_delete', rule)\n\n def parse_auto_moderation_action_execution(self, data: AutoModerationActionExecution) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('AUTO_MODERATION_ACTION_EXECUTION referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n execution = AutoModAction(data=data, state=self)\n\n self.dispatch('automod_action', execution)\n\n def _get_create_guild(self, data: gw.GuildCreateEvent) -> Guild:\n if data.get('unavailable') is False:\n # GUILD_CREATE with unavailable in the response\n # usually means that the guild has become available\n # and is therefore in the cache\n guild = self._get_guild(int(data['id']))\n if guild is not None:\n guild.unavailable = False\n guild._from_data(data)\n return guild\n\n return self._add_guild_from_data(data)\n\n def is_guild_evicted(self, guild: Guild) -> bool:\n return guild.id not in self._guilds\n\n @overload\n async def chunk_guild(self, guild: Guild, *, wait: Literal[True] = ..., cache: Optional[bool] = ...) 
-> List[Member]:\n ...\n\n @overload\n async def chunk_guild(\n self, guild: Guild, *, wait: Literal[False] = ..., cache: Optional[bool] = ...\n ) -> asyncio.Future[List[Member]]:\n ...\n\n async def chunk_guild(\n self, guild: Guild, *, wait: bool = True, cache: Optional[bool] = None\n ) -> Union[List[Member], asyncio.Future[List[Member]]]:\n cache = cache or self.member_cache_flags.joined\n request = self._chunk_requests.get(guild.id)\n if request is None:\n self._chunk_requests[guild.id] = request = ChunkRequest(guild.id, self.loop, self._get_guild, cache=cache)\n await self.chunker(guild.id, nonce=request.nonce)\n\n if wait:\n return await request.wait()\n return request.get_future()\n\n def _chunk_timeout(self, guild: Guild) -> float:\n return max(5.0, (guild.member_count or 0) / 10000)\n\n async def _chunk_and_dispatch(self, guild, unavailable):\n timeout = self._chunk_timeout(guild)\n\n try:\n await asyncio.wait_for(self.chunk_guild(guild), timeout=timeout)\n except asyncio.TimeoutError:\n _log.warning('Somehow timed out waiting for chunks for guild ID %s.', guild.id)\n\n if unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n def _add_ready_state(self, guild: Guild) -> bool:\n try:\n # Notify the on_ready state, if any, that this guild is complete.\n self._ready_state.put_nowait(guild)\n except AttributeError:\n return False\n else:\n return True\n\n def parse_guild_create(self, data: gw.GuildCreateEvent) -> None:\n unavailable = data.get('unavailable')\n if unavailable is True:\n # joined a guild with unavailable == True so..\n return\n\n guild = self._get_create_guild(data)\n\n if self._add_ready_state(guild):\n return # We're waiting for the ready event, put the rest on hold\n\n # check if it requires chunking\n if self._guild_needs_chunking(guild):\n asyncio.create_task(self._chunk_and_dispatch(guild, unavailable))\n return\n\n # Dispatch available if newly available\n if unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n def parse_guild_update(self, data: gw.GuildUpdateEvent) -> None:\n guild = self._get_guild(int(data['id']))\n if guild is not None:\n old_guild = copy.copy(guild)\n guild._from_data(data)\n self.dispatch('guild_update', old_guild, guild)\n else:\n _log.debug('GUILD_UPDATE referencing an unknown guild ID: %s. Discarding.', data['id'])\n\n def parse_guild_delete(self, data: gw.GuildDeleteEvent) -> None:\n guild = self._get_guild(int(data['id']))\n if guild is None:\n _log.debug('GUILD_DELETE referencing an unknown guild ID: %s. 
Discarding.', data['id'])\n return\n\n if data.get('unavailable', False):\n # GUILD_DELETE with unavailable being True means that the\n # guild that was available is now currently unavailable\n guild.unavailable = True\n self.dispatch('guild_unavailable', guild)\n return\n\n # do a cleanup of the messages cache\n if self._messages is not None:\n self._messages: Optional[Deque[Message]] = deque(\n (msg for msg in self._messages if msg.guild != guild), maxlen=self.max_messages\n )\n\n self._remove_guild(guild)\n self.dispatch('guild_remove', guild)\n\n def parse_guild_ban_add(self, data: gw.GuildBanAddEvent) -> None:\n # we make the assumption that GUILD_BAN_ADD is done\n # before GUILD_MEMBER_REMOVE is called\n # hence we don't remove it from cache or do anything\n # strange with it, the main purpose of this event\n # is mainly to dispatch to another event worth listening to for logging\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n try:\n user = User(data=data['user'], state=self)\n except KeyError:\n pass\n else:\n member = guild.get_member(user.id) or user\n self.dispatch('member_ban', guild, member)\n\n def parse_guild_ban_remove(self, data: gw.GuildBanRemoveEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None and 'user' in data:\n user = self.store_user(data['user'])\n self.dispatch('member_unban', guild, user)\n\n def parse_guild_role_create(self, data: gw.GuildRoleCreateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('GUILD_ROLE_CREATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n role_data = data['role']\n role = Role(guild=guild, data=role_data, state=self)\n guild._add_role(role)\n self.dispatch('guild_role_create', role)\n\n def parse_guild_role_delete(self, data: gw.GuildRoleDeleteEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n role_id = int(data['role_id'])\n try:\n role = guild._remove_role(role_id)\n except KeyError:\n return\n else:\n self.dispatch('guild_role_delete', role)\n else:\n _log.debug('GUILD_ROLE_DELETE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_guild_role_update(self, data: gw.GuildRoleUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n role_data = data['role']\n role_id = int(role_data['id'])\n role = guild.get_role(role_id)\n if role is not None:\n old_role = copy.copy(role)\n role._update(role_data)\n self.dispatch('guild_role_update', old_role, role)\n else:\n _log.debug('GUILD_ROLE_UPDATE referencing an unknown guild ID: %s. 
Discarding.', data['guild_id'])\n\n def parse_guild_members_chunk(self, data: gw.GuildMembersChunkEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n presences = data.get('presences', [])\n\n if guild is None:\n return\n\n members = [Member(guild=guild, data=member, state=self) for member in data.get('members', [])]\n _log.debug('Processed a chunk for %s members in guild ID %s.', len(members), guild_id)\n\n if presences:\n member_dict: Dict[Snowflake, Member] = {str(member.id): member for member in members}\n for presence in presences:\n user = presence['user']\n member_id = user['id']\n member = member_dict.get(member_id)\n if member is not None:\n member._presence_update(presence, user)\n\n complete = data.get('chunk_index', 0) + 1 == data.get('chunk_count')\n self.process_chunk_requests(guild_id, data.get('nonce'), members, complete)\n\n def parse_guild_integrations_update(self, data: gw.GuildIntegrationsUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n self.dispatch('guild_integrations_update', guild)\n else:\n _log.debug('GUILD_INTEGRATIONS_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_integration_create(self, data: gw.IntegrationCreateEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n if guild is not None:\n cls, _ = _integration_factory(data['type'])\n integration = cls(data=data, guild=guild)\n self.dispatch('integration_create', integration)\n else:\n _log.debug('INTEGRATION_CREATE referencing an unknown guild ID: %s. Discarding.', guild_id)\n\n def parse_integration_update(self, data: gw.IntegrationUpdateEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n if guild is not None:\n cls, _ = _integration_factory(data['type'])\n integration = cls(data=data, guild=guild)\n self.dispatch('integration_update', integration)\n else:\n _log.debug('INTEGRATION_UPDATE referencing an unknown guild ID: %s. Discarding.', guild_id)\n\n def parse_integration_delete(self, data: gw.IntegrationDeleteEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n if guild is not None:\n raw = RawIntegrationDeleteEvent(data)\n self.dispatch('raw_integration_delete', raw)\n else:\n _log.debug('INTEGRATION_DELETE referencing an unknown guild ID: %s. Discarding.', guild_id)\n\n def parse_webhooks_update(self, data: gw.WebhooksUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('WEBHOOKS_UPDATE referencing an unknown guild ID: %s. Discarding', data['guild_id'])\n return\n\n channel_id = utils._get_as_snowflake(data, 'channel_id')\n channel = guild.get_channel(channel_id) # type: ignore # None is okay here\n if channel is not None:\n self.dispatch('webhooks_update', channel)\n else:\n _log.debug('WEBHOOKS_UPDATE referencing an unknown channel ID: %s. Discarding.', data['channel_id'])\n\n def parse_stage_instance_create(self, data: gw.StageInstanceCreateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n stage_instance = StageInstance(guild=guild, state=self, data=data)\n guild._stage_instances[stage_instance.id] = stage_instance\n self.dispatch('stage_instance_create', stage_instance)\n else:\n _log.debug('STAGE_INSTANCE_CREATE referencing unknown guild ID: %s. 
Discarding.', data['guild_id'])\n\n def parse_stage_instance_update(self, data: gw.StageInstanceUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n stage_instance = guild._stage_instances.get(int(data['id']))\n if stage_instance is not None:\n old_stage_instance = copy.copy(stage_instance)\n stage_instance._update(data)\n self.dispatch('stage_instance_update', old_stage_instance, stage_instance)\n else:\n _log.debug('STAGE_INSTANCE_UPDATE referencing unknown stage instance ID: %s. Discarding.', data['id'])\n else:\n _log.debug('STAGE_INSTANCE_UPDATE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_stage_instance_delete(self, data: gw.StageInstanceDeleteEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n try:\n stage_instance = guild._stage_instances.pop(int(data['id']))\n except KeyError:\n pass\n else:\n self.dispatch('stage_instance_delete', stage_instance)\n else:\n _log.debug('STAGE_INSTANCE_DELETE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_guild_scheduled_event_create(self, data: gw.GuildScheduledEventCreateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n scheduled_event = ScheduledEvent(state=self, data=data)\n guild._scheduled_events[scheduled_event.id] = scheduled_event\n self.dispatch('scheduled_event_create', scheduled_event)\n else:\n _log.debug('SCHEDULED_EVENT_CREATE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_guild_scheduled_event_update(self, data: gw.GuildScheduledEventUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n scheduled_event = guild._scheduled_events.get(int(data['id']))\n if scheduled_event is not None:\n old_scheduled_event = copy.copy(scheduled_event)\n scheduled_event._update(data)\n self.dispatch('scheduled_event_update', old_scheduled_event, scheduled_event)\n else:\n _log.debug('SCHEDULED_EVENT_UPDATE referencing unknown scheduled event ID: %s. Discarding.', data['id'])\n else:\n _log.debug('SCHEDULED_EVENT_UPDATE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_guild_scheduled_event_delete(self, data: gw.GuildScheduledEventDeleteEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n try:\n scheduled_event = guild._scheduled_events.pop(int(data['id']))\n except KeyError:\n pass\n else:\n self.dispatch('scheduled_event_delete', scheduled_event)\n else:\n _log.debug('SCHEDULED_EVENT_DELETE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_guild_scheduled_event_user_add(self, data: gw.GuildScheduledEventUserAdd) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n scheduled_event = guild._scheduled_events.get(int(data['guild_scheduled_event_id']))\n if scheduled_event is not None:\n user = self.get_user(int(data['user_id']))\n if user is not None:\n scheduled_event._add_user(user)\n self.dispatch('scheduled_event_user_add', scheduled_event, user)\n else:\n _log.debug('SCHEDULED_EVENT_USER_ADD referencing unknown user ID: %s. Discarding.', data['user_id'])\n else:\n _log.debug(\n 'SCHEDULED_EVENT_USER_ADD referencing unknown scheduled event ID: %s. Discarding.',\n data['guild_scheduled_event_id'],\n )\n else:\n _log.debug('SCHEDULED_EVENT_USER_ADD referencing unknown guild ID: %s. 
Discarding.', data['guild_id'])\n\n def parse_guild_scheduled_event_user_remove(self, data: gw.GuildScheduledEventUserRemove) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n scheduled_event = guild._scheduled_events.get(int(data['guild_scheduled_event_id']))\n if scheduled_event is not None:\n user = self.get_user(int(data['user_id']))\n if user is not None:\n scheduled_event._pop_user(user.id)\n self.dispatch('scheduled_event_user_remove', scheduled_event, user)\n else:\n _log.debug('SCHEDULED_EVENT_USER_REMOVE referencing unknown user ID: %s. Discarding.', data['user_id'])\n else:\n _log.debug(\n 'SCHEDULED_EVENT_USER_REMOVE referencing unknown scheduled event ID: %s. Discarding.',\n data['guild_scheduled_event_id'],\n )\n else:\n _log.debug('SCHEDULED_EVENT_USER_REMOVE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_application_command_permissions_update(self, data: GuildApplicationCommandPermissionsPayload):\n raw = RawAppCommandPermissionsUpdateEvent(data=data, state=self)\n self.dispatch('raw_app_command_permissions_update', raw)\n\n def parse_voice_state_update(self, data: gw.VoiceStateUpdateEvent) -> None:\n guild = self._get_guild(utils._get_as_snowflake(data, 'guild_id'))\n channel_id = utils._get_as_snowflake(data, 'channel_id')\n flags = self.member_cache_flags\n # self.user is *always* cached when this is called\n self_id = self.user.id # type: ignore\n if guild is not None:\n if int(data['user_id']) == self_id:\n voice = self._get_voice_client(guild.id)\n if voice is not None:\n coro = voice.on_voice_state_update(data)\n asyncio.create_task(logging_coroutine(coro, info='Voice Protocol voice state update handler'))\n\n member, before, after = guild._update_voice_state(data, channel_id) # type: ignore\n if member is not None:\n if flags.voice:\n if channel_id is None and flags._voice_only and member.id != self_id:\n # Only remove from cache if we only have the voice flag enabled\n guild._remove_member(member)\n elif channel_id is not None:\n guild._add_member(member)\n\n self.dispatch('voice_state_update', member, before, after)\n else:\n _log.debug('VOICE_STATE_UPDATE referencing an unknown member ID: %s. 
Discarding.', data['user_id'])\n\n def parse_voice_server_update(self, data: gw.VoiceServerUpdateEvent) -> None:\n key_id = int(data['guild_id'])\n\n vc = self._get_voice_client(key_id)\n if vc is not None:\n coro = vc.on_voice_server_update(data)\n asyncio.create_task(logging_coroutine(coro, info='Voice Protocol voice server update handler'))\n\n def parse_typing_start(self, data: gw.TypingStartEvent) -> None:\n raw = RawTypingEvent(data)\n raw.user = self.get_user(raw.user_id)\n channel, guild = self._get_guild_channel(data)\n\n if channel is not None:\n if isinstance(channel, DMChannel):\n channel.recipient = raw.user\n elif guild is not None:\n raw.user = guild.get_member(raw.user_id)\n\n if raw.user is None:\n member_data = data.get('member')\n if member_data:\n raw.user = Member(data=member_data, state=self, guild=guild)\n\n if raw.user is not None:\n self.dispatch('typing', channel, raw.user, raw.timestamp)\n\n self.dispatch('raw_typing', raw)\n\n def parse_entitlement_create(self, data: gw.EntitlementCreateEvent) -> None:\n entitlement = Entitlement(data=data, state=self)\n self.dispatch('entitlement_create', entitlement)\n\n def parse_entitlement_update(self, data: gw.EntitlementUpdateEvent) -> None:\n entitlement = Entitlement(data=data, state=self)\n self.dispatch('entitlement_update', entitlement)\n\n def parse_entitlement_delete(self, data: gw.EntitlementDeleteEvent) -> None:\n entitlement = Entitlement(data=data, state=self)\n self.dispatch('entitlement_update', entitlement)\n\n def _get_reaction_user(self, channel: MessageableChannel, user_id: int) -> Optional[Union[User, Member]]:\n if isinstance(channel, (TextChannel, Thread, VoiceChannel)):\n return channel.guild.get_member(user_id)\n return self.get_user(user_id)\n\n def get_reaction_emoji(self, data: PartialEmojiPayload) -> Union[Emoji, PartialEmoji, str]:\n emoji_id = utils._get_as_snowflake(data, 'id')\n\n if not emoji_id:\n # the name key will be a str\n return data['name'] # type: ignore\n\n try:\n return self._emojis[emoji_id]\n except KeyError:\n return PartialEmoji.with_state(\n self, animated=data.get('animated', False), id=emoji_id, name=data['name'] # type: ignore\n )\n\n def _upgrade_partial_emoji(self, emoji: PartialEmoji) -> Union[Emoji, PartialEmoji, str]:\n emoji_id = emoji.id\n if not emoji_id:\n return emoji.name\n try:\n return self._emojis[emoji_id]\n except KeyError:\n return emoji\n\n def get_channel(self, id: Optional[int]) -> Optional[Union[Channel, Thread]]:\n if id is None:\n return None\n\n pm = self._get_private_channel(id)\n if pm is not None:\n return pm\n\n for guild in self.guilds:\n channel = guild._resolve_channel(id)\n if channel is not None:\n return channel\n\n def create_message(self, *, channel: MessageableChannel, data: MessagePayload) -> Message:\n return Message(state=self, channel=channel, data=data)\n\n\nclass AutoShardedConnectionState(ConnectionState[ClientT]):\n def __init__(self, *args: Any, **kwargs: Any) -> None:\n super().__init__(*args, **kwargs)\n\n self.shard_ids: Union[List[int], range] = []\n\n self._ready_tasks: Dict[int, asyncio.Task[None]] = {}\n self._ready_states: Dict[int, asyncio.Queue[Guild]] = {}\n\n def _update_message_references(self) -> None:\n # self._messages won't be None when this is called\n for msg in self._messages: # type: ignore\n if not msg.guild:\n continue\n\n new_guild = self._get_guild(msg.guild.id)\n if new_guild is not None and new_guild is not msg.guild:\n channel_id = msg.channel.id\n channel = new_guild._resolve_channel(channel_id) or 
PartialMessageable(\n state=self, id=channel_id, guild_id=new_guild.id\n )\n msg._rebind_cached_references(new_guild, channel)\n\n async def chunker(\n self,\n guild_id: int,\n query: str = '',\n limit: int = 0,\n presences: bool = False,\n *,\n shard_id: Optional[int] = None,\n nonce: Optional[str] = None,\n ) -> None:\n ws = self._get_websocket(guild_id, shard_id=shard_id)\n await ws.request_chunks(guild_id, query=query, limit=limit, presences=presences, nonce=nonce)\n\n def _add_ready_state(self, guild: Guild) -> bool:\n try:\n # Notify the on_ready state, if any, that this guild is complete.\n self._ready_states[guild.shard_id].put_nowait(guild)\n except KeyError:\n return False\n else:\n return True\n\n async def _delay_ready(self) -> None:\n await asyncio.gather(*self._ready_tasks.values())\n\n # clear the current tasks\n self._ready_task = None\n self._ready_tasks = {}\n\n # dispatch the event\n self.call_handlers('ready')\n self.dispatch('ready')\n\n async def _delay_shard_ready(self, shard_id: int) -> None:\n try:\n states = []\n while True:\n # this snippet of code is basically waiting N seconds\n # until the last GUILD_CREATE was sent\n try:\n guild = await asyncio.wait_for(self._ready_states[shard_id].get(), timeout=self.guild_ready_timeout)\n except asyncio.TimeoutError:\n break\n else:\n if self._guild_needs_chunking(guild):\n future = await self.chunk_guild(guild, wait=False)\n states.append((guild, future))\n else:\n if guild.unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n for guild, future in states:\n timeout = self._chunk_timeout(guild)\n\n try:\n await asyncio.wait_for(future, timeout=timeout)\n except asyncio.TimeoutError:\n _log.warning('Shard ID %s timed out waiting for chunks for guild_id %s.', guild.shard_id, guild.id)\n\n if guild.unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n # remove the state\n try:\n del self._ready_states[shard_id]\n except KeyError:\n pass # already been deleted somehow\n\n except asyncio.CancelledError:\n pass\n else:\n # dispatch the event\n self.dispatch('shard_ready', shard_id)\n\n def parse_ready(self, data: gw.ReadyEvent) -> None:\n if self._ready_task is not None:\n self._ready_task.cancel()\n\n shard_id = data['shard'][0] # shard_id, num_shards\n\n if shard_id in self._ready_tasks:\n self._ready_tasks[shard_id].cancel()\n\n if shard_id not in self._ready_states:\n self._ready_states[shard_id] = asyncio.Queue()\n\n self.user: Optional[ClientUser]\n self.user = user = ClientUser(state=self, data=data['user'])\n # self._users is a list of Users, we're setting a ClientUser\n self._users[user.id] = user # type: ignore\n\n if self.application_id is None:\n try:\n application = data['application']\n except KeyError:\n pass\n else:\n self.application_id: Optional[int] = utils._get_as_snowflake(application, 'id')\n self.application_flags: ApplicationFlags = ApplicationFlags._from_value(application['flags'])\n\n for guild_data in data['guilds']:\n self._add_guild_from_data(guild_data) # type: ignore # _add_guild_from_data requires a complete Guild payload\n\n if self._messages:\n self._update_message_references()\n\n self.dispatch('connect')\n self.dispatch('shard_connect', shard_id)\n\n self._ready_tasks[shard_id] = asyncio.create_task(self._delay_shard_ready(shard_id))\n\n # The delay task for every shard has been started\n if len(self._ready_tasks) == len(self.shard_ids):\n self._ready_task = 
asyncio.create_task(self._delay_ready())\n\n def parse_resumed(self, data: gw.ResumedEvent) -> None:\n self.dispatch('resumed')\n self.dispatch('shard_resumed', data['__shard_id__']) # type: ignore # This is an internal discord.py key\n",
"path": "discord/state.py"
}
] | 10_10 | python | import unittest
import sys
import inspect
class TestEntitlementDeleteEventDispatch(unittest.TestCase):
def test_entitlement_delete_event_dispatch(self):
from discord.state import ConnectionState
# Inspect the ConnectionState class for the parse_entitlement_delete method
method = getattr(ConnectionState, 'parse_entitlement_delete', None)
self.assertIsNotNone(method, "Method parse_entitlement_delete not found in ConnectionState")
# Inspect the source code of the method
source_lines = inspect.getsource(method).splitlines()
# Check if the correct dispatch call is present in the method
correct_dispatch = any("dispatch('entitlement_delete'" in line for line in source_lines)
self.assertTrue(correct_dispatch, "The dispatch call for 'entitlement_delete' is not correct in parse_entitlement_delete")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestEntitlementDeleteEventDispatch))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
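For reference on row 10_10: the `discord/state.py` content above contains the deliberate bug the test script exercises: `parse_entitlement_delete` dispatches 'entitlement_update' instead of 'entitlement_delete'. A minimal sketch of the fix the test expects, reusing the method body exactly as it appears in the file content above, with only the dispatched event name corrected:

    def parse_entitlement_delete(self, data: gw.EntitlementDeleteEvent) -> None:
        entitlement = Entitlement(data=data, state=self)
        # Dispatch the dedicated delete event rather than reusing 'entitlement_update'.
        self.dispatch('entitlement_delete', entitlement)
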
https://github.com/teamqurrent/discord.py | Enhance the `HTTPClient` class in `http.py` to allow editing various application details via Discord's API. Implement the `edit_application_info` method to send a PATCH request to the /applications/@me endpoint. The method should accept `reason` and `payload` arguments, filter the payload down to a set of valid keys such as 'custom_install_url' and 'description', and use `self.request` to send the PATCH request with the filtered payload and reason. Ensure the method returns the response from the API call. | 9810cb9 | discord | python3.9 | 56c67d39 | diff --git a/discord/appinfo.py b/discord/appinfo.py
--- a/discord/appinfo.py
+++ b/discord/appinfo.py
@@ -30,8 +30,11 @@ from . import utils
from .asset import Asset
from .flags import ApplicationFlags
from .permissions import Permissions
+from .utils import MISSING
if TYPE_CHECKING:
+ from typing import Dict, Any
+
from .guild import Guild
from .types.appinfo import (
AppInfo as AppInfoPayload,
@@ -131,6 +134,15 @@ class AppInfo:
a verification method in the guild's role verification configuration.
.. versionadded:: 2.2
+ interactions_endpoint_url: Optional[:class:`str`]
+ The interactions endpoint url of the application to receive interactions over this endpoint rather than
+ over the gateway, if configured.
+
+ .. versionadded:: 2.4
+ redirect_uris: List[:class:`str`]
+ A list of authentication redirect URIs.
+
+ .. versionadded:: 2.4
"""
__slots__ = (
@@ -156,6 +168,8 @@ class AppInfo:
'custom_install_url',
'install_params',
'role_connections_verification_url',
+ 'interactions_endpoint_url',
+ 'redirect_uris',
)
def __init__(self, state: ConnectionState, data: AppInfoPayload):
@@ -190,6 +204,8 @@ class AppInfo:
params = data.get('install_params')
self.install_params: Optional[AppInstallParams] = AppInstallParams(params) if params else None
+ self.interactions_endpoint_url: Optional[str] = data.get('interactions_endpoint_url')
+ self.redirect_uris: List[str] = data.get('redirect_uris', [])
def __repr__(self) -> str:
return (
@@ -232,6 +248,138 @@ class AppInfo:
"""
return ApplicationFlags._from_value(self._flags)
+ async def edit(
+ self,
+ *,
+ reason: Optional[str] = MISSING,
+ custom_install_url: Optional[str] = MISSING,
+ description: Optional[str] = MISSING,
+ role_connections_verification_url: Optional[str] = MISSING,
+ install_params_scopes: Optional[List[str]] = MISSING,
+ install_params_permissions: Optional[Permissions] = MISSING,
+ flags: Optional[ApplicationFlags] = MISSING,
+ icon: Optional[bytes] = MISSING,
+ cover_image: Optional[bytes] = MISSING,
+ interactions_endpoint_url: Optional[str] = MISSING,
+ tags: Optional[List[str]] = MISSING,
+ ) -> AppInfo:
+ r"""|coro|
+
+ Edits the application info.
+
+ .. versionadded:: 2.4
+
+ Parameters
+ ----------
+ custom_install_url: Optional[:class:`str`]
+ The new custom authorization URL for the application. Can be ``None`` to remove the URL.
+ description: Optional[:class:`str`]
+ The new application description. Can be ``None`` to remove the description.
+ role_connections_verification_url: Optional[:class:`str`]
+ The new application's connection verification URL which will render the application
+ as a verification method in the guild's role verification configuration. Can be ``None`` to remove the URL.
+ install_params_scopes: Optional[List[:class:`str`]]
+ The new list of :ddocs:`OAuth2 scopes <topics/oauth2#shared-resources-oauth2-scopes>` of
+ the :attr:`~install_params`. Can be ``None`` to remove the scopes.
+ install_params_permissions: Optional[:class:`Permissions`]
+ The new permissions of the :attr:`~install_params`. Can be ``None`` to remove the permissions.
+ flags: Optional[:class:`ApplicationFlags`]
+ The new application's flags. Only limited intent flags (:attr:`~ApplicationFlags.gateway_presence_limited`,
+ :attr:`~ApplicationFlags.gateway_guild_members_limited`, :attr:`~ApplicationFlags.gateway_message_content_limited`)
+ can be edited. Can be ``None`` to remove the flags.
+
+ .. warning::
+
+ Editing the limited intent flags leads to the termination of the bot.
+
+ icon: Optional[:class:`bytes`]
+ The new application's icon as a :term:`py:bytes-like object`. Can be ``None`` to remove the icon.
+ cover_image: Optional[:class:`bytes`]
+ The new application's cover image as a :term:`py:bytes-like object` on a store embed.
+ The cover image is only available if the application is a game sold on Discord.
+ Can be ``None`` to remove the image.
+ interactions_endpoint_url: Optional[:class:`str`]
+ The new interactions endpoint url of the application to receive interactions over this endpoint rather than
+ over the gateway. Can be ``None`` to remove the URL.
+ tags: Optional[List[:class:`str`]]
+ The new list of tags describing the functionality of the application. Can be ``None`` to remove the tags.
+ reason: Optional[:class:`str`]
+ The reason for editing the application. Shows up on the audit log.
+
+ Raises
+ -------
+ HTTPException
+ Editing the application failed
+ ValueError
+ The image format passed in to ``icon`` or ``cover_image`` is invalid. This is also raised
+ when ``install_params_scopes`` and ``install_params_permissions`` are incompatible with each other.
+
+ Returns
+ -------
+ :class:`AppInfo`
+ The newly updated application info.
+ """
+ payload: Dict[str, Any] = {}
+
+ if custom_install_url is not MISSING:
+ payload['custom_install_url'] = custom_install_url
+
+ if description is not MISSING:
+ payload['description'] = description
+
+ if role_connections_verification_url is not MISSING:
+ payload['role_connections_verification_url'] = role_connections_verification_url
+
+ if install_params_scopes is not MISSING:
+ install_params: Optional[Dict[str, Any]] = {}
+ if install_params_scopes is None:
+ install_params = None
+ else:
+ if "bot" not in install_params_scopes and install_params_permissions is not MISSING:
+ raise ValueError("'bot' must be in install_params_scopes if install_params_permissions is set")
+
+ install_params['scopes'] = install_params_scopes
+
+ if install_params_permissions is MISSING:
+ install_params['permissions'] = 0
+ else:
+ if install_params_permissions is None:
+ install_params['permissions'] = 0
+ else:
+ install_params['permissions'] = install_params_permissions.value
+
+ payload['install_params'] = install_params
+
+ else:
+ if install_params_permissions is not MISSING:
+ raise ValueError("install_params_scopes must be set if install_params_permissions is set")
+
+ if flags is not MISSING:
+ if flags is None:
+ payload['flags'] = flags
+ else:
+ payload['flags'] = flags.value
+
+ if icon is not MISSING:
+ if icon is None:
+ payload['icon'] = icon
+ else:
+ payload['icon'] = utils._bytes_to_base64_data(icon)
+
+ if cover_image is not MISSING:
+ if cover_image is None:
+ payload['cover_image'] = cover_image
+ else:
+ payload['cover_image'] = utils._bytes_to_base64_data(cover_image)
+
+ if interactions_endpoint_url is not MISSING:
+ payload['interactions_endpoint_url'] = interactions_endpoint_url
+
+ if tags is not MISSING:
+ payload['tags'] = tags
+ data = await self._state.http.edit_application_info(reason=reason, payload=payload)
+ return AppInfo(data=data, state=self._state)
+
class PartialAppInfo:
"""Represents a partial AppInfo given by :func:`~discord.abc.GuildChannel.create_invite`
diff --git a/discord/http.py b/discord/http.py
--- a/discord/http.py
+++ b/discord/http.py
@@ -2456,6 +2456,22 @@ class HTTPClient:
def application_info(self) -> Response[appinfo.AppInfo]:
return self.request(Route('GET', '/oauth2/applications/@me'))
+ def edit_application_info(self, *, reason: Optional[str], payload: Any) -> Response[appinfo.AppInfo]:
+ valid_keys = (
+ 'custom_install_url',
+ 'description',
+ 'role_connections_verification_url',
+ 'install_params',
+ 'flags',
+ 'icon',
+ 'cover_image',
+ 'interactions_endpoint_url',
+ 'tags',
+ )
+
+ payload = {k: v for k, v in payload.items() if k in valid_keys}
+ return self.request(Route('PATCH', '/applications/@me'), json=payload, reason=reason)
+
async def get_gateway(self, *, encoding: str = 'json', zlib: bool = True) -> str:
try:
data = await self.request(Route('GET', '/gateway'))
diff --git a/discord/types/appinfo.py b/discord/types/appinfo.py
--- a/discord/types/appinfo.py
+++ b/discord/types/appinfo.py
@@ -49,6 +49,9 @@ class BaseAppInfo(TypedDict):
terms_of_service_url: NotRequired[str]
privacy_policy_url: NotRequired[str]
rpc_origins: NotRequired[List[str]]
+ interactions_endpoint_url: NotRequired[Optional[str]]
+ redirect_uris: NotRequired[List[str]]
+ role_connections_verification_url: NotRequired[Optional[str]]
class AppInfo(BaseAppInfo):
@@ -64,16 +67,12 @@ class AppInfo(BaseAppInfo):
tags: NotRequired[List[str]]
install_params: NotRequired[InstallParams]
custom_install_url: NotRequired[str]
- role_connections_verification_url: NotRequired[str]
class PartialAppInfo(BaseAppInfo, total=False):
hook: bool
max_participants: int
approximate_guild_count: int
- redirect_uris: List[str]
- interactions_endpoint_url: Optional[str]
- role_connections_verification_url: Optional[str]
class GatewayAppInfo(TypedDict):
| [
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import List, TYPE_CHECKING, Optional\n\nfrom . import utils\nfrom .asset import Asset\nfrom .flags import ApplicationFlags\nfrom .permissions import Permissions\n\nif TYPE_CHECKING:\n from .guild import Guild\n from .types.appinfo import (\n AppInfo as AppInfoPayload,\n PartialAppInfo as PartialAppInfoPayload,\n Team as TeamPayload,\n InstallParams as InstallParamsPayload,\n )\n from .user import User\n from .state import ConnectionState\n\n__all__ = (\n 'AppInfo',\n 'PartialAppInfo',\n 'AppInstallParams',\n)\n\n\nclass AppInfo:\n \"\"\"Represents the application info for the bot provided by Discord.\n\n\n Attributes\n -------------\n id: :class:`int`\n The application ID.\n name: :class:`str`\n The application name.\n owner: :class:`User`\n The application owner.\n team: Optional[:class:`Team`]\n The application's team.\n\n .. versionadded:: 1.3\n\n description: :class:`str`\n The application description.\n bot_public: :class:`bool`\n Whether the bot can be invited by anyone or if it is locked\n to the application owner.\n bot_require_code_grant: :class:`bool`\n Whether the bot requires the completion of the full oauth2 code\n grant flow to join.\n rpc_origins: Optional[List[:class:`str`]]\n A list of RPC origin URLs, if RPC is enabled.\n\n verify_key: :class:`str`\n The hex encoded key for verification in interactions and the\n GameSDK's :ddocs:`GetTicket <game-sdk/applications#getticket>`.\n\n .. versionadded:: 1.3\n\n guild_id: Optional[:class:`int`]\n If this application is a game sold on Discord,\n this field will be the guild to which it has been linked to.\n\n .. versionadded:: 1.3\n\n primary_sku_id: Optional[:class:`int`]\n If this application is a game sold on Discord,\n this field will be the id of the \"Game SKU\" that is created,\n if it exists.\n\n .. versionadded:: 1.3\n\n slug: Optional[:class:`str`]\n If this application is a game sold on Discord,\n this field will be the URL slug that links to the store page.\n\n .. versionadded:: 1.3\n\n terms_of_service_url: Optional[:class:`str`]\n The application's terms of service URL, if set.\n\n .. versionadded:: 2.0\n\n privacy_policy_url: Optional[:class:`str`]\n The application's privacy policy URL, if set.\n\n .. versionadded:: 2.0\n\n tags: List[:class:`str`]\n The list of tags describing the functionality of the application.\n\n .. 
versionadded:: 2.0\n\n custom_install_url: List[:class:`str`]\n The custom authorization URL for the application, if enabled.\n\n .. versionadded:: 2.0\n\n install_params: Optional[:class:`AppInstallParams`]\n The settings for custom authorization URL of application, if enabled.\n\n .. versionadded:: 2.0\n role_connections_verification_url: Optional[:class:`str`]\n The application's connection verification URL which will render the application as\n a verification method in the guild's role verification configuration.\n\n .. versionadded:: 2.2\n \"\"\"\n\n __slots__ = (\n '_state',\n 'description',\n 'id',\n 'name',\n 'rpc_origins',\n 'bot_public',\n 'bot_require_code_grant',\n 'owner',\n '_icon',\n 'verify_key',\n 'team',\n 'guild_id',\n 'primary_sku_id',\n 'slug',\n '_cover_image',\n '_flags',\n 'terms_of_service_url',\n 'privacy_policy_url',\n 'tags',\n 'custom_install_url',\n 'install_params',\n 'role_connections_verification_url',\n )\n\n def __init__(self, state: ConnectionState, data: AppInfoPayload):\n from .team import Team\n\n self._state: ConnectionState = state\n self.id: int = int(data['id'])\n self.name: str = data['name']\n self.description: str = data['description']\n self._icon: Optional[str] = data['icon']\n self.rpc_origins: Optional[List[str]] = data.get('rpc_origins')\n self.bot_public: bool = data['bot_public']\n self.bot_require_code_grant: bool = data['bot_require_code_grant']\n self.owner: User = state.create_user(data['owner'])\n\n team: Optional[TeamPayload] = data.get('team')\n self.team: Optional[Team] = Team(state, team) if team else None\n\n self.verify_key: str = data['verify_key']\n\n self.guild_id: Optional[int] = utils._get_as_snowflake(data, 'guild_id')\n\n self.primary_sku_id: Optional[int] = utils._get_as_snowflake(data, 'primary_sku_id')\n self.slug: Optional[str] = data.get('slug')\n self._flags: int = data.get('flags', 0)\n self._cover_image: Optional[str] = data.get('cover_image')\n self.terms_of_service_url: Optional[str] = data.get('terms_of_service_url')\n self.privacy_policy_url: Optional[str] = data.get('privacy_policy_url')\n self.tags: List[str] = data.get('tags', [])\n self.custom_install_url: Optional[str] = data.get('custom_install_url')\n self.role_connections_verification_url: Optional[str] = data.get('role_connections_verification_url')\n\n params = data.get('install_params')\n self.install_params: Optional[AppInstallParams] = AppInstallParams(params) if params else None\n\n def __repr__(self) -> str:\n return (\n f'<{self.__class__.__name__} id={self.id} name={self.name!r} '\n f'description={self.description!r} public={self.bot_public} '\n f'owner={self.owner!r}>'\n )\n\n @property\n def icon(self) -> Optional[Asset]:\n \"\"\"Optional[:class:`.Asset`]: Retrieves the application's icon asset, if any.\"\"\"\n if self._icon is None:\n return None\n return Asset._from_icon(self._state, self.id, self._icon, path='app')\n\n @property\n def cover_image(self) -> Optional[Asset]:\n \"\"\"Optional[:class:`.Asset`]: Retrieves the cover image on a store embed, if any.\n\n This is only available if the application is a game sold on Discord.\n \"\"\"\n if self._cover_image is None:\n return None\n return Asset._from_cover_image(self._state, self.id, self._cover_image)\n\n @property\n def guild(self) -> Optional[Guild]:\n \"\"\"Optional[:class:`Guild`]: If this application is a game sold on Discord,\n this field will be the guild to which it has been linked\n\n .. 
versionadded:: 1.3\n \"\"\"\n return self._state._get_guild(self.guild_id)\n\n @property\n def flags(self) -> ApplicationFlags:\n \"\"\":class:`ApplicationFlags`: The application's flags.\n\n .. versionadded:: 2.0\n \"\"\"\n return ApplicationFlags._from_value(self._flags)\n\n\nclass PartialAppInfo:\n \"\"\"Represents a partial AppInfo given by :func:`~discord.abc.GuildChannel.create_invite`\n\n .. versionadded:: 2.0\n\n Attributes\n -------------\n id: :class:`int`\n The application ID.\n name: :class:`str`\n The application name.\n description: :class:`str`\n The application description.\n rpc_origins: Optional[List[:class:`str`]]\n A list of RPC origin URLs, if RPC is enabled.\n verify_key: :class:`str`\n The hex encoded key for verification in interactions and the\n GameSDK's :ddocs:`GetTicket <game-sdk/applications#getticket>`.\n terms_of_service_url: Optional[:class:`str`]\n The application's terms of service URL, if set.\n privacy_policy_url: Optional[:class:`str`]\n The application's privacy policy URL, if set.\n approximate_guild_count: :class:`int`\n The approximate count of the guilds the bot was added to.\n\n .. versionadded:: 2.3\n redirect_uris: List[:class:`str`]\n A list of authentication redirect URIs.\n\n .. versionadded:: 2.3\n interactions_endpoint_url: Optional[:class:`str`]\n The interactions endpoint url of the application to receive interactions over this endpoint rather than\n over the gateway, if configured.\n\n .. versionadded:: 2.3\n role_connections_verification_url: Optional[:class:`str`]\n The application's connection verification URL which will render the application as\n a verification method in the guild's role verification configuration.\n\n .. versionadded:: 2.3\n \"\"\"\n\n __slots__ = (\n '_state',\n 'id',\n 'name',\n 'description',\n 'rpc_origins',\n 'verify_key',\n 'terms_of_service_url',\n 'privacy_policy_url',\n '_icon',\n '_flags',\n '_cover_image',\n 'approximate_guild_count',\n 'redirect_uris',\n 'interactions_endpoint_url',\n 'role_connections_verification_url',\n )\n\n def __init__(self, *, state: ConnectionState, data: PartialAppInfoPayload):\n self._state: ConnectionState = state\n self.id: int = int(data['id'])\n self.name: str = data['name']\n self._icon: Optional[str] = data.get('icon')\n self._flags: int = data.get('flags', 0)\n self._cover_image: Optional[str] = data.get('cover_image')\n self.description: str = data['description']\n self.rpc_origins: Optional[List[str]] = data.get('rpc_origins')\n self.verify_key: str = data['verify_key']\n self.terms_of_service_url: Optional[str] = data.get('terms_of_service_url')\n self.privacy_policy_url: Optional[str] = data.get('privacy_policy_url')\n self.approximate_guild_count: int = data.get('approximate_guild_count', 0)\n self.redirect_uris: List[str] = data.get('redirect_uris', [])\n self.interactions_endpoint_url: Optional[str] = data.get('interactions_endpoint_url')\n self.role_connections_verification_url: Optional[str] = data.get('role_connections_verification_url')\n\n def __repr__(self) -> str:\n return f'<{self.__class__.__name__} id={self.id} name={self.name!r} description={self.description!r}>'\n\n @property\n def icon(self) -> Optional[Asset]:\n \"\"\"Optional[:class:`.Asset`]: Retrieves the application's icon asset, if any.\"\"\"\n if self._icon is None:\n return None\n return Asset._from_icon(self._state, self.id, self._icon, path='app')\n\n @property\n def cover_image(self) -> Optional[Asset]:\n \"\"\"Optional[:class:`.Asset`]: Retrieves the cover image of the application's 
default rich presence.\n\n This is only available if the application is a game sold on Discord.\n\n .. versionadded:: 2.3\n \"\"\"\n if self._cover_image is None:\n return None\n return Asset._from_cover_image(self._state, self.id, self._cover_image)\n\n @property\n def flags(self) -> ApplicationFlags:\n \"\"\":class:`ApplicationFlags`: The application's flags.\n\n .. versionadded:: 2.0\n \"\"\"\n return ApplicationFlags._from_value(self._flags)\n\n\nclass AppInstallParams:\n \"\"\"Represents the settings for custom authorization URL of an application.\n\n .. versionadded:: 2.0\n\n Attributes\n ----------\n scopes: List[:class:`str`]\n The list of :ddocs:`OAuth2 scopes <topics/oauth2#shared-resources-oauth2-scopes>`\n to add the application to a guild with.\n permissions: :class:`Permissions`\n The permissions to give to application in the guild.\n \"\"\"\n\n __slots__ = ('scopes', 'permissions')\n\n def __init__(self, data: InstallParamsPayload) -> None:\n self.scopes: List[str] = data.get('scopes', [])\n self.permissions: Permissions = Permissions(int(data['permissions']))\n",
"path": "discord/appinfo.py"
},
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nimport logging\nimport sys\nfrom typing import (\n Any,\n ClassVar,\n Coroutine,\n Dict,\n Iterable,\n List,\n Literal,\n NamedTuple,\n Optional,\n overload,\n Sequence,\n Tuple,\n TYPE_CHECKING,\n Type,\n TypeVar,\n Union,\n)\nfrom urllib.parse import quote as _uriquote\nfrom collections import deque\nimport datetime\nimport socket\n\nimport aiohttp\n\nfrom .errors import HTTPException, RateLimited, Forbidden, NotFound, LoginFailure, DiscordServerError, GatewayNotFound\nfrom .gateway import DiscordClientWebSocketResponse\nfrom .file import File\nfrom .mentions import AllowedMentions\nfrom . 
import __version__, utils\nfrom .utils import MISSING\n\n_log = logging.getLogger(__name__)\n\nif TYPE_CHECKING:\n from typing_extensions import Self\n\n from .ui.view import View\n from .embeds import Embed\n from .message import Attachment\n from .flags import MessageFlags\n\n from .types import (\n appinfo,\n audit_log,\n automod,\n channel,\n command,\n emoji,\n guild,\n integration,\n invite,\n member,\n message,\n template,\n role,\n user,\n webhook,\n widget,\n threads,\n scheduled_event,\n sticker,\n welcome_screen,\n sku,\n )\n from .types.snowflake import Snowflake, SnowflakeList\n\n from types import TracebackType\n\n T = TypeVar('T')\n BE = TypeVar('BE', bound=BaseException)\n Response = Coroutine[Any, Any, T]\n\n\nasync def json_or_text(response: aiohttp.ClientResponse) -> Union[Dict[str, Any], str]:\n text = await response.text(encoding='utf-8')\n try:\n if response.headers['content-type'] == 'application/json':\n return utils._from_json(text)\n except KeyError:\n # Thanks Cloudflare\n pass\n\n return text\n\n\nclass MultipartParameters(NamedTuple):\n payload: Optional[Dict[str, Any]]\n multipart: Optional[List[Dict[str, Any]]]\n files: Optional[Sequence[File]]\n\n def __enter__(self) -> Self:\n return self\n\n def __exit__(\n self,\n exc_type: Optional[Type[BE]],\n exc: Optional[BE],\n traceback: Optional[TracebackType],\n ) -> None:\n if self.files:\n for file in self.files:\n file.close()\n\n\ndef handle_message_parameters(\n content: Optional[str] = MISSING,\n *,\n username: str = MISSING,\n avatar_url: Any = MISSING,\n tts: bool = False,\n nonce: Optional[Union[int, str]] = None,\n flags: MessageFlags = MISSING,\n file: File = MISSING,\n files: Sequence[File] = MISSING,\n embed: Optional[Embed] = MISSING,\n embeds: Sequence[Embed] = MISSING,\n attachments: Sequence[Union[Attachment, File]] = MISSING,\n view: Optional[View] = MISSING,\n allowed_mentions: Optional[AllowedMentions] = MISSING,\n message_reference: Optional[message.MessageReference] = MISSING,\n stickers: Optional[SnowflakeList] = MISSING,\n previous_allowed_mentions: Optional[AllowedMentions] = None,\n mention_author: Optional[bool] = None,\n thread_name: str = MISSING,\n channel_payload: Dict[str, Any] = MISSING,\n) -> MultipartParameters:\n if files is not MISSING and file is not MISSING:\n raise TypeError('Cannot mix file and files keyword arguments.')\n if embeds is not MISSING and embed is not MISSING:\n raise TypeError('Cannot mix embed and embeds keyword arguments.')\n\n if file is not MISSING:\n files = [file]\n\n if attachments is not MISSING and files is not MISSING:\n raise TypeError('Cannot mix attachments and files keyword arguments.')\n\n payload = {}\n if embeds is not MISSING:\n if len(embeds) > 10:\n raise ValueError('embeds has a maximum of 10 elements.')\n payload['embeds'] = [e.to_dict() for e in embeds]\n\n if embed is not MISSING:\n if embed is None:\n payload['embeds'] = []\n else:\n payload['embeds'] = [embed.to_dict()]\n\n if content is not MISSING:\n if content is not None:\n payload['content'] = str(content)\n else:\n payload['content'] = None\n\n if view is not MISSING:\n if view is not None:\n payload['components'] = view.to_components()\n else:\n payload['components'] = []\n\n if nonce is not None:\n payload['nonce'] = str(nonce)\n\n if message_reference is not MISSING:\n payload['message_reference'] = message_reference\n\n if stickers is not MISSING:\n if stickers is not None:\n payload['sticker_ids'] = stickers\n else:\n payload['sticker_ids'] = []\n\n payload['tts'] = tts\n 
if avatar_url:\n payload['avatar_url'] = str(avatar_url)\n if username:\n payload['username'] = username\n\n if flags is not MISSING:\n payload['flags'] = flags.value\n\n if thread_name is not MISSING:\n payload['thread_name'] = thread_name\n\n if allowed_mentions:\n if previous_allowed_mentions is not None:\n payload['allowed_mentions'] = previous_allowed_mentions.merge(allowed_mentions).to_dict()\n else:\n payload['allowed_mentions'] = allowed_mentions.to_dict()\n elif previous_allowed_mentions is not None:\n payload['allowed_mentions'] = previous_allowed_mentions.to_dict()\n\n if mention_author is not None:\n if 'allowed_mentions' not in payload:\n payload['allowed_mentions'] = AllowedMentions().to_dict()\n payload['allowed_mentions']['replied_user'] = mention_author\n\n if attachments is MISSING:\n attachments = files\n else:\n files = [a for a in attachments if isinstance(a, File)]\n\n if attachments is not MISSING:\n file_index = 0\n attachments_payload = []\n for attachment in attachments:\n if isinstance(attachment, File):\n attachments_payload.append(attachment.to_dict(file_index))\n file_index += 1\n else:\n attachments_payload.append(attachment.to_dict())\n\n payload['attachments'] = attachments_payload\n\n if channel_payload is not MISSING:\n payload = {\n 'message': payload,\n }\n payload.update(channel_payload)\n\n multipart = []\n if files:\n multipart.append({'name': 'payload_json', 'value': utils._to_json(payload)})\n payload = None\n for index, file in enumerate(files):\n multipart.append(\n {\n 'name': f'files[{index}]',\n 'value': file.fp,\n 'filename': file.filename,\n 'content_type': 'application/octet-stream',\n }\n )\n\n return MultipartParameters(payload=payload, multipart=multipart, files=files)\n\n\nINTERNAL_API_VERSION: int = 10\n\n\ndef _set_api_version(value: int):\n global INTERNAL_API_VERSION\n\n if not isinstance(value, int):\n raise TypeError(f'expected int not {value.__class__.__name__}')\n\n if value not in (9, 10):\n raise ValueError(f'expected either 9 or 10 not {value}')\n\n INTERNAL_API_VERSION = value\n Route.BASE = f'https://discord.com/api/v{value}'\n\n\nclass Route:\n BASE: ClassVar[str] = 'https://discord.com/api/v10'\n\n def __init__(self, method: str, path: str, *, metadata: Optional[str] = None, **parameters: Any) -> None:\n self.path: str = path\n self.method: str = method\n # Metadata is a special string used to differentiate between known sub rate limits\n # Since these can't be handled generically, this is the next best way to do so.\n self.metadata: Optional[str] = metadata\n url = self.BASE + self.path\n if parameters:\n url = url.format_map({k: _uriquote(v) if isinstance(v, str) else v for k, v in parameters.items()})\n self.url: str = url\n\n # major parameters:\n self.channel_id: Optional[Snowflake] = parameters.get('channel_id')\n self.guild_id: Optional[Snowflake] = parameters.get('guild_id')\n self.webhook_id: Optional[Snowflake] = parameters.get('webhook_id')\n self.webhook_token: Optional[str] = parameters.get('webhook_token')\n\n @property\n def key(self) -> str:\n \"\"\"The bucket key is used to represent the route in various mappings.\"\"\"\n if self.metadata:\n return f'{self.method} {self.path}:{self.metadata}'\n return f'{self.method} {self.path}'\n\n @property\n def major_parameters(self) -> str:\n \"\"\"Returns the major parameters formatted a string.\n\n This needs to be appended to a bucket hash to constitute as a full rate limit key.\n \"\"\"\n return '+'.join(\n str(k) for k in (self.channel_id, self.guild_id, 
self.webhook_id, self.webhook_token) if k is not None\n )\n\n\nclass Ratelimit:\n \"\"\"Represents a Discord rate limit.\n\n This is similar to a semaphore except tailored to Discord's rate limits. This is aware of\n the expiry of a token window, along with the number of tokens available. The goal of this\n design is to increase throughput of requests being sent concurrently rather than forcing\n everything into a single lock queue per route.\n \"\"\"\n\n __slots__ = (\n 'limit',\n 'remaining',\n 'outgoing',\n 'reset_after',\n 'expires',\n 'dirty',\n '_last_request',\n '_max_ratelimit_timeout',\n '_loop',\n '_pending_requests',\n '_sleeping',\n )\n\n def __init__(self, max_ratelimit_timeout: Optional[float]) -> None:\n self.limit: int = 1\n self.remaining: int = self.limit\n self.outgoing: int = 0\n self.reset_after: float = 0.0\n self.expires: Optional[float] = None\n self.dirty: bool = False\n self._max_ratelimit_timeout: Optional[float] = max_ratelimit_timeout\n self._loop: asyncio.AbstractEventLoop = asyncio.get_running_loop()\n self._pending_requests: deque[asyncio.Future[Any]] = deque()\n # Only a single rate limit object should be sleeping at a time.\n # The object that is sleeping is ultimately responsible for freeing the semaphore\n # for the requests currently pending.\n self._sleeping: asyncio.Lock = asyncio.Lock()\n self._last_request: float = self._loop.time()\n\n def __repr__(self) -> str:\n return (\n f'<RateLimitBucket limit={self.limit} remaining={self.remaining} pending_requests={len(self._pending_requests)}>'\n )\n\n def reset(self):\n self.remaining = self.limit - self.outgoing\n self.expires = None\n self.reset_after = 0.0\n self.dirty = False\n\n def update(self, response: aiohttp.ClientResponse, *, use_clock: bool = False) -> None:\n headers = response.headers\n self.limit = int(headers.get('X-Ratelimit-Limit', 1))\n\n if self.dirty:\n self.remaining = min(int(headers.get('X-Ratelimit-Remaining', 0)), self.limit - self.outgoing)\n else:\n self.remaining = int(headers.get('X-Ratelimit-Remaining', 0))\n self.dirty = True\n\n reset_after = headers.get('X-Ratelimit-Reset-After')\n if use_clock or not reset_after:\n utc = datetime.timezone.utc\n now = datetime.datetime.now(utc)\n reset = datetime.datetime.fromtimestamp(float(headers['X-Ratelimit-Reset']), utc)\n self.reset_after = (reset - now).total_seconds()\n else:\n self.reset_after = float(reset_after)\n\n self.expires = self._loop.time() + self.reset_after\n\n def _wake_next(self) -> None:\n while self._pending_requests:\n future = self._pending_requests.popleft()\n if not future.done():\n future.set_result(None)\n break\n\n def _wake(self, count: int = 1, *, exception: Optional[RateLimited] = None) -> None:\n awaken = 0\n while self._pending_requests:\n future = self._pending_requests.popleft()\n if not future.done():\n if exception:\n future.set_exception(exception)\n else:\n future.set_result(None)\n awaken += 1\n\n if awaken >= count:\n break\n\n async def _refresh(self) -> None:\n error = self._max_ratelimit_timeout and self.reset_after > self._max_ratelimit_timeout\n exception = RateLimited(self.reset_after) if error else None\n async with self._sleeping:\n if not error:\n await asyncio.sleep(self.reset_after)\n\n self.reset()\n self._wake(self.remaining, exception=exception)\n\n def is_expired(self) -> bool:\n return self.expires is not None and self._loop.time() > self.expires\n\n def is_inactive(self) -> bool:\n delta = self._loop.time() - self._last_request\n return delta >= 300 and self.outgoing == 0 and 
len(self._pending_requests) == 0\n\n async def acquire(self) -> None:\n self._last_request = self._loop.time()\n if self.is_expired():\n self.reset()\n\n if self._max_ratelimit_timeout is not None and self.expires is not None:\n # Check if we can pre-emptively block this request for having too large of a timeout\n current_reset_after = self.expires - self._loop.time()\n if current_reset_after > self._max_ratelimit_timeout:\n raise RateLimited(current_reset_after)\n\n while self.remaining <= 0:\n future = self._loop.create_future()\n self._pending_requests.append(future)\n try:\n await future\n except:\n future.cancel()\n if self.remaining > 0 and not future.cancelled():\n self._wake_next()\n raise\n\n self.remaining -= 1\n self.outgoing += 1\n\n async def __aenter__(self) -> Self:\n await self.acquire()\n return self\n\n async def __aexit__(self, type: Type[BE], value: BE, traceback: TracebackType) -> None:\n self.outgoing -= 1\n tokens = self.remaining - self.outgoing\n # Check whether the rate limit needs to be pre-emptively slept on\n # Note that this is a Lock to prevent multiple rate limit objects from sleeping at once\n if not self._sleeping.locked():\n if tokens <= 0:\n await self._refresh()\n elif self._pending_requests:\n exception = (\n RateLimited(self.reset_after)\n if self._max_ratelimit_timeout and self.reset_after > self._max_ratelimit_timeout\n else None\n )\n self._wake(tokens, exception=exception)\n\n\n# For some reason, the Discord voice websocket expects this header to be\n# completely lowercase while aiohttp respects spec and does it as case-insensitive\naiohttp.hdrs.WEBSOCKET = 'websocket' # type: ignore\n\n\nclass HTTPClient:\n \"\"\"Represents an HTTP client sending HTTP requests to the Discord API.\"\"\"\n\n def __init__(\n self,\n loop: asyncio.AbstractEventLoop,\n connector: Optional[aiohttp.BaseConnector] = None,\n *,\n proxy: Optional[str] = None,\n proxy_auth: Optional[aiohttp.BasicAuth] = None,\n unsync_clock: bool = True,\n http_trace: Optional[aiohttp.TraceConfig] = None,\n max_ratelimit_timeout: Optional[float] = None,\n ) -> None:\n self.loop: asyncio.AbstractEventLoop = loop\n self.connector: aiohttp.BaseConnector = connector or MISSING\n self.__session: aiohttp.ClientSession = MISSING # filled in static_login\n # Route key -> Bucket hash\n self._bucket_hashes: Dict[str, str] = {}\n # Bucket Hash + Major Parameters -> Rate limit\n # or\n # Route key + Major Parameters -> Rate limit\n # When the key is the latter, it is used for temporary\n # one shot requests that don't have a bucket hash\n # When this reaches 256 elements, it will try to evict based off of expiry\n self._buckets: Dict[str, Ratelimit] = {}\n self._global_over: asyncio.Event = MISSING\n self.token: Optional[str] = None\n self.proxy: Optional[str] = proxy\n self.proxy_auth: Optional[aiohttp.BasicAuth] = proxy_auth\n self.http_trace: Optional[aiohttp.TraceConfig] = http_trace\n self.use_clock: bool = not unsync_clock\n self.max_ratelimit_timeout: Optional[float] = max(30.0, max_ratelimit_timeout) if max_ratelimit_timeout else None\n\n user_agent = 'DiscordBot (https://github.com/Rapptz/discord.py {0}) Python/{1[0]}.{1[1]} aiohttp/{2}'\n self.user_agent: str = user_agent.format(__version__, sys.version_info, aiohttp.__version__)\n\n def clear(self) -> None:\n if self.__session and self.__session.closed:\n self.__session = MISSING\n\n async def ws_connect(self, url: str, *, compress: int = 0) -> aiohttp.ClientWebSocketResponse:\n kwargs = {\n 'proxy_auth': self.proxy_auth,\n 'proxy': 
self.proxy,\n 'max_msg_size': 0,\n 'timeout': 30.0,\n 'autoclose': False,\n 'headers': {\n 'User-Agent': self.user_agent,\n },\n 'compress': compress,\n }\n\n return await self.__session.ws_connect(url, **kwargs)\n\n def _try_clear_expired_ratelimits(self) -> None:\n if len(self._buckets) < 256:\n return\n\n keys = [key for key, bucket in self._buckets.items() if bucket.is_inactive()]\n for key in keys:\n del self._buckets[key]\n\n def get_ratelimit(self, key: str) -> Ratelimit:\n try:\n value = self._buckets[key]\n except KeyError:\n self._buckets[key] = value = Ratelimit(self.max_ratelimit_timeout)\n self._try_clear_expired_ratelimits()\n return value\n\n async def request(\n self,\n route: Route,\n *,\n files: Optional[Sequence[File]] = None,\n form: Optional[Iterable[Dict[str, Any]]] = None,\n **kwargs: Any,\n ) -> Any:\n method = route.method\n url = route.url\n route_key = route.key\n\n bucket_hash = None\n try:\n bucket_hash = self._bucket_hashes[route_key]\n except KeyError:\n key = f'{route_key}:{route.major_parameters}'\n else:\n key = f'{bucket_hash}:{route.major_parameters}'\n\n ratelimit = self.get_ratelimit(key)\n\n # header creation\n headers: Dict[str, str] = {\n 'User-Agent': self.user_agent,\n }\n\n if self.token is not None:\n headers['Authorization'] = 'Bot ' + self.token\n # some checking if it's a JSON request\n if 'json' in kwargs:\n headers['Content-Type'] = 'application/json'\n kwargs['data'] = utils._to_json(kwargs.pop('json'))\n\n try:\n reason = kwargs.pop('reason')\n except KeyError:\n pass\n else:\n if reason:\n headers['X-Audit-Log-Reason'] = _uriquote(reason, safe='/ ')\n\n kwargs['headers'] = headers\n\n # Proxy support\n if self.proxy is not None:\n kwargs['proxy'] = self.proxy\n if self.proxy_auth is not None:\n kwargs['proxy_auth'] = self.proxy_auth\n\n if not self._global_over.is_set():\n # wait until the global lock is complete\n await self._global_over.wait()\n\n response: Optional[aiohttp.ClientResponse] = None\n data: Optional[Union[Dict[str, Any], str]] = None\n async with ratelimit:\n for tries in range(5):\n if files:\n for f in files:\n f.reset(seek=tries)\n\n if form:\n # with quote_fields=True '[' and ']' in file field names are escaped, which discord does not support\n form_data = aiohttp.FormData(quote_fields=False)\n for params in form:\n form_data.add_field(**params)\n kwargs['data'] = form_data\n\n try:\n async with self.__session.request(method, url, **kwargs) as response:\n _log.debug('%s %s with %s has returned %s', method, url, kwargs.get('data'), response.status)\n\n # even errors have text involved in them so this is safe to call\n data = await json_or_text(response)\n\n # Update and use rate limit information if the bucket header is present\n discord_hash = response.headers.get('X-Ratelimit-Bucket')\n # I am unsure if X-Ratelimit-Bucket is always available\n # However, X-Ratelimit-Remaining has been a consistent cornerstone that worked\n has_ratelimit_headers = 'X-Ratelimit-Remaining' in response.headers\n if discord_hash is not None:\n # If the hash Discord has provided is somehow different from our current hash something changed\n if bucket_hash != discord_hash:\n if bucket_hash is not None:\n # If the previous hash was an actual Discord hash then this means the\n # hash has changed sporadically.\n # This can be due to two reasons\n # 1. It's a sub-ratelimit which is hard to handle\n # 2. 
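# --- Illustrative aside (not part of the file above) ---------------------
# The two-tier rate limit key used by request(), in miniature: until Discord
# reveals a bucket hash for a route, the route key itself is used; once an
# X-Ratelimit-Bucket header is seen, the hash takes over for the same major
# parameters. Names below are local to this sketch.
from typing import Dict, Optional

_bucket_hashes: Dict[str, str] = {}  # route key -> discovered bucket hash


def ratelimit_key(route_key: str, major_parameters: str) -> str:
    bucket_hash: Optional[str] = _bucket_hashes.get(route_key)
    if bucket_hash is None:
        # Temporary one-shot key, mirroring the KeyError branch above.
        return f'{route_key}:{major_parameters}'
    return f'{bucket_hash}:{major_parameters}'


route = 'DELETE /channels/{channel_id}/messages/{message_id}'
assert ratelimit_key(route, '1234') == f'{route}:1234'
_bucket_hashes[route] = 'abc123'                 # learned from a response header
assert ratelimit_key(route, '1234') == 'abc123:1234'
# -------------------------------------------------------------------------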
The rate limit information genuinely changed\n # There is no good way to discern these, Discord doesn't provide a way to do so.\n # At best, there will be some form of logging to help catch it.\n # Alternating sub-ratelimits means that the requests oscillate between\n # different underlying rate limits -- this can lead to unexpected 429s\n # It is unavoidable.\n fmt = 'A route (%s) has changed hashes: %s -> %s.'\n _log.debug(fmt, route_key, bucket_hash, discord_hash)\n\n self._bucket_hashes[route_key] = discord_hash\n recalculated_key = discord_hash + route.major_parameters\n self._buckets[recalculated_key] = ratelimit\n self._buckets.pop(key, None)\n elif route_key not in self._bucket_hashes:\n fmt = '%s has found its initial rate limit bucket hash (%s).'\n _log.debug(fmt, route_key, discord_hash)\n self._bucket_hashes[route_key] = discord_hash\n self._buckets[discord_hash + route.major_parameters] = ratelimit\n\n if has_ratelimit_headers:\n if response.status != 429:\n ratelimit.update(response, use_clock=self.use_clock)\n if ratelimit.remaining == 0:\n _log.debug(\n 'A rate limit bucket (%s) has been exhausted. Pre-emptively rate limiting...',\n discord_hash or route_key,\n )\n\n # the request was successful so just return the text/json\n if 300 > response.status >= 200:\n _log.debug('%s %s has received %s', method, url, data)\n return data\n\n # we are being rate limited\n if response.status == 429:\n if not response.headers.get('Via') or isinstance(data, str):\n # Banned by Cloudflare more than likely.\n raise HTTPException(response, data)\n\n if ratelimit.remaining > 0:\n # According to night\n # https://github.com/discord/discord-api-docs/issues/2190#issuecomment-816363129\n # Remaining > 0 and 429 means that a sub ratelimit was hit.\n # It is unclear what should happen in these cases other than just using the retry_after\n # value in the body.\n _log.debug(\n '%s %s received a 429 despite having %s remaining requests. This is a sub-ratelimit.',\n method,\n url,\n ratelimit.remaining,\n )\n\n retry_after: float = data['retry_after']\n if self.max_ratelimit_timeout and retry_after > self.max_ratelimit_timeout:\n _log.warning(\n 'We are being rate limited. %s %s responded with 429. Timeout of %.2f was too long, erroring instead.',\n method,\n url,\n retry_after,\n )\n raise RateLimited(retry_after)\n\n fmt = 'We are being rate limited. %s %s responded with 429. Retrying in %.2f seconds.'\n _log.warning(fmt, method, url, retry_after)\n\n _log.debug(\n 'Rate limit is being handled by bucket hash %s with %r major parameters',\n bucket_hash,\n route.major_parameters,\n )\n\n # check if it's a global rate limit\n is_global = data.get('global', False)\n if is_global:\n _log.warning('Global rate limit has been hit. Retrying in %.2f seconds.', retry_after)\n self._global_over.clear()\n\n await asyncio.sleep(retry_after)\n _log.debug('Done sleeping for the rate limit. 
Retrying...')\n\n # release the global lock now that the\n # global rate limit has passed\n if is_global:\n self._global_over.set()\n _log.debug('Global rate limit is now over.')\n\n continue\n\n # we've received a 500, 502, 504, or 524, unconditional retry\n if response.status in {500, 502, 504, 524}:\n await asyncio.sleep(1 + tries * 2)\n continue\n\n # the usual error cases\n if response.status == 403:\n raise Forbidden(response, data)\n elif response.status == 404:\n raise NotFound(response, data)\n elif response.status >= 500:\n raise DiscordServerError(response, data)\n else:\n raise HTTPException(response, data)\n\n # This is handling exceptions from the request\n except OSError as e:\n # Connection reset by peer\n if tries < 4 and e.errno in (54, 10054):\n await asyncio.sleep(1 + tries * 2)\n continue\n raise\n\n if response is not None:\n # We've run out of retries, raise.\n if response.status >= 500:\n raise DiscordServerError(response, data)\n\n raise HTTPException(response, data)\n\n raise RuntimeError('Unreachable code in HTTP handling')\n\n async def get_from_cdn(self, url: str) -> bytes:\n async with self.__session.get(url) as resp:\n if resp.status == 200:\n return await resp.read()\n elif resp.status == 404:\n raise NotFound(resp, 'asset not found')\n elif resp.status == 403:\n raise Forbidden(resp, 'cannot retrieve asset')\n else:\n raise HTTPException(resp, 'failed to get asset')\n\n raise RuntimeError('Unreachable')\n\n # state management\n\n async def close(self) -> None:\n if self.__session:\n await self.__session.close()\n\n # login management\n\n async def static_login(self, token: str) -> user.User:\n # Necessary to get aiohttp to stop complaining about session creation\n if self.connector is MISSING:\n # discord does not support ipv6\n self.connector = aiohttp.TCPConnector(limit=0, family=socket.AF_INET)\n\n self.__session = aiohttp.ClientSession(\n connector=self.connector,\n ws_response_class=DiscordClientWebSocketResponse,\n trace_configs=None if self.http_trace is None else [self.http_trace],\n )\n self._global_over = asyncio.Event()\n self._global_over.set()\n\n old_token = self.token\n self.token = token\n\n try:\n data = await self.request(Route('GET', '/users/@me'))\n except HTTPException as exc:\n self.token = old_token\n if exc.status == 401:\n raise LoginFailure('Improper token has been passed.') from exc\n raise\n\n return data\n\n def logout(self) -> Response[None]:\n return self.request(Route('POST', '/auth/logout'))\n\n # Group functionality\n\n def start_group(self, user_id: Snowflake, recipients: List[int]) -> Response[channel.GroupDMChannel]:\n payload = {\n 'recipients': recipients,\n }\n\n return self.request(Route('POST', '/users/{user_id}/channels', user_id=user_id), json=payload)\n\n def leave_group(self, channel_id: Snowflake) -> Response[None]:\n return self.request(Route('DELETE', '/channels/{channel_id}', channel_id=channel_id))\n\n # Message management\n\n def start_private_message(self, user_id: Snowflake) -> Response[channel.DMChannel]:\n payload = {\n 'recipient_id': user_id,\n }\n\n return self.request(Route('POST', '/users/@me/channels'), json=payload)\n\n def send_message(\n self,\n channel_id: Snowflake,\n *,\n params: MultipartParameters,\n ) -> Response[message.Message]:\n r = Route('POST', '/channels/{channel_id}/messages', channel_id=channel_id)\n if params.files:\n return self.request(r, files=params.files, form=params.multipart)\n else:\n return self.request(r, json=params.payload)\n\n def send_typing(self, channel_id: 
Snowflake) -> Response[None]:\n return self.request(Route('POST', '/channels/{channel_id}/typing', channel_id=channel_id))\n\n def delete_message(\n self, channel_id: Snowflake, message_id: Snowflake, *, reason: Optional[str] = None\n ) -> Response[None]:\n # Special case certain sub-rate limits\n # https://github.com/discord/discord-api-docs/issues/1092\n # https://github.com/discord/discord-api-docs/issues/1295\n difference = utils.utcnow() - utils.snowflake_time(int(message_id))\n metadata: Optional[str] = None\n if difference <= datetime.timedelta(seconds=10):\n metadata = 'sub-10-seconds'\n elif difference >= datetime.timedelta(days=14):\n metadata = 'older-than-two-weeks'\n r = Route(\n 'DELETE',\n '/channels/{channel_id}/messages/{message_id}',\n channel_id=channel_id,\n message_id=message_id,\n metadata=metadata,\n )\n return self.request(r, reason=reason)\n\n def delete_messages(\n self, channel_id: Snowflake, message_ids: SnowflakeList, *, reason: Optional[str] = None\n ) -> Response[None]:\n r = Route('POST', '/channels/{channel_id}/messages/bulk-delete', channel_id=channel_id)\n payload = {\n 'messages': message_ids,\n }\n\n return self.request(r, json=payload, reason=reason)\n\n def edit_message(\n self, channel_id: Snowflake, message_id: Snowflake, *, params: MultipartParameters\n ) -> Response[message.Message]:\n r = Route('PATCH', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, message_id=message_id)\n if params.files:\n return self.request(r, files=params.files, form=params.multipart)\n else:\n return self.request(r, json=params.payload)\n\n def add_reaction(self, channel_id: Snowflake, message_id: Snowflake, emoji: str) -> Response[None]:\n r = Route(\n 'PUT',\n '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me',\n channel_id=channel_id,\n message_id=message_id,\n emoji=emoji,\n )\n return self.request(r)\n\n def remove_reaction(\n self, channel_id: Snowflake, message_id: Snowflake, emoji: str, member_id: Snowflake\n ) -> Response[None]:\n r = Route(\n 'DELETE',\n '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/{member_id}',\n channel_id=channel_id,\n message_id=message_id,\n member_id=member_id,\n emoji=emoji,\n )\n return self.request(r)\n\n def remove_own_reaction(self, channel_id: Snowflake, message_id: Snowflake, emoji: str) -> Response[None]:\n r = Route(\n 'DELETE',\n '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}/@me',\n channel_id=channel_id,\n message_id=message_id,\n emoji=emoji,\n )\n return self.request(r)\n\n def get_reaction_users(\n self,\n channel_id: Snowflake,\n message_id: Snowflake,\n emoji: str,\n limit: int,\n after: Optional[Snowflake] = None,\n ) -> Response[List[user.User]]:\n r = Route(\n 'GET',\n '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}',\n channel_id=channel_id,\n message_id=message_id,\n emoji=emoji,\n )\n\n params: Dict[str, Any] = {\n 'limit': limit,\n }\n if after:\n params['after'] = after\n return self.request(r, params=params)\n\n def clear_reactions(self, channel_id: Snowflake, message_id: Snowflake) -> Response[None]:\n r = Route(\n 'DELETE',\n '/channels/{channel_id}/messages/{message_id}/reactions',\n channel_id=channel_id,\n message_id=message_id,\n )\n\n return self.request(r)\n\n def clear_single_reaction(self, channel_id: Snowflake, message_id: Snowflake, emoji: str) -> Response[None]:\n r = Route(\n 'DELETE',\n '/channels/{channel_id}/messages/{message_id}/reactions/{emoji}',\n channel_id=channel_id,\n message_id=message_id,\n 
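# --- Illustrative aside (not part of the file above) ---------------------
# delete_message (earlier in this class) buckets requests by message age to
# mirror Discord's known sub-rate limits. A simplified model, with a local
# snowflake_time helper (Discord epoch 2015-01-01) standing in for
# utils.snowflake_time:
import datetime
from typing import Optional

DISCORD_EPOCH = 1420070400000  # ms since the Unix epoch


def snowflake_time(snowflake: int) -> datetime.datetime:
    millis = (snowflake >> 22) + DISCORD_EPOCH
    return datetime.datetime.fromtimestamp(millis / 1000, tz=datetime.timezone.utc)


def delete_metadata(message_id: int) -> Optional[str]:
    age = datetime.datetime.now(datetime.timezone.utc) - snowflake_time(message_id)
    if age <= datetime.timedelta(seconds=10):
        return 'sub-10-seconds'
    if age >= datetime.timedelta(days=14):
        return 'older-than-two-weeks'
    return None  # no special sub-limit bucket


# A snowflake minted "now" falls into the sub-10-seconds bucket:
now_ms = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp() * 1000)
assert delete_metadata((now_ms - DISCORD_EPOCH) << 22) == 'sub-10-seconds'
# -------------------------------------------------------------------------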
emoji=emoji,\n )\n return self.request(r)\n\n def get_message(self, channel_id: Snowflake, message_id: Snowflake) -> Response[message.Message]:\n r = Route('GET', '/channels/{channel_id}/messages/{message_id}', channel_id=channel_id, message_id=message_id)\n return self.request(r)\n\n def get_channel(self, channel_id: Snowflake) -> Response[channel.Channel]:\n r = Route('GET', '/channels/{channel_id}', channel_id=channel_id)\n return self.request(r)\n\n def logs_from(\n self,\n channel_id: Snowflake,\n limit: int,\n before: Optional[Snowflake] = None,\n after: Optional[Snowflake] = None,\n around: Optional[Snowflake] = None,\n ) -> Response[List[message.Message]]:\n params: Dict[str, Any] = {\n 'limit': limit,\n }\n\n if before is not None:\n params['before'] = before\n if after is not None:\n params['after'] = after\n if around is not None:\n params['around'] = around\n\n return self.request(Route('GET', '/channels/{channel_id}/messages', channel_id=channel_id), params=params)\n\n def publish_message(self, channel_id: Snowflake, message_id: Snowflake) -> Response[message.Message]:\n return self.request(\n Route(\n 'POST',\n '/channels/{channel_id}/messages/{message_id}/crosspost',\n channel_id=channel_id,\n message_id=message_id,\n )\n )\n\n def pin_message(self, channel_id: Snowflake, message_id: Snowflake, reason: Optional[str] = None) -> Response[None]:\n r = Route(\n 'PUT',\n '/channels/{channel_id}/pins/{message_id}',\n channel_id=channel_id,\n message_id=message_id,\n )\n return self.request(r, reason=reason)\n\n def unpin_message(self, channel_id: Snowflake, message_id: Snowflake, reason: Optional[str] = None) -> Response[None]:\n r = Route(\n 'DELETE',\n '/channels/{channel_id}/pins/{message_id}',\n channel_id=channel_id,\n message_id=message_id,\n )\n return self.request(r, reason=reason)\n\n def pins_from(self, channel_id: Snowflake) -> Response[List[message.Message]]:\n return self.request(Route('GET', '/channels/{channel_id}/pins', channel_id=channel_id))\n\n # Member management\n\n def kick(self, user_id: Snowflake, guild_id: Snowflake, reason: Optional[str] = None) -> Response[None]:\n r = Route('DELETE', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id)\n return self.request(r, reason=reason)\n\n def ban(\n self,\n user_id: Snowflake,\n guild_id: Snowflake,\n delete_message_seconds: int = 86400, # one day\n reason: Optional[str] = None,\n ) -> Response[None]:\n r = Route('PUT', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id)\n params = {\n 'delete_message_seconds': delete_message_seconds,\n }\n\n return self.request(r, params=params, reason=reason)\n\n def unban(self, user_id: Snowflake, guild_id: Snowflake, *, reason: Optional[str] = None) -> Response[None]:\n r = Route('DELETE', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id)\n return self.request(r, reason=reason)\n\n def guild_voice_state(\n self,\n user_id: Snowflake,\n guild_id: Snowflake,\n *,\n mute: Optional[bool] = None,\n deafen: Optional[bool] = None,\n reason: Optional[str] = None,\n ) -> Response[member.Member]:\n r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id)\n payload = {}\n if mute is not None:\n payload['mute'] = mute\n\n if deafen is not None:\n payload['deaf'] = deafen\n\n return self.request(r, json=payload, reason=reason)\n\n def edit_profile(self, payload: Dict[str, Any]) -> Response[user.User]:\n return self.request(Route('PATCH', '/users/@me'), json=payload)\n\n def 
change_my_nickname(\n self,\n guild_id: Snowflake,\n nickname: str,\n *,\n reason: Optional[str] = None,\n ) -> Response[member.Nickname]:\n r = Route('PATCH', '/guilds/{guild_id}/members/@me/nick', guild_id=guild_id)\n payload = {\n 'nick': nickname,\n }\n return self.request(r, json=payload, reason=reason)\n\n def change_nickname(\n self,\n guild_id: Snowflake,\n user_id: Snowflake,\n nickname: str,\n *,\n reason: Optional[str] = None,\n ) -> Response[member.Member]:\n r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id)\n payload = {\n 'nick': nickname,\n }\n return self.request(r, json=payload, reason=reason)\n\n def edit_my_voice_state(self, guild_id: Snowflake, payload: Dict[str, Any]) -> Response[None]:\n r = Route('PATCH', '/guilds/{guild_id}/voice-states/@me', guild_id=guild_id)\n return self.request(r, json=payload)\n\n def edit_voice_state(self, guild_id: Snowflake, user_id: Snowflake, payload: Dict[str, Any]) -> Response[None]:\n r = Route('PATCH', '/guilds/{guild_id}/voice-states/{user_id}', guild_id=guild_id, user_id=user_id)\n return self.request(r, json=payload)\n\n def edit_member(\n self,\n guild_id: Snowflake,\n user_id: Snowflake,\n *,\n reason: Optional[str] = None,\n **fields: Any,\n ) -> Response[member.MemberWithUser]:\n r = Route('PATCH', '/guilds/{guild_id}/members/{user_id}', guild_id=guild_id, user_id=user_id)\n return self.request(r, json=fields, reason=reason)\n\n # Channel management\n\n def edit_channel(\n self,\n channel_id: Snowflake,\n *,\n reason: Optional[str] = None,\n **options: Any,\n ) -> Response[channel.Channel]:\n r = Route('PATCH', '/channels/{channel_id}', channel_id=channel_id)\n valid_keys = (\n 'name',\n 'parent_id',\n 'topic',\n 'bitrate',\n 'nsfw',\n 'user_limit',\n 'position',\n 'permission_overwrites',\n 'rate_limit_per_user',\n 'type',\n 'rtc_region',\n 'video_quality_mode',\n 'archived',\n 'auto_archive_duration',\n 'locked',\n 'invitable',\n 'default_auto_archive_duration',\n 'flags',\n 'default_thread_rate_limit_per_user',\n 'default_reaction_emoji',\n 'available_tags',\n 'applied_tags',\n 'default_forum_layout',\n 'default_sort_order',\n )\n\n payload = {k: v for k, v in options.items() if k in valid_keys}\n return self.request(r, reason=reason, json=payload)\n\n def bulk_channel_update(\n self,\n guild_id: Snowflake,\n data: List[guild.ChannelPositionUpdate],\n *,\n reason: Optional[str] = None,\n ) -> Response[None]:\n r = Route('PATCH', '/guilds/{guild_id}/channels', guild_id=guild_id)\n return self.request(r, json=data, reason=reason)\n\n def create_channel(\n self,\n guild_id: Snowflake,\n channel_type: channel.ChannelType,\n *,\n reason: Optional[str] = None,\n **options: Any,\n ) -> Response[channel.GuildChannel]:\n payload = {\n 'type': channel_type,\n }\n\n valid_keys = (\n 'name',\n 'parent_id',\n 'topic',\n 'bitrate',\n 'nsfw',\n 'user_limit',\n 'position',\n 'permission_overwrites',\n 'rate_limit_per_user',\n 'rtc_region',\n 'video_quality_mode',\n 'default_auto_archive_duration',\n 'default_thread_rate_limit_per_user',\n 'default_sort_order',\n 'default_reaction_emoji',\n 'default_forum_layout',\n 'available_tags',\n )\n payload.update({k: v for k, v in options.items() if k in valid_keys and v is not None})\n\n return self.request(Route('POST', '/guilds/{guild_id}/channels', guild_id=guild_id), json=payload, reason=reason)\n\n def delete_channel(\n self,\n channel_id: Snowflake,\n *,\n reason: Optional[str] = None,\n ) -> Response[None]:\n return self.request(Route('DELETE', 
'/channels/{channel_id}', channel_id=channel_id), reason=reason)\n\n # Thread management\n\n def start_thread_with_message(\n self,\n channel_id: Snowflake,\n message_id: Snowflake,\n *,\n name: str,\n auto_archive_duration: threads.ThreadArchiveDuration,\n rate_limit_per_user: Optional[int] = None,\n reason: Optional[str] = None,\n ) -> Response[threads.Thread]:\n payload = {\n 'name': name,\n 'auto_archive_duration': auto_archive_duration,\n 'rate_limit_per_user': rate_limit_per_user,\n }\n\n route = Route(\n 'POST', '/channels/{channel_id}/messages/{message_id}/threads', channel_id=channel_id, message_id=message_id\n )\n return self.request(route, json=payload, reason=reason)\n\n def start_thread_without_message(\n self,\n channel_id: Snowflake,\n *,\n name: str,\n auto_archive_duration: threads.ThreadArchiveDuration,\n type: threads.ThreadType,\n invitable: bool = True,\n rate_limit_per_user: Optional[int] = None,\n reason: Optional[str] = None,\n ) -> Response[threads.Thread]:\n payload = {\n 'name': name,\n 'auto_archive_duration': auto_archive_duration,\n 'type': type,\n 'invitable': invitable,\n 'rate_limit_per_user': rate_limit_per_user,\n }\n\n route = Route('POST', '/channels/{channel_id}/threads', channel_id=channel_id)\n return self.request(route, json=payload, reason=reason)\n\n def start_thread_in_forum(\n self,\n channel_id: Snowflake,\n *,\n params: MultipartParameters,\n reason: Optional[str] = None,\n ) -> Response[threads.ForumThread]:\n query = {'use_nested_fields': 1}\n r = Route('POST', '/channels/{channel_id}/threads', channel_id=channel_id)\n if params.files:\n return self.request(r, files=params.files, form=params.multipart, params=query, reason=reason)\n else:\n return self.request(r, json=params.payload, params=query, reason=reason)\n\n def join_thread(self, channel_id: Snowflake) -> Response[None]:\n return self.request(Route('POST', '/channels/{channel_id}/thread-members/@me', channel_id=channel_id))\n\n def add_user_to_thread(self, channel_id: Snowflake, user_id: Snowflake) -> Response[None]:\n return self.request(\n Route('PUT', '/channels/{channel_id}/thread-members/{user_id}', channel_id=channel_id, user_id=user_id)\n )\n\n def leave_thread(self, channel_id: Snowflake) -> Response[None]:\n return self.request(Route('DELETE', '/channels/{channel_id}/thread-members/@me', channel_id=channel_id))\n\n def remove_user_from_thread(self, channel_id: Snowflake, user_id: Snowflake) -> Response[None]:\n route = Route('DELETE', '/channels/{channel_id}/thread-members/{user_id}', channel_id=channel_id, user_id=user_id)\n return self.request(route)\n\n def get_public_archived_threads(\n self, channel_id: Snowflake, before: Optional[Snowflake] = None, limit: int = 50\n ) -> Response[threads.ThreadPaginationPayload]:\n route = Route('GET', '/channels/{channel_id}/threads/archived/public', channel_id=channel_id)\n\n params = {}\n if before:\n params['before'] = before\n params['limit'] = limit\n return self.request(route, params=params)\n\n def get_private_archived_threads(\n self, channel_id: Snowflake, before: Optional[Snowflake] = None, limit: int = 50\n ) -> Response[threads.ThreadPaginationPayload]:\n route = Route('GET', '/channels/{channel_id}/threads/archived/private', channel_id=channel_id)\n\n params = {}\n if before:\n params['before'] = before\n params['limit'] = limit\n return self.request(route, params=params)\n\n def get_joined_private_archived_threads(\n self, channel_id: Snowflake, before: Optional[Snowflake] = None, limit: int = 50\n ) -> 
Response[threads.ThreadPaginationPayload]:\n route = Route('GET', '/channels/{channel_id}/users/@me/threads/archived/private', channel_id=channel_id)\n params = {}\n if before:\n params['before'] = before\n params['limit'] = limit\n return self.request(route, params=params)\n\n def get_active_threads(self, guild_id: Snowflake) -> Response[threads.ThreadPaginationPayload]:\n route = Route('GET', '/guilds/{guild_id}/threads/active', guild_id=guild_id)\n return self.request(route)\n\n def get_thread_member(self, channel_id: Snowflake, user_id: Snowflake) -> Response[threads.ThreadMember]:\n route = Route('GET', '/channels/{channel_id}/thread-members/{user_id}', channel_id=channel_id, user_id=user_id)\n return self.request(route)\n\n def get_thread_members(self, channel_id: Snowflake) -> Response[List[threads.ThreadMember]]:\n route = Route('GET', '/channels/{channel_id}/thread-members', channel_id=channel_id)\n return self.request(route)\n\n # Webhook management\n\n def create_webhook(\n self,\n channel_id: Snowflake,\n *,\n name: str,\n avatar: Optional[bytes] = None,\n reason: Optional[str] = None,\n ) -> Response[webhook.Webhook]:\n payload: Dict[str, Any] = {\n 'name': name,\n }\n if avatar is not None:\n payload['avatar'] = avatar\n\n r = Route('POST', '/channels/{channel_id}/webhooks', channel_id=channel_id)\n return self.request(r, json=payload, reason=reason)\n\n def channel_webhooks(self, channel_id: Snowflake) -> Response[List[webhook.Webhook]]:\n return self.request(Route('GET', '/channels/{channel_id}/webhooks', channel_id=channel_id))\n\n def guild_webhooks(self, guild_id: Snowflake) -> Response[List[webhook.Webhook]]:\n return self.request(Route('GET', '/guilds/{guild_id}/webhooks', guild_id=guild_id))\n\n def get_webhook(self, webhook_id: Snowflake) -> Response[webhook.Webhook]:\n return self.request(Route('GET', '/webhooks/{webhook_id}', webhook_id=webhook_id))\n\n def follow_webhook(\n self,\n channel_id: Snowflake,\n webhook_channel_id: Snowflake,\n reason: Optional[str] = None,\n ) -> Response[None]:\n payload = {\n 'webhook_channel_id': str(webhook_channel_id),\n }\n return self.request(\n Route('POST', '/channels/{channel_id}/followers', channel_id=channel_id), json=payload, reason=reason\n )\n\n # Guild management\n\n def get_guilds(\n self,\n limit: int,\n before: Optional[Snowflake] = None,\n after: Optional[Snowflake] = None,\n with_counts: bool = True,\n ) -> Response[List[guild.Guild]]:\n params: Dict[str, Any] = {\n 'limit': limit,\n 'with_counts': int(with_counts),\n }\n\n if before:\n params['before'] = before\n if after:\n params['after'] = after\n\n return self.request(Route('GET', '/users/@me/guilds'), params=params)\n\n def leave_guild(self, guild_id: Snowflake) -> Response[None]:\n return self.request(Route('DELETE', '/users/@me/guilds/{guild_id}', guild_id=guild_id))\n\n def get_guild(self, guild_id: Snowflake, *, with_counts: bool = True) -> Response[guild.Guild]:\n params = {'with_counts': int(with_counts)}\n return self.request(Route('GET', '/guilds/{guild_id}', guild_id=guild_id), params=params)\n\n def delete_guild(self, guild_id: Snowflake) -> Response[None]:\n return self.request(Route('DELETE', '/guilds/{guild_id}', guild_id=guild_id))\n\n def create_guild(self, name: str, icon: Optional[str]) -> Response[guild.Guild]:\n payload = {\n 'name': name,\n }\n if icon:\n payload['icon'] = icon\n\n return self.request(Route('POST', '/guilds'), json=payload)\n\n def edit_guild(self, guild_id: Snowflake, *, reason: Optional[str] = None, **fields: Any) -> 
Response[guild.Guild]:\n valid_keys = (\n 'name',\n 'region',\n 'icon',\n 'afk_timeout',\n 'owner_id',\n 'afk_channel_id',\n 'splash',\n 'discovery_splash',\n 'features',\n 'verification_level',\n 'system_channel_id',\n 'default_message_notifications',\n 'description',\n 'explicit_content_filter',\n 'banner',\n 'system_channel_flags',\n 'rules_channel_id',\n 'public_updates_channel_id',\n 'preferred_locale',\n 'premium_progress_bar_enabled',\n 'safety_alerts_channel_id',\n )\n\n payload = {k: v for k, v in fields.items() if k in valid_keys}\n\n return self.request(Route('PATCH', '/guilds/{guild_id}', guild_id=guild_id), json=payload, reason=reason)\n\n def edit_guild_mfa_level(\n self, guild_id: Snowflake, *, mfa_level: int, reason: Optional[str] = None\n ) -> Response[guild.GuildMFALevel]:\n payload = {'level': mfa_level}\n return self.request(Route('POST', '/guilds/{guild_id}/mfa', guild_id=guild_id), json=payload, reason=reason)\n\n def get_template(self, code: str) -> Response[template.Template]:\n return self.request(Route('GET', '/guilds/templates/{code}', code=code))\n\n def guild_templates(self, guild_id: Snowflake) -> Response[List[template.Template]]:\n return self.request(Route('GET', '/guilds/{guild_id}/templates', guild_id=guild_id))\n\n def create_template(self, guild_id: Snowflake, payload: Dict[str, Any]) -> Response[template.Template]:\n return self.request(Route('POST', '/guilds/{guild_id}/templates', guild_id=guild_id), json=payload)\n\n def sync_template(self, guild_id: Snowflake, code: str) -> Response[template.Template]:\n return self.request(Route('PUT', '/guilds/{guild_id}/templates/{code}', guild_id=guild_id, code=code))\n\n def edit_template(self, guild_id: Snowflake, code: str, payload: Dict[str, Any]) -> Response[template.Template]:\n valid_keys = (\n 'name',\n 'description',\n )\n payload = {k: v for k, v in payload.items() if k in valid_keys}\n return self.request(\n Route('PATCH', '/guilds/{guild_id}/templates/{code}', guild_id=guild_id, code=code), json=payload\n )\n\n def delete_template(self, guild_id: Snowflake, code: str) -> Response[None]:\n return self.request(Route('DELETE', '/guilds/{guild_id}/templates/{code}', guild_id=guild_id, code=code))\n\n def create_from_template(self, code: str, name: str, icon: Optional[str]) -> Response[guild.Guild]:\n payload = {\n 'name': name,\n }\n if icon:\n payload['icon'] = icon\n return self.request(Route('POST', '/guilds/templates/{code}', code=code), json=payload)\n\n def get_bans(\n self,\n guild_id: Snowflake,\n limit: int,\n before: Optional[Snowflake] = None,\n after: Optional[Snowflake] = None,\n ) -> Response[List[guild.Ban]]:\n params: Dict[str, Any] = {\n 'limit': limit,\n }\n if before is not None:\n params['before'] = before\n if after is not None:\n params['after'] = after\n\n return self.request(Route('GET', '/guilds/{guild_id}/bans', guild_id=guild_id), params=params)\n\n def get_welcome_screen(self, guild_id: Snowflake) -> Response[welcome_screen.WelcomeScreen]:\n return self.request(Route('GET', '/guilds/{guild_id}/welcome-screen', guild_id=guild_id))\n\n def edit_welcome_screen(\n self, guild_id: Snowflake, *, reason: Optional[str] = None, **fields: Any\n ) -> Response[welcome_screen.WelcomeScreen]:\n valid_keys = (\n 'description',\n 'welcome_channels',\n 'enabled',\n )\n payload = {k: v for k, v in fields.items() if k in valid_keys}\n return self.request(\n Route('PATCH', '/guilds/{guild_id}/welcome-screen', guild_id=guild_id), json=payload, reason=reason\n )\n\n def get_ban(self, user_id: 
Snowflake, guild_id: Snowflake) -> Response[guild.Ban]:\n return self.request(Route('GET', '/guilds/{guild_id}/bans/{user_id}', guild_id=guild_id, user_id=user_id))\n\n def get_vanity_code(self, guild_id: Snowflake) -> Response[invite.VanityInvite]:\n return self.request(Route('GET', '/guilds/{guild_id}/vanity-url', guild_id=guild_id))\n\n def change_vanity_code(self, guild_id: Snowflake, code: str, *, reason: Optional[str] = None) -> Response[None]:\n payload: Dict[str, Any] = {'code': code}\n return self.request(Route('PATCH', '/guilds/{guild_id}/vanity-url', guild_id=guild_id), json=payload, reason=reason)\n\n def get_all_guild_channels(self, guild_id: Snowflake) -> Response[List[guild.GuildChannel]]:\n return self.request(Route('GET', '/guilds/{guild_id}/channels', guild_id=guild_id))\n\n def get_members(\n self, guild_id: Snowflake, limit: int, after: Optional[Snowflake]\n ) -> Response[List[member.MemberWithUser]]:\n params: Dict[str, Any] = {\n 'limit': limit,\n }\n if after:\n params['after'] = after\n\n r = Route('GET', '/guilds/{guild_id}/members', guild_id=guild_id)\n return self.request(r, params=params)\n\n def get_member(self, guild_id: Snowflake, member_id: Snowflake) -> Response[member.MemberWithUser]:\n return self.request(Route('GET', '/guilds/{guild_id}/members/{member_id}', guild_id=guild_id, member_id=member_id))\n\n def prune_members(\n self,\n guild_id: Snowflake,\n days: int,\n compute_prune_count: bool,\n roles: Iterable[str],\n *,\n reason: Optional[str] = None,\n ) -> Response[guild.GuildPrune]:\n payload: Dict[str, Any] = {\n 'days': days,\n 'compute_prune_count': 'true' if compute_prune_count else 'false',\n }\n if roles:\n payload['include_roles'] = ', '.join(roles)\n\n return self.request(Route('POST', '/guilds/{guild_id}/prune', guild_id=guild_id), json=payload, reason=reason)\n\n def estimate_pruned_members(\n self,\n guild_id: Snowflake,\n days: int,\n roles: Iterable[str],\n ) -> Response[guild.GuildPrune]:\n params: Dict[str, Any] = {\n 'days': days,\n }\n if roles:\n params['include_roles'] = ', '.join(roles)\n\n return self.request(Route('GET', '/guilds/{guild_id}/prune', guild_id=guild_id), params=params)\n\n def get_sticker(self, sticker_id: Snowflake) -> Response[sticker.Sticker]:\n return self.request(Route('GET', '/stickers/{sticker_id}', sticker_id=sticker_id))\n\n def list_premium_sticker_packs(self) -> Response[sticker.ListPremiumStickerPacks]:\n return self.request(Route('GET', '/sticker-packs'))\n\n def get_all_guild_stickers(self, guild_id: Snowflake) -> Response[List[sticker.GuildSticker]]:\n return self.request(Route('GET', '/guilds/{guild_id}/stickers', guild_id=guild_id))\n\n def get_guild_sticker(self, guild_id: Snowflake, sticker_id: Snowflake) -> Response[sticker.GuildSticker]:\n return self.request(\n Route('GET', '/guilds/{guild_id}/stickers/{sticker_id}', guild_id=guild_id, sticker_id=sticker_id)\n )\n\n def create_guild_sticker(\n self, guild_id: Snowflake, payload: Dict[str, Any], file: File, reason: Optional[str]\n ) -> Response[sticker.GuildSticker]:\n initial_bytes = file.fp.read(16)\n\n try:\n mime_type = utils._get_mime_type_for_image(initial_bytes)\n except ValueError:\n if initial_bytes.startswith(b'{'):\n mime_type = 'application/json'\n else:\n mime_type = 'application/octet-stream'\n finally:\n file.reset()\n\n form: List[Dict[str, Any]] = [\n {\n 'name': 'file',\n 'value': file.fp,\n 'filename': file.filename,\n 'content_type': mime_type,\n }\n ]\n\n for k, v in payload.items():\n form.append(\n {\n 'name': k,\n 
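# --- Illustrative aside (not part of the file above) ---------------------
# A rough stand-in for the utils._get_mime_type_for_image call used by
# create_guild_sticker above: the first bytes of the upload are matched
# against well-known magic numbers. This is a sketch, not the library's
# actual implementation.
def get_mime_type_for_image(data: bytes) -> str:
    if data.startswith(b'\x89PNG\r\n\x1a\n'):
        return 'image/png'
    if data.startswith(b'\xff\xd8\xff'):
        return 'image/jpeg'
    if data.startswith((b'GIF87a', b'GIF89a')):
        return 'image/gif'
    if data[:4] == b'RIFF' and data[8:12] == b'WEBP':
        return 'image/webp'
    raise ValueError('Unsupported image type given')


assert get_mime_type_for_image(b'\x89PNG\r\n\x1a\n' + b'\x00' * 8) == 'image/png'
assert get_mime_type_for_image(b'RIFF\x00\x00\x00\x00WEBPVP8 ') == 'image/webp'
# -------------------------------------------------------------------------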
'value': v,\n }\n )\n\n return self.request(\n Route('POST', '/guilds/{guild_id}/stickers', guild_id=guild_id), form=form, files=[file], reason=reason\n )\n\n def modify_guild_sticker(\n self,\n guild_id: Snowflake,\n sticker_id: Snowflake,\n payload: Dict[str, Any],\n reason: Optional[str],\n ) -> Response[sticker.GuildSticker]:\n return self.request(\n Route('PATCH', '/guilds/{guild_id}/stickers/{sticker_id}', guild_id=guild_id, sticker_id=sticker_id),\n json=payload,\n reason=reason,\n )\n\n def delete_guild_sticker(self, guild_id: Snowflake, sticker_id: Snowflake, reason: Optional[str]) -> Response[None]:\n return self.request(\n Route('DELETE', '/guilds/{guild_id}/stickers/{sticker_id}', guild_id=guild_id, sticker_id=sticker_id),\n reason=reason,\n )\n\n def get_all_custom_emojis(self, guild_id: Snowflake) -> Response[List[emoji.Emoji]]:\n return self.request(Route('GET', '/guilds/{guild_id}/emojis', guild_id=guild_id))\n\n def get_custom_emoji(self, guild_id: Snowflake, emoji_id: Snowflake) -> Response[emoji.Emoji]:\n return self.request(Route('GET', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id))\n\n def create_custom_emoji(\n self,\n guild_id: Snowflake,\n name: str,\n image: str,\n *,\n roles: Optional[SnowflakeList] = None,\n reason: Optional[str] = None,\n ) -> Response[emoji.Emoji]:\n payload = {\n 'name': name,\n 'image': image,\n 'roles': roles or [],\n }\n\n r = Route('POST', '/guilds/{guild_id}/emojis', guild_id=guild_id)\n return self.request(r, json=payload, reason=reason)\n\n def delete_custom_emoji(\n self,\n guild_id: Snowflake,\n emoji_id: Snowflake,\n *,\n reason: Optional[str] = None,\n ) -> Response[None]:\n r = Route('DELETE', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id)\n return self.request(r, reason=reason)\n\n def edit_custom_emoji(\n self,\n guild_id: Snowflake,\n emoji_id: Snowflake,\n *,\n payload: Dict[str, Any],\n reason: Optional[str] = None,\n ) -> Response[emoji.Emoji]:\n r = Route('PATCH', '/guilds/{guild_id}/emojis/{emoji_id}', guild_id=guild_id, emoji_id=emoji_id)\n return self.request(r, json=payload, reason=reason)\n\n def get_all_integrations(self, guild_id: Snowflake) -> Response[List[integration.Integration]]:\n r = Route('GET', '/guilds/{guild_id}/integrations', guild_id=guild_id)\n\n return self.request(r)\n\n def create_integration(self, guild_id: Snowflake, type: integration.IntegrationType, id: int) -> Response[None]:\n payload = {\n 'type': type,\n 'id': id,\n }\n\n r = Route('POST', '/guilds/{guild_id}/integrations', guild_id=guild_id)\n return self.request(r, json=payload)\n\n def edit_integration(self, guild_id: Snowflake, integration_id: Snowflake, **payload: Any) -> Response[None]:\n r = Route(\n 'PATCH', '/guilds/{guild_id}/integrations/{integration_id}', guild_id=guild_id, integration_id=integration_id\n )\n\n return self.request(r, json=payload)\n\n def sync_integration(self, guild_id: Snowflake, integration_id: Snowflake) -> Response[None]:\n r = Route(\n 'POST', '/guilds/{guild_id}/integrations/{integration_id}/sync', guild_id=guild_id, integration_id=integration_id\n )\n\n return self.request(r)\n\n def delete_integration(\n self, guild_id: Snowflake, integration_id: Snowflake, *, reason: Optional[str] = None\n ) -> Response[None]:\n r = Route(\n 'DELETE', '/guilds/{guild_id}/integrations/{integration_id}', guild_id=guild_id, integration_id=integration_id\n )\n\n return self.request(r, reason=reason)\n\n def get_audit_logs(\n self,\n guild_id: Snowflake,\n limit: 
int = 100,\n before: Optional[Snowflake] = None,\n after: Optional[Snowflake] = None,\n user_id: Optional[Snowflake] = None,\n action_type: Optional[audit_log.AuditLogEvent] = None,\n ) -> Response[audit_log.AuditLog]:\n params: Dict[str, Any] = {'limit': limit}\n if before:\n params['before'] = before\n if after is not None:\n params['after'] = after\n if user_id:\n params['user_id'] = user_id\n if action_type:\n params['action_type'] = action_type\n\n r = Route('GET', '/guilds/{guild_id}/audit-logs', guild_id=guild_id)\n return self.request(r, params=params)\n\n def get_widget(self, guild_id: Snowflake) -> Response[widget.Widget]:\n return self.request(Route('GET', '/guilds/{guild_id}/widget.json', guild_id=guild_id))\n\n def edit_widget(\n self, guild_id: Snowflake, payload: widget.EditWidgetSettings, reason: Optional[str] = None\n ) -> Response[widget.WidgetSettings]:\n return self.request(Route('PATCH', '/guilds/{guild_id}/widget', guild_id=guild_id), json=payload, reason=reason)\n\n # Invite management\n\n def create_invite(\n self,\n channel_id: Snowflake,\n *,\n reason: Optional[str] = None,\n max_age: int = 0,\n max_uses: int = 0,\n temporary: bool = False,\n unique: bool = True,\n target_type: Optional[invite.InviteTargetType] = None,\n target_user_id: Optional[Snowflake] = None,\n target_application_id: Optional[Snowflake] = None,\n ) -> Response[invite.Invite]:\n r = Route('POST', '/channels/{channel_id}/invites', channel_id=channel_id)\n payload = {\n 'max_age': max_age,\n 'max_uses': max_uses,\n 'temporary': temporary,\n 'unique': unique,\n }\n\n if target_type:\n payload['target_type'] = target_type\n\n if target_user_id:\n payload['target_user_id'] = target_user_id\n\n if target_application_id:\n payload['target_application_id'] = str(target_application_id)\n\n return self.request(r, reason=reason, json=payload)\n\n def get_invite(\n self,\n invite_id: str,\n *,\n with_counts: bool = True,\n with_expiration: bool = True,\n guild_scheduled_event_id: Optional[Snowflake] = None,\n ) -> Response[invite.Invite]:\n params: Dict[str, Any] = {\n 'with_counts': int(with_counts),\n 'with_expiration': int(with_expiration),\n }\n\n if guild_scheduled_event_id:\n params['guild_scheduled_event_id'] = guild_scheduled_event_id\n\n return self.request(Route('GET', '/invites/{invite_id}', invite_id=invite_id), params=params)\n\n def invites_from(self, guild_id: Snowflake) -> Response[List[invite.Invite]]:\n return self.request(Route('GET', '/guilds/{guild_id}/invites', guild_id=guild_id))\n\n def invites_from_channel(self, channel_id: Snowflake) -> Response[List[invite.Invite]]:\n return self.request(Route('GET', '/channels/{channel_id}/invites', channel_id=channel_id))\n\n def delete_invite(self, invite_id: str, *, reason: Optional[str] = None) -> Response[None]:\n return self.request(Route('DELETE', '/invites/{invite_id}', invite_id=invite_id), reason=reason)\n\n # Role management\n\n def get_roles(self, guild_id: Snowflake) -> Response[List[role.Role]]:\n return self.request(Route('GET', '/guilds/{guild_id}/roles', guild_id=guild_id))\n\n def edit_role(\n self, guild_id: Snowflake, role_id: Snowflake, *, reason: Optional[str] = None, **fields: Any\n ) -> Response[role.Role]:\n r = Route('PATCH', '/guilds/{guild_id}/roles/{role_id}', guild_id=guild_id, role_id=role_id)\n valid_keys = ('name', 'permissions', 'color', 'hoist', 'icon', 'unicode_emoji', 'mentionable')\n payload = {k: v for k, v in fields.items() if k in valid_keys}\n return self.request(r, json=payload, reason=reason)\n\n def 
delete_role(self, guild_id: Snowflake, role_id: Snowflake, *, reason: Optional[str] = None) -> Response[None]:\n r = Route('DELETE', '/guilds/{guild_id}/roles/{role_id}', guild_id=guild_id, role_id=role_id)\n return self.request(r, reason=reason)\n\n def replace_roles(\n self,\n user_id: Snowflake,\n guild_id: Snowflake,\n role_ids: List[int],\n *,\n reason: Optional[str] = None,\n ) -> Response[member.MemberWithUser]:\n return self.edit_member(guild_id=guild_id, user_id=user_id, roles=role_ids, reason=reason)\n\n def create_role(self, guild_id: Snowflake, *, reason: Optional[str] = None, **fields: Any) -> Response[role.Role]:\n r = Route('POST', '/guilds/{guild_id}/roles', guild_id=guild_id)\n return self.request(r, json=fields, reason=reason)\n\n def move_role_position(\n self,\n guild_id: Snowflake,\n positions: List[guild.RolePositionUpdate],\n *,\n reason: Optional[str] = None,\n ) -> Response[List[role.Role]]:\n r = Route('PATCH', '/guilds/{guild_id}/roles', guild_id=guild_id)\n return self.request(r, json=positions, reason=reason)\n\n def add_role(\n self, guild_id: Snowflake, user_id: Snowflake, role_id: Snowflake, *, reason: Optional[str] = None\n ) -> Response[None]:\n r = Route(\n 'PUT',\n '/guilds/{guild_id}/members/{user_id}/roles/{role_id}',\n guild_id=guild_id,\n user_id=user_id,\n role_id=role_id,\n )\n return self.request(r, reason=reason)\n\n def remove_role(\n self, guild_id: Snowflake, user_id: Snowflake, role_id: Snowflake, *, reason: Optional[str] = None\n ) -> Response[None]:\n r = Route(\n 'DELETE',\n '/guilds/{guild_id}/members/{user_id}/roles/{role_id}',\n guild_id=guild_id,\n user_id=user_id,\n role_id=role_id,\n )\n return self.request(r, reason=reason)\n\n def edit_channel_permissions(\n self,\n channel_id: Snowflake,\n target: Snowflake,\n allow: str,\n deny: str,\n type: channel.OverwriteType,\n *,\n reason: Optional[str] = None,\n ) -> Response[None]:\n payload = {'id': target, 'allow': allow, 'deny': deny, 'type': type}\n r = Route('PUT', '/channels/{channel_id}/permissions/{target}', channel_id=channel_id, target=target)\n return self.request(r, json=payload, reason=reason)\n\n def delete_channel_permissions(\n self, channel_id: Snowflake, target: Snowflake, *, reason: Optional[str] = None\n ) -> Response[None]:\n r = Route('DELETE', '/channels/{channel_id}/permissions/{target}', channel_id=channel_id, target=target)\n return self.request(r, reason=reason)\n\n # Voice management\n\n def move_member(\n self,\n user_id: Snowflake,\n guild_id: Snowflake,\n channel_id: Snowflake,\n *,\n reason: Optional[str] = None,\n ) -> Response[member.MemberWithUser]:\n return self.edit_member(guild_id=guild_id, user_id=user_id, channel_id=channel_id, reason=reason)\n\n # Stage instance management\n\n def get_stage_instance(self, channel_id: Snowflake) -> Response[channel.StageInstance]:\n return self.request(Route('GET', '/stage-instances/{channel_id}', channel_id=channel_id))\n\n def create_stage_instance(self, *, reason: Optional[str], **payload: Any) -> Response[channel.StageInstance]:\n valid_keys = (\n 'channel_id',\n 'topic',\n 'privacy_level',\n 'send_start_notification',\n 'guild_scheduled_event_id',\n )\n payload = {k: v for k, v in payload.items() if k in valid_keys}\n\n return self.request(Route('POST', '/stage-instances'), json=payload, reason=reason)\n\n def edit_stage_instance(self, channel_id: Snowflake, *, reason: Optional[str] = None, **payload: Any) -> Response[None]:\n valid_keys = (\n 'topic',\n 'privacy_level',\n )\n payload = {k: v for k, v in 
payload.items() if k in valid_keys}\n\n return self.request(\n Route('PATCH', '/stage-instances/{channel_id}', channel_id=channel_id), json=payload, reason=reason\n )\n\n def delete_stage_instance(self, channel_id: Snowflake, *, reason: Optional[str] = None) -> Response[None]:\n return self.request(Route('DELETE', '/stage-instances/{channel_id}', channel_id=channel_id), reason=reason)\n\n # Guild scheduled event management\n\n @overload\n def get_scheduled_events(\n self, guild_id: Snowflake, with_user_count: Literal[True]\n ) -> Response[List[scheduled_event.GuildScheduledEventWithUserCount]]:\n ...\n\n @overload\n def get_scheduled_events(\n self, guild_id: Snowflake, with_user_count: Literal[False]\n ) -> Response[List[scheduled_event.GuildScheduledEvent]]:\n ...\n\n @overload\n def get_scheduled_events(\n self, guild_id: Snowflake, with_user_count: bool\n ) -> Union[\n Response[List[scheduled_event.GuildScheduledEventWithUserCount]], Response[List[scheduled_event.GuildScheduledEvent]]\n ]:\n ...\n\n def get_scheduled_events(self, guild_id: Snowflake, with_user_count: bool) -> Response[Any]:\n params = {'with_user_count': int(with_user_count)}\n return self.request(Route('GET', '/guilds/{guild_id}/scheduled-events', guild_id=guild_id), params=params)\n\n def create_guild_scheduled_event(\n self, guild_id: Snowflake, *, reason: Optional[str] = None, **payload: Any\n ) -> Response[scheduled_event.GuildScheduledEvent]:\n valid_keys = (\n 'channel_id',\n 'entity_metadata',\n 'name',\n 'privacy_level',\n 'scheduled_start_time',\n 'scheduled_end_time',\n 'description',\n 'entity_type',\n 'image',\n )\n payload = {k: v for k, v in payload.items() if k in valid_keys}\n\n return self.request(\n Route('POST', '/guilds/{guild_id}/scheduled-events', guild_id=guild_id), json=payload, reason=reason\n )\n\n @overload\n def get_scheduled_event(\n self, guild_id: Snowflake, guild_scheduled_event_id: Snowflake, with_user_count: Literal[True]\n ) -> Response[scheduled_event.GuildScheduledEventWithUserCount]:\n ...\n\n @overload\n def get_scheduled_event(\n self, guild_id: Snowflake, guild_scheduled_event_id: Snowflake, with_user_count: Literal[False]\n ) -> Response[scheduled_event.GuildScheduledEvent]:\n ...\n\n @overload\n def get_scheduled_event(\n self, guild_id: Snowflake, guild_scheduled_event_id: Snowflake, with_user_count: bool\n ) -> Union[Response[scheduled_event.GuildScheduledEventWithUserCount], Response[scheduled_event.GuildScheduledEvent]]:\n ...\n\n def get_scheduled_event(\n self, guild_id: Snowflake, guild_scheduled_event_id: Snowflake, with_user_count: bool\n ) -> Response[Any]:\n params = {'with_user_count': int(with_user_count)}\n return self.request(\n Route(\n 'GET',\n '/guilds/{guild_id}/scheduled-events/{guild_scheduled_event_id}',\n guild_id=guild_id,\n guild_scheduled_event_id=guild_scheduled_event_id,\n ),\n params=params,\n )\n\n def edit_scheduled_event(\n self, guild_id: Snowflake, guild_scheduled_event_id: Snowflake, *, reason: Optional[str] = None, **payload: Any\n ) -> Response[scheduled_event.GuildScheduledEvent]:\n valid_keys = (\n 'channel_id',\n 'entity_metadata',\n 'name',\n 'privacy_level',\n 'scheduled_start_time',\n 'scheduled_end_time',\n 'status',\n 'description',\n 'entity_type',\n 'image',\n )\n payload = {k: v for k, v in payload.items() if k in valid_keys}\n\n return self.request(\n Route(\n 'PATCH',\n '/guilds/{guild_id}/scheduled-events/{guild_scheduled_event_id}',\n guild_id=guild_id,\n guild_scheduled_event_id=guild_scheduled_event_id,\n ),\n 
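# --- Illustrative aside (not part of the file above) ---------------------
# Why the repeated @overload blocks in this section exist, in miniature:
# Literal[True]/Literal[False] overloads let a static type checker pick the
# return type from the flag's value, while runtime behaviour stays shared.
# All names below are invented for the sketch.
from typing import Any, Dict, List, Literal, Union, overload


@overload
def fetch_events(with_user_count: Literal[True]) -> List[Dict[str, Any]]: ...
@overload
def fetch_events(with_user_count: Literal[False]) -> List[str]: ...
@overload
def fetch_events(with_user_count: bool) -> Union[List[Dict[str, Any]], List[str]]: ...


def fetch_events(with_user_count: bool) -> Any:
    if with_user_count:
        return [{'name': 'stage night', 'user_count': 3}]
    return ['stage night']


detailed = fetch_events(True)   # a checker narrows this to List[Dict[str, Any]]
names = fetch_events(False)     # ...and this to List[str]
# -------------------------------------------------------------------------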
json=payload,\n reason=reason,\n )\n\n def delete_scheduled_event(\n self,\n guild_id: Snowflake,\n guild_scheduled_event_id: Snowflake,\n *,\n reason: Optional[str] = None,\n ) -> Response[None]:\n return self.request(\n Route(\n 'DELETE',\n '/guilds/{guild_id}/scheduled-events/{guild_scheduled_event_id}',\n guild_id=guild_id,\n guild_scheduled_event_id=guild_scheduled_event_id,\n ),\n reason=reason,\n )\n\n @overload\n def get_scheduled_event_users(\n self,\n guild_id: Snowflake,\n guild_scheduled_event_id: Snowflake,\n limit: int,\n with_member: Literal[True],\n before: Optional[Snowflake] = ...,\n after: Optional[Snowflake] = ...,\n ) -> Response[scheduled_event.ScheduledEventUsersWithMember]:\n ...\n\n @overload\n def get_scheduled_event_users(\n self,\n guild_id: Snowflake,\n guild_scheduled_event_id: Snowflake,\n limit: int,\n with_member: Literal[False],\n before: Optional[Snowflake] = ...,\n after: Optional[Snowflake] = ...,\n ) -> Response[scheduled_event.ScheduledEventUsers]:\n ...\n\n @overload\n def get_scheduled_event_users(\n self,\n guild_id: Snowflake,\n guild_scheduled_event_id: Snowflake,\n limit: int,\n with_member: bool,\n before: Optional[Snowflake] = ...,\n after: Optional[Snowflake] = ...,\n ) -> Union[Response[scheduled_event.ScheduledEventUsersWithMember], Response[scheduled_event.ScheduledEventUsers]]:\n ...\n\n def get_scheduled_event_users(\n self,\n guild_id: Snowflake,\n guild_scheduled_event_id: Snowflake,\n limit: int,\n with_member: bool,\n before: Optional[Snowflake] = None,\n after: Optional[Snowflake] = None,\n ) -> Response[Any]:\n params: Dict[str, Any] = {\n 'limit': limit,\n 'with_member': int(with_member),\n }\n\n if before is not None:\n params['before'] = before\n if after is not None:\n params['after'] = after\n\n return self.request(\n Route(\n 'GET',\n '/guilds/{guild_id}/scheduled-events/{guild_scheduled_event_id}/users',\n guild_id=guild_id,\n guild_scheduled_event_id=guild_scheduled_event_id,\n ),\n params=params,\n )\n\n # Application commands (global)\n\n def get_global_commands(self, application_id: Snowflake) -> Response[List[command.ApplicationCommand]]:\n return self.request(Route('GET', '/applications/{application_id}/commands', application_id=application_id))\n\n def get_global_command(self, application_id: Snowflake, command_id: Snowflake) -> Response[command.ApplicationCommand]:\n r = Route(\n 'GET',\n '/applications/{application_id}/commands/{command_id}',\n application_id=application_id,\n command_id=command_id,\n )\n return self.request(r)\n\n def upsert_global_command(\n self, application_id: Snowflake, payload: command.ApplicationCommand\n ) -> Response[command.ApplicationCommand]:\n r = Route('POST', '/applications/{application_id}/commands', application_id=application_id)\n return self.request(r, json=payload)\n\n def edit_global_command(\n self,\n application_id: Snowflake,\n command_id: Snowflake,\n payload: Dict[str, Any],\n ) -> Response[command.ApplicationCommand]:\n valid_keys = (\n 'name',\n 'description',\n 'options',\n )\n payload = {k: v for k, v in payload.items() if k in valid_keys}\n r = Route(\n 'PATCH',\n '/applications/{application_id}/commands/{command_id}',\n application_id=application_id,\n command_id=command_id,\n )\n return self.request(r, json=payload)\n\n def delete_global_command(self, application_id: Snowflake, command_id: Snowflake) -> Response[None]:\n r = Route(\n 'DELETE',\n '/applications/{application_id}/commands/{command_id}',\n application_id=application_id,\n command_id=command_id,\n )\n 
return self.request(r)\n\n def bulk_upsert_global_commands(\n self, application_id: Snowflake, payload: List[Dict[str, Any]]\n ) -> Response[List[command.ApplicationCommand]]:\n r = Route('PUT', '/applications/{application_id}/commands', application_id=application_id)\n return self.request(r, json=payload)\n\n # Application commands (guild)\n\n def get_guild_commands(\n self, application_id: Snowflake, guild_id: Snowflake\n ) -> Response[List[command.ApplicationCommand]]:\n r = Route(\n 'GET',\n '/applications/{application_id}/guilds/{guild_id}/commands',\n application_id=application_id,\n guild_id=guild_id,\n )\n return self.request(r)\n\n def get_guild_command(\n self,\n application_id: Snowflake,\n guild_id: Snowflake,\n command_id: Snowflake,\n ) -> Response[command.ApplicationCommand]:\n r = Route(\n 'GET',\n '/applications/{application_id}/guilds/{guild_id}/commands/{command_id}',\n application_id=application_id,\n guild_id=guild_id,\n command_id=command_id,\n )\n return self.request(r)\n\n def upsert_guild_command(\n self,\n application_id: Snowflake,\n guild_id: Snowflake,\n payload: Dict[str, Any],\n ) -> Response[command.ApplicationCommand]:\n r = Route(\n 'POST',\n '/applications/{application_id}/guilds/{guild_id}/commands',\n application_id=application_id,\n guild_id=guild_id,\n )\n return self.request(r, json=payload)\n\n def edit_guild_command(\n self,\n application_id: Snowflake,\n guild_id: Snowflake,\n command_id: Snowflake,\n payload: Dict[str, Any],\n ) -> Response[command.ApplicationCommand]:\n valid_keys = (\n 'name',\n 'description',\n 'options',\n )\n payload = {k: v for k, v in payload.items() if k in valid_keys}\n r = Route(\n 'PATCH',\n '/applications/{application_id}/guilds/{guild_id}/commands/{command_id}',\n application_id=application_id,\n guild_id=guild_id,\n command_id=command_id,\n )\n return self.request(r, json=payload)\n\n def delete_guild_command(\n self,\n application_id: Snowflake,\n guild_id: Snowflake,\n command_id: Snowflake,\n ) -> Response[None]:\n r = Route(\n 'DELETE',\n '/applications/{application_id}/guilds/{guild_id}/commands/{command_id}',\n application_id=application_id,\n guild_id=guild_id,\n command_id=command_id,\n )\n return self.request(r)\n\n def bulk_upsert_guild_commands(\n self,\n application_id: Snowflake,\n guild_id: Snowflake,\n payload: List[Dict[str, Any]],\n ) -> Response[List[command.ApplicationCommand]]:\n r = Route(\n 'PUT',\n '/applications/{application_id}/guilds/{guild_id}/commands',\n application_id=application_id,\n guild_id=guild_id,\n )\n return self.request(r, json=payload)\n\n def get_guild_application_command_permissions(\n self,\n application_id: Snowflake,\n guild_id: Snowflake,\n ) -> Response[List[command.GuildApplicationCommandPermissions]]:\n r = Route(\n 'GET',\n '/applications/{application_id}/guilds/{guild_id}/commands/permissions',\n application_id=application_id,\n guild_id=guild_id,\n )\n return self.request(r)\n\n def get_application_command_permissions(\n self,\n application_id: Snowflake,\n guild_id: Snowflake,\n command_id: Snowflake,\n ) -> Response[command.GuildApplicationCommandPermissions]:\n r = Route(\n 'GET',\n '/applications/{application_id}/guilds/{guild_id}/commands/{command_id}/permissions',\n application_id=application_id,\n guild_id=guild_id,\n command_id=command_id,\n )\n return self.request(r)\n\n def edit_application_command_permissions(\n self,\n application_id: Snowflake,\n guild_id: Snowflake,\n command_id: Snowflake,\n payload: Dict[str, Any],\n ) -> Response[None]:\n r = 
Route(\n 'PUT',\n '/applications/{application_id}/guilds/{guild_id}/commands/{command_id}/permissions',\n application_id=application_id,\n guild_id=guild_id,\n command_id=command_id,\n )\n return self.request(r, json=payload)\n\n def get_auto_moderation_rules(self, guild_id: Snowflake) -> Response[List[automod.AutoModerationRule]]:\n return self.request(Route('GET', '/guilds/{guild_id}/auto-moderation/rules', guild_id=guild_id))\n\n def get_auto_moderation_rule(self, guild_id: Snowflake, rule_id: Snowflake) -> Response[automod.AutoModerationRule]:\n return self.request(\n Route('GET', '/guilds/{guild_id}/auto-moderation/rules/{rule_id}', guild_id=guild_id, rule_id=rule_id)\n )\n\n def create_auto_moderation_rule(\n self, guild_id: Snowflake, *, reason: Optional[str], **payload: Any\n ) -> Response[automod.AutoModerationRule]:\n valid_keys = (\n 'name',\n 'event_type',\n 'trigger_type',\n 'trigger_metadata',\n 'actions',\n 'enabled',\n 'exempt_roles',\n 'exempt_channels',\n )\n\n payload = {k: v for k, v in payload.items() if k in valid_keys and v is not None}\n\n return self.request(\n Route('POST', '/guilds/{guild_id}/auto-moderation/rules', guild_id=guild_id), json=payload, reason=reason\n )\n\n def edit_auto_moderation_rule(\n self, guild_id: Snowflake, rule_id: Snowflake, *, reason: Optional[str], **payload: Any\n ) -> Response[automod.AutoModerationRule]:\n valid_keys = (\n 'name',\n 'event_type',\n 'trigger_metadata',\n 'actions',\n 'enabled',\n 'exempt_roles',\n 'exempt_channels',\n )\n\n payload = {k: v for k, v in payload.items() if k in valid_keys and v is not None}\n\n return self.request(\n Route('PATCH', '/guilds/{guild_id}/auto-moderation/rules/{rule_id}', guild_id=guild_id, rule_id=rule_id),\n json=payload,\n reason=reason,\n )\n\n def delete_auto_moderation_rule(\n self, guild_id: Snowflake, rule_id: Snowflake, *, reason: Optional[str]\n ) -> Response[None]:\n return self.request(\n Route('DELETE', '/guilds/{guild_id}/auto-moderation/rules/{rule_id}', guild_id=guild_id, rule_id=rule_id),\n reason=reason,\n )\n\n # SKU\n\n def get_skus(self, application_id: Snowflake) -> Response[List[sku.SKU]]:\n return self.request(Route('GET', '/applications/{application_id}/skus', application_id=application_id))\n\n def get_entitlements(\n self,\n application_id: Snowflake,\n user_id: Optional[Snowflake] = None,\n sku_ids: Optional[SnowflakeList] = None,\n before: Optional[Snowflake] = None,\n after: Optional[Snowflake] = None,\n limit: Optional[int] = None,\n guild_id: Optional[Snowflake] = None,\n exclude_ended: Optional[bool] = None,\n ) -> Response[List[sku.Entitlement]]:\n params: Dict[str, Any] = {}\n\n if user_id is not None:\n params['user_id'] = user_id\n if sku_ids is not None:\n params['sku_ids'] = ','.join(map(str, sku_ids))\n if before is not None:\n params['before'] = before\n if after is not None:\n params['after'] = after\n if limit is not None:\n params['limit'] = limit\n if guild_id is not None:\n params['guild_id'] = guild_id\n if exclude_ended is not None:\n params['exclude_ended'] = int(exclude_ended)\n\n return self.request(\n Route('GET', '/applications/{application_id}/entitlements', application_id=application_id), params=params\n )\n\n def get_entitlement(self, application_id: Snowflake, entitlement_id: Snowflake) -> Response[sku.Entitlement]:\n return self.request(\n Route(\n 'GET',\n '/applications/{application_id}/entitlements/{entitlement_id}',\n application_id=application_id,\n entitlement_id=entitlement_id,\n ),\n )\n\n def create_entitlement(\n self, 
application_id: Snowflake, sku_id: Snowflake, owner_id: Snowflake, owner_type: sku.EntitlementOwnerType\n ) -> Response[sku.Entitlement]:\n payload = {\n 'sku_id': sku_id,\n 'owner_id': owner_id,\n 'owner_type': owner_type,\n }\n\n return self.request(\n Route(\n 'POST',\n '/applications/{application_id}/entitlements',\n application_id=application_id,\n ),\n json=payload,\n )\n\n def delete_entitlement(self, application_id: Snowflake, entitlement_id: Snowflake) -> Response[None]:\n return self.request(\n Route(\n 'DELETE',\n '/applications/{application_id}/entitlements/{entitlement_id}',\n application_id=application_id,\n entitlement_id=entitlement_id,\n ),\n )\n\n # Misc\n\n def application_info(self) -> Response[appinfo.AppInfo]:\n return self.request(Route('GET', '/oauth2/applications/@me'))\n\n async def get_gateway(self, *, encoding: str = 'json', zlib: bool = True) -> str:\n try:\n data = await self.request(Route('GET', '/gateway'))\n except HTTPException as exc:\n raise GatewayNotFound() from exc\n if zlib:\n value = '{0}?encoding={1}&v={2}&compress=zlib-stream'\n else:\n value = '{0}?encoding={1}&v={2}'\n return value.format(data['url'], encoding, INTERNAL_API_VERSION)\n\n async def get_bot_gateway(self, *, encoding: str = 'json', zlib: bool = True) -> Tuple[int, str]:\n try:\n data = await self.request(Route('GET', '/gateway/bot'))\n except HTTPException as exc:\n raise GatewayNotFound() from exc\n\n if zlib:\n value = '{0}?encoding={1}&v={2}&compress=zlib-stream'\n else:\n value = '{0}?encoding={1}&v={2}'\n return data['shards'], value.format(data['url'], encoding, INTERNAL_API_VERSION)\n\n def get_user(self, user_id: Snowflake) -> Response[user.User]:\n return self.request(Route('GET', '/users/{user_id}', user_id=user_id))\n",
"path": "discord/http.py"
},
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import TypedDict, List, Optional\nfrom typing_extensions import NotRequired\n\nfrom .user import User\nfrom .team import Team\nfrom .snowflake import Snowflake\n\n\nclass InstallParams(TypedDict):\n scopes: List[str]\n permissions: str\n\n\nclass BaseAppInfo(TypedDict):\n id: Snowflake\n name: str\n verify_key: str\n icon: Optional[str]\n summary: str\n description: str\n flags: int\n cover_image: NotRequired[str]\n terms_of_service_url: NotRequired[str]\n privacy_policy_url: NotRequired[str]\n rpc_origins: NotRequired[List[str]]\n\n\nclass AppInfo(BaseAppInfo):\n owner: User\n bot_public: bool\n bot_require_code_grant: bool\n team: NotRequired[Team]\n guild_id: NotRequired[Snowflake]\n primary_sku_id: NotRequired[Snowflake]\n slug: NotRequired[str]\n hook: NotRequired[bool]\n max_participants: NotRequired[int]\n tags: NotRequired[List[str]]\n install_params: NotRequired[InstallParams]\n custom_install_url: NotRequired[str]\n role_connections_verification_url: NotRequired[str]\n\n\nclass PartialAppInfo(BaseAppInfo, total=False):\n hook: bool\n max_participants: int\n approximate_guild_count: int\n redirect_uris: List[str]\n interactions_endpoint_url: Optional[str]\n role_connections_verification_url: Optional[str]\n\n\nclass GatewayAppInfo(TypedDict):\n id: Snowflake\n flags: int\n",
"path": "discord/types/appinfo.py"
}
] | 10_3 | python | import unittest
import asyncio
import sys
class TestEditApplicationInfo(unittest.TestCase):
def setUp(self):
from discord.http import HTTPClient
from unittest.mock import MagicMock
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
self.http_client = HTTPClient(loop=self.loop)
self.http_client.request = MagicMock()
def test_edit_application_info(self):
from unittest.mock import ANY
payload = {
'custom_install_url': 'https://example.com',
'description': 'Test Description',
}
self.http_client.edit_application_info(reason='Test Reason', payload=payload)
# Use ANY to match any Route object
self.http_client.request.assert_called_with(
ANY, json=payload, reason='Test Reason'
)
def tearDown(self):
self.loop.close()
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestEditApplicationInfo))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
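The next row asks for type-conditional attribute validation in `AutoModRuleAction.__init__`. As a reading aid, here is a minimal standalone sketch of that pattern, mirroring the checks in that row's solution patch; the enum values and the simplified keyword-only signature are illustrative assumptions, not the real discord.py definitions (the real class also infers `type` from which argument was passed).

```python
import datetime
from enum import Enum


class AutoModRuleActionType(Enum):
    # Hypothetical values for illustration only.
    block_message = 1
    send_alert_message = 2
    timeout = 3


class AutoModRuleAction:
    def __init__(self, *, type, channel_id=None, duration=None, custom_message=None):
        self.type = type
        # Default every branch-specific attribute so each instance exposes
        # all three, regardless of which branch runs.
        self.channel_id = None
        self.duration = None
        self.custom_message = None

        if self.type is AutoModRuleActionType.send_alert_message:
            if channel_id is None:
                raise ValueError('channel_id cannot be None if type is send_alert_message')
            self.channel_id = channel_id

        if self.type is AutoModRuleActionType.timeout:
            if duration is None:
                raise ValueError('duration cannot be None if type is timeout')
            self.duration = duration

        if self.type is AutoModRuleActionType.block_message:
            self.custom_message = custom_message


# A timeout action must carry a duration; the other attributes stay None.
action = AutoModRuleAction(
    type=AutoModRuleActionType.timeout, duration=datetime.timedelta(minutes=10)
)
assert action.channel_id is None and action.custom_message is None
```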
https://github.com/teamqurrent/discord.py | Enhance the `AutoModRuleAction` class in `automod.py` by modifying its `__init__` method to handle different action types more effectively. Specifically, implement conditional logic to set and validate attributes based on the action type: ensure `channel_id` is assigned for `send_alert_message` actions, `duration` for `timeout` actions, and `custom_message` for `block_message` actions. Include checks to raise errors if necessary attributes for each specific action type are missing, thereby ensuring the integrity and correctness of each `AutoModRuleAction` instance. | 933460c | discord | python3.9 | e1c1a72a | diff --git a/discord/automod.py b/discord/automod.py
--- a/discord/automod.py
+++ b/discord/automod.py
@@ -135,6 +135,10 @@ class AutoModRuleAction:
raise ValueError('Only one of channel_id, duration, or custom_message can be passed.')
self.type: AutoModRuleActionType
+ self.channel_id: Optional[int] = None
+ self.duration: Optional[datetime.timedelta] = None
+ self.custom_message: Optional[str] = None
+
if type is not None:
self.type = type
elif channel_id is not None:
@@ -147,14 +151,15 @@ class AutoModRuleAction:
if self.type is AutoModRuleActionType.send_alert_message:
if channel_id is None:
raise ValueError('channel_id cannot be None if type is send_alert_message')
- self.channel_id: Optional[int] = channel_id
+ self.channel_id = channel_id
if self.type is AutoModRuleActionType.timeout:
if duration is None:
raise ValueError('duration cannot be None set if type is timeout')
- self.duration: Optional[datetime.timedelta] = duration
+ self.duration = duration
- self.custom_message: Optional[str] = custom_message
+ if self.type is AutoModRuleActionType.block_message:
+ self.custom_message = custom_message
def __repr__(self) -> str:
return f'<AutoModRuleAction type={self.type.value} channel={self.channel_id} duration={self.duration}>'
| [
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\nimport datetime\n\nfrom typing import TYPE_CHECKING, Any, Dict, Optional, List, Set, Union, Sequence, overload, Literal\n\nfrom .enums import AutoModRuleTriggerType, AutoModRuleActionType, AutoModRuleEventType, try_enum\nfrom .flags import AutoModPresets\nfrom . import utils\nfrom .utils import MISSING, cached_slot_property\n\nif TYPE_CHECKING:\n from typing_extensions import Self\n from .abc import Snowflake, GuildChannel\n from .threads import Thread\n from .guild import Guild\n from .member import Member\n from .state import ConnectionState\n from .types.automod import (\n AutoModerationRule as AutoModerationRulePayload,\n AutoModerationTriggerMetadata as AutoModerationTriggerMetadataPayload,\n AutoModerationAction as AutoModerationActionPayload,\n AutoModerationActionExecution as AutoModerationActionExecutionPayload,\n )\n from .role import Role\n\n__all__ = (\n 'AutoModRuleAction',\n 'AutoModTrigger',\n 'AutoModRule',\n 'AutoModAction',\n)\n\n\nclass AutoModRuleAction:\n \"\"\"Represents an auto moderation's rule action.\n\n .. note::\n Only one of ``channel_id``, ``duration``, or ``custom_message`` can be used.\n\n .. versionadded:: 2.0\n\n Attributes\n -----------\n type: :class:`AutoModRuleActionType`\n The type of action to take.\n Defaults to :attr:`~AutoModRuleActionType.block_message`.\n channel_id: Optional[:class:`int`]\n The ID of the channel or thread to send the alert message to, if any.\n Passing this sets :attr:`type` to :attr:`~AutoModRuleActionType.send_alert_message`.\n duration: Optional[:class:`datetime.timedelta`]\n The duration of the timeout to apply, if any.\n Has a maximum of 28 days.\n Passing this sets :attr:`type` to :attr:`~AutoModRuleActionType.timeout`.\n custom_message: Optional[:class:`str`]\n A custom message which will be shown to a user when their message is blocked.\n Passing this sets :attr:`type` to :attr:`~AutoModRuleActionType.block_message`.\n\n .. versionadded:: 2.2\n \"\"\"\n\n __slots__ = ('type', 'channel_id', 'duration', 'custom_message')\n\n @overload\n def __init__(self, *, channel_id: int = ...) -> None:\n ...\n\n @overload\n def __init__(self, *, type: Literal[AutoModRuleActionType.send_alert_message], channel_id: int = ...) -> None:\n ...\n\n @overload\n def __init__(self, *, duration: datetime.timedelta = ...) 
-> None:\n ...\n\n @overload\n def __init__(self, *, type: Literal[AutoModRuleActionType.timeout], duration: datetime.timedelta = ...) -> None:\n ...\n\n @overload\n def __init__(self, *, custom_message: str = ...) -> None:\n ...\n\n @overload\n def __init__(self, *, type: Literal[AutoModRuleActionType.block_message]) -> None:\n ...\n\n @overload\n def __init__(self, *, type: Literal[AutoModRuleActionType.block_message], custom_message: Optional[str] = ...) -> None:\n ...\n\n @overload\n def __init__(\n self,\n *,\n type: Optional[AutoModRuleActionType] = ...,\n channel_id: Optional[int] = ...,\n duration: Optional[datetime.timedelta] = ...,\n custom_message: Optional[str] = ...,\n ) -> None:\n ...\n\n def __init__(\n self,\n *,\n type: Optional[AutoModRuleActionType] = None,\n channel_id: Optional[int] = None,\n duration: Optional[datetime.timedelta] = None,\n custom_message: Optional[str] = None,\n ) -> None:\n if sum(v is None for v in (channel_id, duration, custom_message)) < 2:\n raise ValueError('Only one of channel_id, duration, or custom_message can be passed.')\n\n self.type: AutoModRuleActionType\n if type is not None:\n self.type = type\n elif channel_id is not None:\n self.type = AutoModRuleActionType.send_alert_message\n elif duration is not None:\n self.type = AutoModRuleActionType.timeout\n else:\n self.type = AutoModRuleActionType.block_message\n\n if self.type is AutoModRuleActionType.send_alert_message:\n if channel_id is None:\n raise ValueError('channel_id cannot be None if type is send_alert_message')\n self.channel_id: Optional[int] = channel_id\n\n if self.type is AutoModRuleActionType.timeout:\n if duration is None:\n raise ValueError('duration cannot be None set if type is timeout')\n self.duration: Optional[datetime.timedelta] = duration\n\n self.custom_message: Optional[str] = custom_message\n\n def __repr__(self) -> str:\n return f'<AutoModRuleAction type={self.type.value} channel={self.channel_id} duration={self.duration}>'\n\n @classmethod\n def from_data(cls, data: AutoModerationActionPayload) -> Self:\n if data['type'] == AutoModRuleActionType.timeout.value:\n duration_seconds = data['metadata']['duration_seconds']\n return cls(duration=datetime.timedelta(seconds=duration_seconds))\n elif data['type'] == AutoModRuleActionType.send_alert_message.value:\n channel_id = int(data['metadata']['channel_id'])\n return cls(channel_id=channel_id)\n elif data['type'] == AutoModRuleActionType.block_message.value:\n custom_message = data.get('metadata', {}).get('custom_message')\n return cls(type=AutoModRuleActionType.block_message, custom_message=custom_message)\n\n return cls(type=AutoModRuleActionType.block_member_interactions)\n\n def to_dict(self) -> Dict[str, Any]:\n ret = {'type': self.type.value, 'metadata': {}}\n if self.type is AutoModRuleActionType.block_message and self.custom_message is not None:\n ret['metadata'] = {'custom_message': self.custom_message}\n elif self.type is AutoModRuleActionType.timeout:\n ret['metadata'] = {'duration_seconds': int(self.duration.total_seconds())} # type: ignore # duration cannot be None here\n elif self.type is AutoModRuleActionType.send_alert_message:\n ret['metadata'] = {'channel_id': str(self.channel_id)}\n return ret\n\n\nclass AutoModTrigger:\n r\"\"\"Represents a trigger for an auto moderation rule.\n\n The following table illustrates relevant attributes for each :class:`AutoModRuleTriggerType`:\n\n +-----------------------------------------------+------------------------------------------------+\n | Type | Attributes 
|\n +===============================================+================================================+\n | :attr:`AutoModRuleTriggerType.keyword` | :attr:`keyword_filter`, :attr:`regex_patterns`,|\n | | :attr:`allow_list` |\n +-----------------------------------------------+------------------------------------------------+\n | :attr:`AutoModRuleTriggerType.spam` | |\n +-----------------------------------------------+------------------------------------------------+\n | :attr:`AutoModRuleTriggerType.keyword_preset` | :attr:`presets`\\, :attr:`allow_list` |\n +-----------------------------------------------+------------------------------------------------+\n | :attr:`AutoModRuleTriggerType.mention_spam` | :attr:`mention_limit`, |\n | | :attr:`mention_raid_protection` |\n +-----------------------------------------------+------------------------------------------------+\n | :attr:`AutoModRuleTriggerType.member_profile` | :attr:`keyword_filter`, :attr:`regex_patterns`,|\n | | :attr:`allow_list` |\n +-----------------------------------------------+------------------------------------------------+\n\n .. versionadded:: 2.0\n\n Attributes\n -----------\n type: :class:`AutoModRuleTriggerType`\n The type of trigger.\n keyword_filter: List[:class:`str`]\n The list of strings that will trigger the filter.\n Maximum of 1000. Keywords can only be up to 60 characters in length.\n\n This could be combined with :attr:`regex_patterns`.\n regex_patterns: List[:class:`str`]\n The regex pattern that will trigger the filter. The syntax is based off of\n `Rust's regex syntax <https://docs.rs/regex/latest/regex/#syntax>`_.\n Maximum of 10. Regex strings can only be up to 260 characters in length.\n\n This could be combined with :attr:`keyword_filter` and/or :attr:`allow_list`\n\n .. versionadded:: 2.1\n presets: :class:`AutoModPresets`\n The presets used with the preset keyword filter.\n allow_list: List[:class:`str`]\n The list of words that are exempt from the commonly flagged words. Maximum of 100.\n Keywords can only be up to 60 characters in length.\n mention_limit: :class:`int`\n The total number of user and role mentions a message can contain.\n Has a maximum of 50.\n mention_raid_protection: :class:`bool`\n Whether mention raid protection is enabled or not.\n\n .. 
versionadded:: 2.4\n \"\"\"\n\n __slots__ = (\n 'type',\n 'keyword_filter',\n 'presets',\n 'allow_list',\n 'mention_limit',\n 'regex_patterns',\n 'mention_raid_protection',\n )\n\n def __init__(\n self,\n *,\n type: Optional[AutoModRuleTriggerType] = None,\n keyword_filter: Optional[List[str]] = None,\n presets: Optional[AutoModPresets] = None,\n allow_list: Optional[List[str]] = None,\n mention_limit: Optional[int] = None,\n regex_patterns: Optional[List[str]] = None,\n mention_raid_protection: Optional[bool] = None,\n ) -> None:\n unique_args = (keyword_filter or regex_patterns, presets, mention_limit or mention_raid_protection)\n if type is None and sum(arg is not None for arg in unique_args) > 1:\n raise ValueError(\n 'Please pass only one of keyword_filter/regex_patterns, presets, or mention_limit/mention_raid_protection.'\n )\n\n if type is not None:\n self.type = type\n elif keyword_filter is not None or regex_patterns is not None:\n self.type = AutoModRuleTriggerType.keyword\n elif presets is not None:\n self.type = AutoModRuleTriggerType.keyword_preset\n elif mention_limit is not None or mention_raid_protection is not None:\n self.type = AutoModRuleTriggerType.mention_spam\n else:\n raise ValueError(\n 'Please pass the trigger type explicitly if not using keyword_filter, regex_patterns, presets, mention_limit, or mention_raid_protection.'\n )\n\n self.keyword_filter: List[str] = keyword_filter if keyword_filter is not None else []\n self.presets: AutoModPresets = presets if presets is not None else AutoModPresets()\n self.allow_list: List[str] = allow_list if allow_list is not None else []\n self.mention_limit: int = mention_limit if mention_limit is not None else 0\n self.mention_raid_protection: bool = mention_raid_protection if mention_raid_protection is not None else False\n self.regex_patterns: List[str] = regex_patterns if regex_patterns is not None else []\n\n def __repr__(self) -> str:\n data = self.to_metadata_dict()\n if data:\n joined = ' '.join(f'{k}={v!r}' for k, v in data.items())\n return f'<AutoModTrigger type={self.type} {joined}>'\n\n return f'<AutoModTrigger type={self.type}>'\n\n @classmethod\n def from_data(cls, type: int, data: Optional[AutoModerationTriggerMetadataPayload]) -> Self:\n type_ = try_enum(AutoModRuleTriggerType, type)\n if data is None:\n return cls(type=type_)\n elif type_ in (AutoModRuleTriggerType.keyword, AutoModRuleTriggerType.member_profile):\n return cls(\n type=type_,\n keyword_filter=data.get('keyword_filter'),\n regex_patterns=data.get('regex_patterns'),\n allow_list=data.get('allow_list'),\n )\n elif type_ is AutoModRuleTriggerType.keyword_preset:\n return cls(\n type=type_, presets=AutoModPresets._from_value(data.get('presets', [])), allow_list=data.get('allow_list')\n )\n elif type_ is AutoModRuleTriggerType.mention_spam:\n return cls(\n type=type_,\n mention_limit=data.get('mention_total_limit'),\n mention_raid_protection=data.get('mention_raid_protection_enabled'),\n )\n else:\n return cls(type=type_)\n\n def to_metadata_dict(self) -> Optional[Dict[str, Any]]:\n if self.type in (AutoModRuleTriggerType.keyword, AutoModRuleTriggerType.member_profile):\n return {\n 'keyword_filter': self.keyword_filter,\n 'regex_patterns': self.regex_patterns,\n 'allow_list': self.allow_list,\n }\n elif self.type is AutoModRuleTriggerType.keyword_preset:\n return {'presets': self.presets.to_array(), 'allow_list': self.allow_list}\n elif self.type is AutoModRuleTriggerType.mention_spam:\n return {\n 'mention_total_limit': self.mention_limit,\n 
'mention_raid_protection_enabled': self.mention_raid_protection,\n }\n\n\nclass AutoModRule:\n \"\"\"Represents an auto moderation rule.\n\n .. versionadded:: 2.0\n\n Attributes\n -----------\n id: :class:`int`\n The ID of the rule.\n guild: :class:`Guild`\n The guild the rule is for.\n name: :class:`str`\n The name of the rule.\n creator_id: :class:`int`\n The ID of the user that created the rule.\n trigger: :class:`AutoModTrigger`\n The rule's trigger.\n enabled: :class:`bool`\n Whether the rule is enabled.\n exempt_role_ids: Set[:class:`int`]\n The IDs of the roles that are exempt from the rule.\n exempt_channel_ids: Set[:class:`int`]\n The IDs of the channels that are exempt from the rule.\n event_type: :class:`AutoModRuleEventType`\n The type of event that will trigger the rule.\n \"\"\"\n\n __slots__ = (\n '_state',\n '_cs_exempt_roles',\n '_cs_exempt_channels',\n '_cs_actions',\n 'id',\n 'guild',\n 'name',\n 'creator_id',\n 'event_type',\n 'trigger',\n 'enabled',\n 'exempt_role_ids',\n 'exempt_channel_ids',\n '_actions',\n )\n\n def __init__(self, *, data: AutoModerationRulePayload, guild: Guild, state: ConnectionState) -> None:\n self._state: ConnectionState = state\n self.guild: Guild = guild\n self.id: int = int(data['id'])\n self.name: str = data['name']\n self.creator_id = int(data['creator_id'])\n self.event_type: AutoModRuleEventType = try_enum(AutoModRuleEventType, data['event_type'])\n self.trigger: AutoModTrigger = AutoModTrigger.from_data(data['trigger_type'], data=data.get('trigger_metadata'))\n self.enabled: bool = data['enabled']\n self.exempt_role_ids: Set[int] = {int(role_id) for role_id in data['exempt_roles']}\n self.exempt_channel_ids: Set[int] = {int(channel_id) for channel_id in data['exempt_channels']}\n self._actions: List[AutoModerationActionPayload] = data['actions']\n\n def __repr__(self) -> str:\n return f'<AutoModRule id={self.id} name={self.name!r} guild={self.guild!r}>'\n\n def to_dict(self) -> AutoModerationRulePayload:\n ret: AutoModerationRulePayload = {\n 'id': str(self.id),\n 'guild_id': str(self.guild.id),\n 'name': self.name,\n 'creator_id': str(self.creator_id),\n 'event_type': self.event_type.value,\n 'trigger_type': self.trigger.type.value,\n 'trigger_metadata': self.trigger.to_metadata_dict(),\n 'actions': [action.to_dict() for action in self.actions],\n 'enabled': self.enabled,\n 'exempt_roles': [str(role_id) for role_id in self.exempt_role_ids],\n 'exempt_channels': [str(channel_id) for channel_id in self.exempt_channel_ids],\n } # type: ignore # trigger types break the flow here.\n\n return ret\n\n @property\n def creator(self) -> Optional[Member]:\n \"\"\"Optional[:class:`Member`]: The member that created this rule.\"\"\"\n return self.guild.get_member(self.creator_id)\n\n @cached_slot_property('_cs_exempt_roles')\n def exempt_roles(self) -> List[Role]:\n \"\"\"List[:class:`Role`]: The roles that are exempt from this rule.\"\"\"\n result = []\n get_role = self.guild.get_role\n for role_id in self.exempt_role_ids:\n role = get_role(role_id)\n if role is not None:\n result.append(role)\n\n return utils._unique(result)\n\n @cached_slot_property('_cs_exempt_channels')\n def exempt_channels(self) -> List[Union[GuildChannel, Thread]]:\n \"\"\"List[Union[:class:`abc.GuildChannel`, :class:`Thread`]]: The channels that are exempt from this rule.\"\"\"\n it = filter(None, map(self.guild._resolve_channel, self.exempt_channel_ids))\n return utils._unique(it)\n\n @cached_slot_property('_cs_actions')\n def actions(self) -> 
List[AutoModRuleAction]:\n \"\"\"List[:class:`AutoModRuleAction`]: The actions that are taken when this rule is triggered.\"\"\"\n return [AutoModRuleAction.from_data(action) for action in self._actions]\n\n def is_exempt(self, obj: Snowflake, /) -> bool:\n \"\"\"Check if an object is exempt from the automod rule.\n\n Parameters\n -----------\n obj: :class:`abc.Snowflake`\n The role, channel, or thread to check.\n\n Returns\n --------\n :class:`bool`\n Whether the object is exempt from the automod rule.\n \"\"\"\n return obj.id in self.exempt_channel_ids or obj.id in self.exempt_role_ids\n\n async def edit(\n self,\n *,\n name: str = MISSING,\n event_type: AutoModRuleEventType = MISSING,\n actions: List[AutoModRuleAction] = MISSING,\n trigger: AutoModTrigger = MISSING,\n enabled: bool = MISSING,\n exempt_roles: Sequence[Snowflake] = MISSING,\n exempt_channels: Sequence[Snowflake] = MISSING,\n reason: str = MISSING,\n ) -> Self:\n \"\"\"|coro|\n\n Edits this auto moderation rule.\n\n You must have :attr:`Permissions.manage_guild` to edit rules.\n\n Parameters\n -----------\n name: :class:`str`\n The new name to change to.\n event_type: :class:`AutoModRuleEventType`\n The new event type to change to.\n actions: List[:class:`AutoModRuleAction`]\n The new rule actions to update.\n trigger: :class:`AutoModTrigger`\n The new trigger to update.\n You can only change the trigger metadata, not the type.\n enabled: :class:`bool`\n Whether the rule should be enabled or not.\n exempt_roles: Sequence[:class:`abc.Snowflake`]\n The new roles to exempt from the rule.\n exempt_channels: Sequence[:class:`abc.Snowflake`]\n The new channels to exempt from the rule.\n reason: :class:`str`\n The reason for updating this rule. Shows up on the audit log.\n\n Raises\n -------\n Forbidden\n You do not have permission to edit this rule.\n HTTPException\n Editing the rule failed.\n\n Returns\n --------\n :class:`AutoModRule`\n The updated auto moderation rule.\n \"\"\"\n payload = {}\n if actions is not MISSING:\n payload['actions'] = [action.to_dict() for action in actions]\n\n if name is not MISSING:\n payload['name'] = name\n\n if event_type is not MISSING:\n payload['event_type'] = event_type\n\n if trigger is not MISSING:\n trigger_metadata = trigger.to_metadata_dict()\n if trigger_metadata is not None:\n payload['trigger_metadata'] = trigger_metadata\n\n if enabled is not MISSING:\n payload['enabled'] = enabled\n\n if exempt_roles is not MISSING:\n payload['exempt_roles'] = [x.id for x in exempt_roles]\n\n if exempt_channels is not MISSING:\n payload['exempt_channels'] = [x.id for x in exempt_channels]\n\n data = await self._state.http.edit_auto_moderation_rule(\n self.guild.id,\n self.id,\n reason=reason,\n **payload,\n )\n\n return AutoModRule(data=data, guild=self.guild, state=self._state)\n\n async def delete(self, *, reason: str = MISSING) -> None:\n \"\"\"|coro|\n\n Deletes the auto moderation rule.\n\n You must have :attr:`Permissions.manage_guild` to delete rules.\n\n Parameters\n -----------\n reason: :class:`str`\n The reason for deleting this rule. Shows up on the audit log.\n\n Raises\n -------\n Forbidden\n You do not have permissions to delete the rule.\n HTTPException\n Deleting the rule failed.\n \"\"\"\n await self._state.http.delete_auto_moderation_rule(self.guild.id, self.id, reason=reason)\n\n\nclass AutoModAction:\n \"\"\"Represents an action that was taken as the result of a moderation rule.\n\n .. 
versionadded:: 2.0\n\n Attributes\n -----------\n action: :class:`AutoModRuleAction`\n The action that was taken.\n message_id: Optional[:class:`int`]\n The message ID that triggered the action. This is only available if the\n action is done on an edited message.\n rule_id: :class:`int`\n The ID of the rule that was triggered.\n rule_trigger_type: :class:`AutoModRuleTriggerType`\n The trigger type of the rule that was triggered.\n guild_id: :class:`int`\n The ID of the guild where the rule was triggered.\n user_id: :class:`int`\n The ID of the user that triggered the rule.\n channel_id: :class:`int`\n The ID of the channel where the rule was triggered.\n alert_system_message_id: Optional[:class:`int`]\n The ID of the system message that was sent to the predefined alert channel.\n content: :class:`str`\n The content of the message that triggered the rule.\n Requires the :attr:`Intents.message_content` or it will always return an empty string.\n matched_keyword: Optional[:class:`str`]\n The matched keyword from the triggering message.\n matched_content: Optional[:class:`str`]\n The matched content from the triggering message.\n Requires the :attr:`Intents.message_content` or it will always return ``None``.\n \"\"\"\n\n __slots__ = (\n '_state',\n 'action',\n 'rule_id',\n 'rule_trigger_type',\n 'guild_id',\n 'user_id',\n 'channel_id',\n 'message_id',\n 'alert_system_message_id',\n 'content',\n 'matched_keyword',\n 'matched_content',\n )\n\n def __init__(self, *, data: AutoModerationActionExecutionPayload, state: ConnectionState) -> None:\n self._state: ConnectionState = state\n self.message_id: Optional[int] = utils._get_as_snowflake(data, 'message_id')\n self.action: AutoModRuleAction = AutoModRuleAction.from_data(data['action'])\n self.rule_id: int = int(data['rule_id'])\n self.rule_trigger_type: AutoModRuleTriggerType = try_enum(AutoModRuleTriggerType, data['rule_trigger_type'])\n self.guild_id: int = int(data['guild_id'])\n self.channel_id: Optional[int] = utils._get_as_snowflake(data, 'channel_id')\n self.user_id: int = int(data['user_id'])\n self.alert_system_message_id: Optional[int] = utils._get_as_snowflake(data, 'alert_system_message_id')\n self.content: str = data.get('content', '')\n self.matched_keyword: Optional[str] = data['matched_keyword']\n self.matched_content: Optional[str] = data.get('matched_content')\n\n def __repr__(self) -> str:\n return f'<AutoModRuleExecution rule_id={self.rule_id} action={self.action!r}>'\n\n @property\n def guild(self) -> Guild:\n \"\"\":class:`Guild`: The guild this action was taken in.\"\"\"\n return self._state._get_or_create_unavailable_guild(self.guild_id)\n\n @property\n def channel(self) -> Optional[Union[GuildChannel, Thread]]:\n \"\"\"Optional[Union[:class:`abc.GuildChannel`, :class:`Thread`]]: The channel this action was taken in.\"\"\"\n if self.channel_id:\n return self.guild.get_channel_or_thread(self.channel_id)\n return None\n\n @property\n def member(self) -> Optional[Member]:\n \"\"\"Optional[:class:`Member`]: The member this action was taken against /who triggered this rule.\"\"\"\n return self.guild.get_member(self.user_id)\n\n async def fetch_rule(self) -> AutoModRule:\n \"\"\"|coro|\n\n Fetch the rule whose action was taken.\n\n You must have :attr:`Permissions.manage_guild` to do this.\n\n Raises\n -------\n Forbidden\n You do not have permissions to view the rule.\n HTTPException\n Fetching the rule failed.\n\n Returns\n --------\n :class:`AutoModRule`\n The rule that was executed.\n \"\"\"\n\n data = await 
self._state.http.get_auto_moderation_rule(self.guild.id, self.rule_id)\n return AutoModRule(data=data, guild=self.guild, state=self._state)\n",
"path": "discord/automod.py"
}
] | 10_4 | python | import unittest
import datetime
import sys
class TestAutoModRuleAction(unittest.TestCase):
def test_send_alert_message_initialization(self):
from discord.automod import AutoModRuleAction, AutoModRuleActionType
action = AutoModRuleAction(type=AutoModRuleActionType.send_alert_message, channel_id=12345)
self.assertEqual(action.channel_id, 12345)
self.assertIsNone(action.duration)
self.assertIsNone(action.custom_message)
def test_timeout_initialization(self):
from discord.automod import AutoModRuleAction, AutoModRuleActionType
duration = datetime.timedelta(minutes=10)
action = AutoModRuleAction(type=AutoModRuleActionType.timeout, duration=duration)
self.assertEqual(action.duration, duration)
self.assertIsNone(action.channel_id)
self.assertIsNone(action.custom_message)
def test_block_message_initialization(self):
from discord.automod import AutoModRuleAction, AutoModRuleActionType
action = AutoModRuleAction(type=AutoModRuleActionType.block_message, custom_message="Blocked")
self.assertEqual(action.custom_message, "Blocked")
self.assertIsNone(action.channel_id)
self.assertIsNone(action.duration)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestAutoModRuleAction))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
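The next row describes guarding `Group.__init__` so that a command group must carry both a name and a description. A minimal sketch of that guard pattern follows; the heavily simplified class is an assumption for illustration, since the real discord.py `Group` resolves these values from class-level attributes and docstrings before the checks run.

```python
class Group:
    # Simplified stand-in for discord.py's app command Group.
    def __init__(self, *, name=None, description=None):
        self.name = name
        self.description = description

        # Mirror of the checks added by the next row's solution patch.
        if not self.description:
            raise TypeError('groups must have a description')
        if not self.name:
            raise TypeError('groups must have a name')


try:
    Group(name='moderation')  # no description supplied
except TypeError as exc:
    print(exc)  # -> groups must have a description
```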
https://github.com/teamqurrent/discord.py | In `app_commands/commands.py`, modify the `Group` class's `__init__` method to include validation checks that ensure both `name` and `description` parameters are provided when creating a command group. Implement conditional logic to raise a `TypeError` if either of these essential attributes is missing, thereby enforcing the requirement for command groups to have both a name and a description before any further processing. | 576ab26 | discord | python3.9 | 55594035 | diff --git a/discord/app_commands/commands.py b/discord/app_commands/commands.py
--- a/discord/app_commands/commands.py
+++ b/discord/app_commands/commands.py
@@ -1548,6 +1548,9 @@ class Group:
if not self.description:
raise TypeError('groups must have a description')
+ if not self.name:
+ raise TypeError('groups must have a name')
+
self.parent: Optional[Group] = parent
self.module: Optional[str]
if cls.__discord_app_commands_has_module__:
| [
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\nimport inspect\n\nfrom typing import (\n Any,\n Callable,\n ClassVar,\n Coroutine,\n Dict,\n Generator,\n Generic,\n List,\n MutableMapping,\n Optional,\n Set,\n TYPE_CHECKING,\n Tuple,\n Type,\n TypeVar,\n Union,\n overload,\n)\n\nimport re\nfrom copy import copy as shallow_copy\n\nfrom ..enums import AppCommandOptionType, AppCommandType, ChannelType, Locale\nfrom .models import Choice\nfrom .transformers import annotation_to_parameter, CommandParameter, NoneType\nfrom .errors import AppCommandError, CheckFailure, CommandInvokeError, CommandSignatureMismatch, CommandAlreadyRegistered\nfrom .translator import TranslationContextLocation, TranslationContext, Translator, locale_str\nfrom ..message import Message\nfrom ..user import User\nfrom ..member import Member\nfrom ..permissions import Permissions\nfrom ..utils import resolve_annotation, MISSING, is_inside_class, maybe_coroutine, async_all, _shorten, _to_kebab_case\n\nif TYPE_CHECKING:\n from typing_extensions import ParamSpec, Concatenate\n from ..interactions import Interaction\n from ..abc import Snowflake\n from .namespace import Namespace\n from .models import ChoiceT\n\n # Generally, these two libraries are supposed to be separate from each other.\n # However, for type hinting purposes it's unfortunately necessary for one to\n # reference the other to prevent type checking errors in callbacks\n from discord.ext import commands\n\n ErrorFunc = Callable[[Interaction, AppCommandError], Coroutine[Any, Any, None]]\n\n__all__ = (\n 'Command',\n 'ContextMenu',\n 'Group',\n 'Parameter',\n 'context_menu',\n 'command',\n 'describe',\n 'check',\n 'rename',\n 'choices',\n 'autocomplete',\n 'guilds',\n 'guild_only',\n 'default_permissions',\n)\n\nif TYPE_CHECKING:\n P = ParamSpec('P')\nelse:\n P = TypeVar('P')\n\nT = TypeVar('T')\nF = TypeVar('F', bound=Callable[..., Any])\nGroupT = TypeVar('GroupT', bound='Binding')\nCoro = Coroutine[Any, Any, T]\nUnboundError = Callable[['Interaction[Any]', AppCommandError], Coro[Any]]\nError = Union[\n Callable[[GroupT, 'Interaction[Any]', AppCommandError], Coro[Any]],\n UnboundError,\n]\nCheck = Callable[['Interaction[Any]'], Union[bool, Coro[bool]]]\nBinding = Union['Group', 'commands.Cog']\n\n\nif TYPE_CHECKING:\n CommandCallback = Union[\n Callable[Concatenate[GroupT, 'Interaction[Any]', P], Coro[T]],\n Callable[Concatenate['Interaction[Any]', P], 
Coro[T]],\n ]\n\n ContextMenuCallback = Union[\n # If groups end up support context menus these would be uncommented\n # Callable[[GroupT, 'Interaction', Member], Coro[Any]],\n # Callable[[GroupT, 'Interaction', User], Coro[Any]],\n # Callable[[GroupT, 'Interaction', Message], Coro[Any]],\n # Callable[[GroupT, 'Interaction', Union[Member, User]], Coro[Any]],\n Callable[['Interaction[Any]', Member], Coro[Any]],\n Callable[['Interaction[Any]', User], Coro[Any]],\n Callable[['Interaction[Any]', Message], Coro[Any]],\n Callable[['Interaction[Any]', Union[Member, User]], Coro[Any]],\n ]\n\n AutocompleteCallback = Union[\n Callable[[GroupT, 'Interaction[Any]', str], Coro[List[Choice[ChoiceT]]]],\n Callable[['Interaction[Any]', str], Coro[List[Choice[ChoiceT]]]],\n ]\nelse:\n CommandCallback = Callable[..., Coro[T]]\n ContextMenuCallback = Callable[..., Coro[T]]\n AutocompleteCallback = Callable[..., Coro[T]]\n\n\nCheckInputParameter = Union['Command[Any, ..., Any]', 'ContextMenu', 'CommandCallback[Any, ..., Any]', ContextMenuCallback]\n\n# The re module doesn't support \\p{} so we have to list characters from Thai and Devanagari manually.\nTHAI_COMBINING = r'\\u0e31-\\u0e3a\\u0e47-\\u0e4e'\nDEVANAGARI_COMBINING = r'\\u0900-\\u0903\\u093a\\u093b\\u093c\\u093e\\u093f\\u0940-\\u094f\\u0955\\u0956\\u0957\\u0962\\u0963'\nVALID_SLASH_COMMAND_NAME = re.compile(r'^[-_\\w' + THAI_COMBINING + DEVANAGARI_COMBINING + r']{1,32}$')\n\nARG_NAME_SUBREGEX = r'(?:\\\\?\\*){0,2}(?P<name>\\w+)'\n\nARG_DESCRIPTION_SUBREGEX = r'(?P<description>(?:.|\\n)+?(?:\\Z|\\r?\\n(?=[\\S\\r\\n])))'\n\nARG_TYPE_SUBREGEX = r'(?:.+)'\n\nGOOGLE_DOCSTRING_ARG_REGEX = re.compile(\n rf'^{ARG_NAME_SUBREGEX}[ \\t]*(?:\\({ARG_TYPE_SUBREGEX}\\))?[ \\t]*:[ \\t]*{ARG_DESCRIPTION_SUBREGEX}',\n re.MULTILINE,\n)\n\nSPHINX_DOCSTRING_ARG_REGEX = re.compile(\n rf'^:param {ARG_NAME_SUBREGEX}:[ \\t]+{ARG_DESCRIPTION_SUBREGEX}',\n re.MULTILINE,\n)\n\nNUMPY_DOCSTRING_ARG_REGEX = re.compile(\n rf'^{ARG_NAME_SUBREGEX}(?:[ \\t]*:)?(?:[ \\t]+{ARG_TYPE_SUBREGEX})?[ \\t]*\\r?\\n[ \\t]+{ARG_DESCRIPTION_SUBREGEX}',\n re.MULTILINE,\n)\n\n\ndef _parse_args_from_docstring(func: Callable[..., Any], params: Dict[str, CommandParameter]) -> Dict[str, str]:\n docstring = inspect.getdoc(func)\n\n if docstring is None:\n return {}\n\n # Extract the arguments\n # Note: These are loose regexes, but they are good enough for our purposes\n # For Google-style, look only at the lines that are indented\n section_lines = inspect.cleandoc('\\n'.join(line for line in docstring.splitlines() if line.startswith(' ')))\n docstring_styles = (\n GOOGLE_DOCSTRING_ARG_REGEX.finditer(section_lines),\n SPHINX_DOCSTRING_ARG_REGEX.finditer(docstring),\n NUMPY_DOCSTRING_ARG_REGEX.finditer(docstring),\n )\n\n return {\n m.group('name'): m.group('description') for matches in docstring_styles for m in matches if m.group('name') in params\n }\n\n\ndef validate_name(name: str) -> str:\n match = VALID_SLASH_COMMAND_NAME.match(name)\n if match is None:\n raise ValueError(\n f'{name!r} must be between 1-32 characters and contain only lower-case letters, numbers, hyphens, or underscores.'\n )\n\n # Ideally, name.islower() would work instead but since certain characters\n # are Lo (e.g. CJK) those don't pass the test. 
I'd use `casefold` instead as\n # well, but chances are the server-side check is probably something similar to\n # this code anyway.\n if name.lower() != name:\n raise ValueError(f'{name!r} must be all lower-case')\n return name\n\n\ndef validate_context_menu_name(name: str) -> str:\n if not name or len(name) > 32:\n raise ValueError('context menu names must be between 1-32 characters')\n return name\n\n\ndef validate_auto_complete_callback(\n callback: AutocompleteCallback[GroupT, ChoiceT]\n) -> AutocompleteCallback[GroupT, ChoiceT]:\n # This function needs to ensure the following is true:\n # If self.foo is passed then don't pass command.binding to the callback\n # If Class.foo is passed then it is assumed command.binding has to be passed\n # If free_function_foo is passed then no binding should be passed at all\n # Passing command.binding is mandated by pass_command_binding\n\n binding = getattr(callback, '__self__', None)\n pass_command_binding = binding is None and is_inside_class(callback)\n\n # 'method' objects can't have dynamic attributes\n if binding is None:\n callback.pass_command_binding = pass_command_binding\n\n required_parameters = 2 + pass_command_binding\n params = inspect.signature(callback).parameters\n if len(params) != required_parameters:\n raise TypeError(f'autocomplete callback {callback.__qualname__!r} requires either 2 or 3 parameters to be passed')\n\n return callback\n\n\ndef _context_menu_annotation(annotation: Any, *, _none: type = NoneType) -> AppCommandType:\n if annotation is Message:\n return AppCommandType.message\n\n supported_types: Set[Any] = {Member, User}\n if annotation in supported_types:\n return AppCommandType.user\n\n # Check if there's an origin\n origin = getattr(annotation, '__origin__', None)\n if origin is not Union:\n # Only Union is supported so bail early\n msg = (\n f'unsupported type annotation {annotation!r}, must be either discord.Member, '\n 'discord.User, discord.Message, or a typing.Union of discord.Member and discord.User'\n )\n raise TypeError(msg)\n\n # Only Union[Member, User] is supported\n if not all(arg in supported_types for arg in annotation.__args__):\n raise TypeError(f'unsupported types given inside {annotation!r}')\n\n return AppCommandType.user\n\n\ndef _populate_descriptions(params: Dict[str, CommandParameter], descriptions: Dict[str, Any]) -> None:\n for name, param in params.items():\n description = descriptions.pop(name, MISSING)\n if description is MISSING:\n param.description = '…'\n continue\n\n if not isinstance(description, (str, locale_str)):\n raise TypeError('description must be a string')\n\n if isinstance(description, str):\n param.description = _shorten(description)\n else:\n param.description = description\n\n if descriptions:\n first = next(iter(descriptions))\n raise TypeError(f'unknown parameter given: {first}')\n\n\ndef _populate_renames(params: Dict[str, CommandParameter], renames: Dict[str, Union[str, locale_str]]) -> None:\n rename_map: Dict[str, Union[str, locale_str]] = {}\n\n # original name to renamed name\n\n for name in params.keys():\n new_name = renames.pop(name, MISSING)\n\n if new_name is MISSING:\n rename_map[name] = name\n continue\n\n if name in rename_map:\n raise ValueError(f'{new_name} is already used')\n\n if isinstance(new_name, str):\n new_name = validate_name(new_name)\n else:\n validate_name(new_name.message)\n\n rename_map[name] = new_name\n params[name]._rename = new_name\n\n if renames:\n first = next(iter(renames))\n raise ValueError(f'unknown parameter given: 
{first}')\n\n\ndef _populate_choices(params: Dict[str, CommandParameter], all_choices: Dict[str, List[Choice]]) -> None:\n for name, param in params.items():\n choices = all_choices.pop(name, MISSING)\n if choices is MISSING:\n continue\n\n if not isinstance(choices, list):\n raise TypeError('choices must be a list of Choice')\n\n if not all(isinstance(choice, Choice) for choice in choices):\n raise TypeError('choices must be a list of Choice')\n\n if param.type not in (AppCommandOptionType.string, AppCommandOptionType.number, AppCommandOptionType.integer):\n raise TypeError('choices are only supported for integer, string, or number option types')\n\n if not all(param.type == choice._option_type for choice in choices):\n raise TypeError('choices must all have the same inner option type as the parameter choice type')\n\n param.choices = choices\n\n if all_choices:\n first = next(iter(all_choices))\n raise TypeError(f'unknown parameter given: {first}')\n\n\ndef _populate_autocomplete(params: Dict[str, CommandParameter], autocomplete: Dict[str, Any]) -> None:\n for name, param in params.items():\n callback = autocomplete.pop(name, MISSING)\n if callback is MISSING:\n continue\n\n if not inspect.iscoroutinefunction(callback):\n raise TypeError('autocomplete callback must be a coroutine function')\n\n if param.type not in (AppCommandOptionType.string, AppCommandOptionType.number, AppCommandOptionType.integer):\n raise TypeError('autocomplete is only supported for integer, string, or number option types')\n\n if param.is_choice_annotation():\n raise TypeError(\n 'Choice annotation unsupported for autocomplete parameters, consider using a regular annotation instead'\n )\n\n param.autocomplete = validate_auto_complete_callback(callback)\n\n if autocomplete:\n first = next(iter(autocomplete))\n raise TypeError(f'unknown parameter given: {first}')\n\n\ndef _extract_parameters_from_callback(func: Callable[..., Any], globalns: Dict[str, Any]) -> Dict[str, CommandParameter]:\n params = inspect.signature(func).parameters\n cache = {}\n required_params = is_inside_class(func) + 1\n if len(params) < required_params:\n raise TypeError(f'callback {func.__qualname__!r} must have more than {required_params - 1} parameter(s)')\n\n iterator = iter(params.values())\n for _ in range(0, required_params):\n next(iterator)\n\n parameters: List[CommandParameter] = []\n for parameter in iterator:\n if parameter.annotation is parameter.empty:\n raise TypeError(f'parameter {parameter.name!r} is missing a type annotation in callback {func.__qualname__!r}')\n\n resolved = resolve_annotation(parameter.annotation, globalns, globalns, cache)\n param = annotation_to_parameter(resolved, parameter)\n parameters.append(param)\n\n values = sorted(parameters, key=lambda a: a.required, reverse=True)\n result = {v.name: v for v in values}\n\n descriptions = _parse_args_from_docstring(func, result)\n\n try:\n descriptions.update(func.__discord_app_commands_param_description__)\n except AttributeError:\n for param in values:\n if param.description is MISSING:\n param.description = '…'\n if descriptions:\n _populate_descriptions(result, descriptions)\n\n try:\n renames = func.__discord_app_commands_param_rename__\n except AttributeError:\n pass\n else:\n _populate_renames(result, renames.copy())\n\n try:\n choices = func.__discord_app_commands_param_choices__\n except AttributeError:\n pass\n else:\n _populate_choices(result, choices.copy())\n\n try:\n autocomplete = func.__discord_app_commands_param_autocomplete__\n except 
AttributeError:\n pass\n else:\n _populate_autocomplete(result, autocomplete.copy())\n\n return result\n\n\ndef _get_context_menu_parameter(func: ContextMenuCallback) -> Tuple[str, Any, AppCommandType]:\n params = inspect.signature(func).parameters\n if is_inside_class(func) and not hasattr(func, '__self__'):\n raise TypeError('context menus cannot be defined inside a class')\n\n if len(params) != 2:\n msg = (\n f'context menu callback {func.__qualname__!r} requires 2 parameters, '\n 'the first one being the interaction and the other one explicitly '\n 'annotated with either discord.Message, discord.User, discord.Member, '\n 'or a typing.Union of discord.Member and discord.User'\n )\n raise TypeError(msg)\n\n iterator = iter(params.values())\n next(iterator) # skip interaction\n parameter = next(iterator)\n if parameter.annotation is parameter.empty:\n msg = (\n f'second parameter of context menu callback {func.__qualname__!r} must be explicitly '\n 'annotated with either discord.Message, discord.User, discord.Member, or '\n 'a typing.Union of discord.Member and discord.User'\n )\n raise TypeError(msg)\n\n resolved = resolve_annotation(parameter.annotation, func.__globals__, func.__globals__, {})\n type = _context_menu_annotation(resolved)\n return (parameter.name, resolved, type)\n\n\ndef mark_overrideable(func: F) -> F:\n func.__discord_app_commands_base_function__ = None\n return func\n\n\nclass Parameter:\n \"\"\"A class that contains the parameter information of a :class:`Command` callback.\n\n .. versionadded:: 2.0\n\n Attributes\n -----------\n name: :class:`str`\n The name of the parameter. This is the Python identifier for the parameter.\n display_name: :class:`str`\n The displayed name of the parameter on Discord.\n description: :class:`str`\n The description of the parameter.\n autocomplete: :class:`bool`\n Whether the parameter has an autocomplete handler.\n locale_name: Optional[:class:`locale_str`]\n The display name's locale string, if available.\n locale_description: Optional[:class:`locale_str`]\n The description's locale string, if available.\n required: :class:`bool`\n Whether the parameter is required\n choices: List[:class:`~discord.app_commands.Choice`]\n A list of choices this parameter takes, if any.\n type: :class:`~discord.AppCommandOptionType`\n The underlying type of this parameter.\n channel_types: List[:class:`~discord.ChannelType`]\n The channel types that are allowed for this parameter.\n min_value: Optional[Union[:class:`int`, :class:`float`]]\n The minimum supported value for this parameter.\n max_value: Optional[Union[:class:`int`, :class:`float`]]\n The maximum supported value for this parameter.\n default: Any\n The default value of the parameter, if given.\n If not given then this is :data:`~discord.utils.MISSING`.\n command: :class:`Command`\n The command this parameter is attached to.\n \"\"\"\n\n def __init__(self, parent: CommandParameter, command: Command[Any, ..., Any]) -> None:\n self.__parent: CommandParameter = parent\n self.__command: Command[Any, ..., Any] = command\n\n @property\n def command(self) -> Command[Any, ..., Any]:\n return self.__command\n\n @property\n def name(self) -> str:\n return self.__parent.name\n\n @property\n def display_name(self) -> str:\n return self.__parent.display_name\n\n @property\n def required(self) -> bool:\n return self.__parent.required\n\n @property\n def description(self) -> str:\n return str(self.__parent.description)\n\n @property\n def locale_name(self) -> Optional[locale_str]:\n if 
isinstance(self.__parent._rename, locale_str):\n return self.__parent._rename\n return None\n\n @property\n def locale_description(self) -> Optional[locale_str]:\n if isinstance(self.__parent.description, locale_str):\n return self.__parent.description\n return None\n\n @property\n def autocomplete(self) -> bool:\n return self.__parent.autocomplete is not None\n\n @property\n def default(self) -> Any:\n return self.__parent.default\n\n @property\n def type(self) -> AppCommandOptionType:\n return self.__parent.type\n\n @property\n def choices(self) -> List[Choice[Union[int, float, str]]]:\n choices = self.__parent.choices\n if choices is MISSING:\n return []\n return choices.copy()\n\n @property\n def channel_types(self) -> List[ChannelType]:\n channel_types = self.__parent.channel_types\n if channel_types is MISSING:\n return []\n return channel_types.copy()\n\n @property\n def min_value(self) -> Optional[Union[int, float]]:\n return self.__parent.min_value\n\n @property\n def max_value(self) -> Optional[Union[int, float]]:\n return self.__parent.max_value\n\n\nclass Command(Generic[GroupT, P, T]):\n \"\"\"A class that implements an application command.\n\n These are usually not created manually, instead they are created using\n one of the following decorators:\n\n - :func:`~discord.app_commands.command`\n - :meth:`Group.command <discord.app_commands.Group.command>`\n - :meth:`CommandTree.command <discord.app_commands.CommandTree.command>`\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n name: Union[:class:`str`, :class:`locale_str`]\n The name of the application command.\n description: Union[:class:`str`, :class:`locale_str`]\n The description of the application command. This shows up in the UI to describe\n the application command.\n callback: :ref:`coroutine <coroutine>`\n The coroutine that is executed when the command is called.\n auto_locale_strings: :class:`bool`\n If this is set to ``True``, then all translatable strings will implicitly\n be wrapped into :class:`locale_str` rather than :class:`str`. This could\n avoid some repetition and be more ergonomic for certain defaults such\n as default command names, command descriptions, and parameter names.\n Defaults to ``True``.\n nsfw: :class:`bool`\n Whether the command is NSFW and should only work in NSFW channels.\n Defaults to ``False``.\n\n Due to a Discord limitation, this does not work on subcommands.\n parent: Optional[:class:`Group`]\n The parent application command. ``None`` if there isn't one.\n extras: :class:`dict`\n A dictionary that can be used to store extraneous data.\n The library will not touch any values or keys within this dictionary.\n\n Attributes\n ------------\n name: :class:`str`\n The name of the application command.\n description: :class:`str`\n The description of the application command. This shows up in the UI to describe\n the application command.\n checks\n A list of predicates that take a :class:`~discord.Interaction` parameter\n to indicate whether the command callback should be executed. If an exception\n is necessary to be thrown to signal failure, then one inherited from\n :exc:`AppCommandError` should be used. If all the checks fail without\n propagating an exception, :exc:`CheckFailure` is raised.\n default_permissions: Optional[:class:`~discord.Permissions`]\n The default permissions that can execute this command on Discord. 
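A minimal usage sketch of the read-only parameter view defined above, assuming discord.py 2.x is installed; the `ban` command and its description here are hypothetical, not part of this file:

```python
import discord
from discord import app_commands

@app_commands.command()
@app_commands.describe(member='the member to ban')
async def ban(interaction: discord.Interaction, member: discord.Member):
    """Bans a member"""
    await interaction.response.send_message(f'Banned {member}')

# get_parameter takes the Python identifier, not the renamed display name.
param = ban.get_parameter('member')
assert param is not None and param.required
print(param.display_name, '->', param.description)  # member -> the member to ban
```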
Note\n that server administrators can override this value in the client.\n Setting an empty permissions field will disallow anyone except server\n administrators from using the command in a guild.\n\n Due to a Discord limitation, this does not work on subcommands.\n guild_only: :class:`bool`\n Whether the command should only be usable in guild contexts.\n\n Due to a Discord limitation, this does not work on subcommands.\n nsfw: :class:`bool`\n Whether the command is NSFW and should only work in NSFW channels.\n\n Due to a Discord limitation, this does not work on subcommands.\n parent: Optional[:class:`Group`]\n The parent application command. ``None`` if there isn't one.\n extras: :class:`dict`\n A dictionary that can be used to store extraneous data.\n The library will not touch any values or keys within this dictionary.\n \"\"\"\n\n def __init__(\n self,\n *,\n name: Union[str, locale_str],\n description: Union[str, locale_str],\n callback: CommandCallback[GroupT, P, T],\n nsfw: bool = False,\n parent: Optional[Group] = None,\n guild_ids: Optional[List[int]] = None,\n auto_locale_strings: bool = True,\n extras: Dict[Any, Any] = MISSING,\n ):\n name, locale = (name.message, name) if isinstance(name, locale_str) else (name, None)\n self.name: str = validate_name(name)\n self._locale_name: Optional[locale_str] = locale\n description, locale = (\n (description.message, description) if isinstance(description, locale_str) else (description, None)\n )\n self.description: str = description\n self._locale_description: Optional[locale_str] = locale\n self._attr: Optional[str] = None\n self._callback: CommandCallback[GroupT, P, T] = callback\n self.parent: Optional[Group] = parent\n self.binding: Optional[GroupT] = None\n self.on_error: Optional[Error[GroupT]] = None\n self.module: Optional[str] = callback.__module__\n\n # Unwrap __self__ for bound methods\n try:\n self.binding = callback.__self__\n self._callback = callback = callback.__func__\n except AttributeError:\n pass\n\n self._params: Dict[str, CommandParameter] = _extract_parameters_from_callback(callback, callback.__globals__)\n self.checks: List[Check] = getattr(callback, '__discord_app_commands_checks__', [])\n self._guild_ids: Optional[List[int]] = guild_ids or getattr(\n callback, '__discord_app_commands_default_guilds__', None\n )\n self.default_permissions: Optional[Permissions] = getattr(\n callback, '__discord_app_commands_default_permissions__', None\n )\n self.guild_only: bool = getattr(callback, '__discord_app_commands_guild_only__', False)\n self.nsfw: bool = nsfw\n self.extras: Dict[Any, Any] = extras or {}\n\n if self._guild_ids is not None and self.parent is not None:\n raise ValueError('child commands cannot have default guilds set, consider setting them in the parent instead')\n\n if auto_locale_strings:\n self._convert_to_locale_strings()\n\n def _convert_to_locale_strings(self) -> None:\n if self._locale_name is None:\n self._locale_name = locale_str(self.name)\n if self._locale_description is None:\n self._locale_description = locale_str(self.description)\n\n for param in self._params.values():\n param._convert_to_locale_strings()\n\n def __set_name__(self, owner: Type[Any], name: str) -> None:\n self._attr = name\n\n @property\n def callback(self) -> CommandCallback[GroupT, P, T]:\n \"\"\":ref:`coroutine <coroutine>`: The coroutine that is executed when the command is called.\"\"\"\n return self._callback\n\n def _copy_with(\n self,\n *,\n parent: Optional[Group],\n binding: GroupT,\n bindings: 
MutableMapping[GroupT, GroupT] = MISSING,\n set_on_binding: bool = True,\n ) -> Command:\n bindings = {} if bindings is MISSING else bindings\n\n copy = shallow_copy(self)\n copy._params = self._params.copy()\n copy.parent = parent\n copy.binding = bindings.get(self.binding) if self.binding is not None else binding\n\n if copy._attr and set_on_binding:\n setattr(copy.binding, copy._attr, copy)\n\n return copy\n\n async def get_translated_payload(self, translator: Translator) -> Dict[str, Any]:\n base = self.to_dict()\n name_localizations: Dict[str, str] = {}\n description_localizations: Dict[str, str] = {}\n\n # Prevent creating these objects in a heavy loop\n name_context = TranslationContext(location=TranslationContextLocation.command_name, data=self)\n description_context = TranslationContext(location=TranslationContextLocation.command_description, data=self)\n\n for locale in Locale:\n if self._locale_name:\n translation = await translator._checked_translate(self._locale_name, locale, name_context)\n if translation is not None:\n name_localizations[locale.value] = translation\n\n if self._locale_description:\n translation = await translator._checked_translate(self._locale_description, locale, description_context)\n if translation is not None:\n description_localizations[locale.value] = translation\n\n base['name_localizations'] = name_localizations\n base['description_localizations'] = description_localizations\n base['options'] = [\n await param.get_translated_payload(translator, Parameter(param, self)) for param in self._params.values()\n ]\n return base\n\n def to_dict(self) -> Dict[str, Any]:\n # If we have a parent then our type is a subcommand\n # Otherwise, the type falls back to the specific command type (e.g. slash command or context menu)\n option_type = AppCommandType.chat_input.value if self.parent is None else AppCommandOptionType.subcommand.value\n base: Dict[str, Any] = {\n 'name': self.name,\n 'description': self.description,\n 'type': option_type,\n 'options': [param.to_dict() for param in self._params.values()],\n }\n\n if self.parent is None:\n base['nsfw'] = self.nsfw\n base['dm_permission'] = not self.guild_only\n base['default_member_permissions'] = None if self.default_permissions is None else self.default_permissions.value\n\n return base\n\n async def _invoke_error_handlers(self, interaction: Interaction, error: AppCommandError) -> None:\n # These type ignores are because the type checker can't narrow this type properly.\n if self.on_error is not None:\n if self.binding is not None:\n await self.on_error(self.binding, interaction, error) # type: ignore\n else:\n await self.on_error(interaction, error) # type: ignore\n\n parent = self.parent\n if parent is not None:\n await parent.on_error(interaction, error)\n\n if parent.parent is not None:\n await parent.parent.on_error(interaction, error)\n\n binding_error_handler = getattr(self.binding, '__discord_app_commands_error_handler__', None)\n if binding_error_handler is not None:\n await binding_error_handler(interaction, error)\n\n def _has_any_error_handlers(self) -> bool:\n if self.on_error is not None:\n return True\n\n parent = self.parent\n if parent is not None:\n # Check if the on_error is overridden\n if not hasattr(parent.on_error, '__discord_app_commands_base_function__'):\n return True\n\n if parent.parent is not None:\n if not hasattr(parent.parent.on_error, '__discord_app_commands_base_function__'):\n return True\n\n # Check if we have a bound error handler\n if getattr(self.binding, 
'__discord_app_commands_error_handler__', None) is not None:\n return True\n\n return False\n\n async def _transform_arguments(self, interaction: Interaction, namespace: Namespace) -> Dict[str, Any]:\n values = namespace.__dict__\n transformed_values = {}\n\n for param in self._params.values():\n try:\n value = values[param.display_name]\n except KeyError:\n if not param.required:\n transformed_values[param.name] = param.default\n else:\n raise CommandSignatureMismatch(self) from None\n else:\n transformed_values[param.name] = await param.transform(interaction, value)\n\n return transformed_values\n\n async def _do_call(self, interaction: Interaction, params: Dict[str, Any]) -> T:\n # These type ignores are because the type checker doesn't quite understand the narrowing here\n # Likewise, it thinks we're missing positional arguments when there aren't any.\n try:\n if self.binding is not None:\n return await self._callback(self.binding, interaction, **params) # type: ignore\n return await self._callback(interaction, **params) # type: ignore\n except TypeError as e:\n # In order to detect mismatch from the provided signature and the Discord data,\n # there are many ways it can go wrong yet all of them eventually lead to a TypeError\n # from the Python compiler showcasing that the signature is incorrect. This lovely\n # piece of code essentially checks the last frame of the caller and checks if the\n # locals contains our `self` reference.\n #\n # This is because there is a possibility that a TypeError is raised within the body\n # of the function, and in that case the locals wouldn't contain a reference to\n # the command object under the name `self`.\n frame = inspect.trace()[-1].frame\n if frame.f_locals.get('self') is self:\n raise CommandSignatureMismatch(self) from None\n raise CommandInvokeError(self, e) from e\n except AppCommandError:\n raise\n except Exception as e:\n raise CommandInvokeError(self, e) from e\n\n async def _invoke_with_namespace(self, interaction: Interaction, namespace: Namespace) -> T:\n if not await self._check_can_run(interaction):\n raise CheckFailure(f'The check functions for command {self.name!r} failed.')\n\n transformed_values = await self._transform_arguments(interaction, namespace)\n return await self._do_call(interaction, transformed_values)\n\n async def _invoke_autocomplete(self, interaction: Interaction, name: str, namespace: Namespace):\n # The namespace contains the Discord provided names so this will be fine\n # even if the name is renamed\n value = namespace.__dict__[name]\n\n try:\n param = self._params[name]\n except KeyError:\n # Slow case, it might be a rename\n params = {param.display_name: param for param in self._params.values()}\n try:\n param = params[name]\n except KeyError:\n raise CommandSignatureMismatch(self) from None\n\n if param.autocomplete is None:\n raise CommandSignatureMismatch(self)\n\n predicates = getattr(param.autocomplete, '__discord_app_commands_checks__', [])\n if predicates:\n try:\n passed = await async_all(f(interaction) for f in predicates)\n except Exception:\n passed = False\n\n if not passed:\n if not interaction.response.is_done():\n await interaction.response.autocomplete([])\n return\n\n if getattr(param.autocomplete, 'pass_command_binding', False):\n binding = self.binding\n if binding is not None:\n choices = await param.autocomplete(binding, interaction, value)\n else:\n raise TypeError('autocomplete parameter expected a bound self parameter but one was not provided')\n else:\n choices = await 
param.autocomplete(interaction, value)\n\n if interaction.response.is_done():\n return\n\n await interaction.response.autocomplete(choices)\n\n def _get_internal_command(self, name: str) -> Optional[Union[Command, Group]]:\n return None\n\n @property\n def parameters(self) -> List[Parameter]:\n \"\"\"Returns a list of parameters for this command.\n\n This does not include the ``self`` or ``interaction`` parameters.\n\n Returns\n --------\n List[:class:`Parameter`]\n The parameters of this command.\n \"\"\"\n return [Parameter(p, self) for p in self._params.values()]\n\n def get_parameter(self, name: str) -> Optional[Parameter]:\n \"\"\"Retrieves a parameter by its name.\n\n The name must be the Python identifier rather than the renamed\n one for display on Discord.\n\n Parameters\n -----------\n name: :class:`str`\n The parameter name in the callback function.\n\n Returns\n --------\n Optional[:class:`Parameter`]\n The parameter or ``None`` if not found.\n \"\"\"\n\n parent = self._params.get(name)\n if parent is not None:\n return Parameter(parent, self)\n return None\n\n @property\n def root_parent(self) -> Optional[Group]:\n \"\"\"Optional[:class:`Group`]: The root parent of this command.\"\"\"\n if self.parent is None:\n return None\n parent = self.parent\n return parent.parent or parent\n\n @property\n def qualified_name(self) -> str:\n \"\"\":class:`str`: Returns the fully qualified command name.\n\n The qualified name includes the parent name as well. For example,\n in a command like ``/foo bar`` the qualified name is ``foo bar``.\n \"\"\"\n # A B C\n # ^ self\n # ^ parent\n # ^ grandparent\n if self.parent is None:\n return self.name\n\n names = [self.name, self.parent.name]\n grandparent = self.parent.parent\n if grandparent is not None:\n names.append(grandparent.name)\n\n return ' '.join(reversed(names))\n\n async def _check_can_run(self, interaction: Interaction) -> bool:\n if self.parent is not None and self.parent is not self.binding:\n # For commands with a parent which isn't the binding, i.e.\n # <binding>\n # <parent>\n # <command>\n # The parent check needs to be called first\n if not await maybe_coroutine(self.parent.interaction_check, interaction):\n return False\n\n if self.binding is not None:\n check: Optional[Check] = getattr(self.binding, 'interaction_check', None)\n if check:\n ret = await maybe_coroutine(check, interaction)\n if not ret:\n return False\n\n predicates = self.checks\n if not predicates:\n return True\n\n return await async_all(f(interaction) for f in predicates)\n\n def error(self, coro: Error[GroupT]) -> Error[GroupT]:\n \"\"\"A decorator that registers a coroutine as a local error handler.\n\n The local error handler is called whenever an exception is raised in the body\n of the command or during handling of the command. 
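A hedged sketch of the local error handler registration described here (the `divide` command is hypothetical; assumes discord.py 2.x):

```python
import discord
from discord import app_commands

@app_commands.command()
async def divide(interaction: discord.Interaction, a: int, b: int):
    """Divides two numbers"""
    await interaction.response.send_message(str(a / b))

@divide.error
async def divide_error(interaction: discord.Interaction, error: app_commands.AppCommandError):
    # Exceptions raised inside the callback arrive wrapped in CommandInvokeError.
    if isinstance(error, app_commands.CommandInvokeError):
        await interaction.response.send_message('Division failed.', ephemeral=True)
```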
The error handler must take\n 2 parameters, the interaction and the error.\n\n The error passed will be derived from :exc:`AppCommandError`.\n\n Parameters\n -----------\n coro: :ref:`coroutine <coroutine>`\n The coroutine to register as the local error handler.\n\n Raises\n -------\n TypeError\n The coroutine passed is not actually a coroutine.\n \"\"\"\n\n if not inspect.iscoroutinefunction(coro):\n raise TypeError('The error handler must be a coroutine.')\n\n self.on_error = coro\n return coro\n\n def autocomplete(\n self, name: str\n ) -> Callable[[AutocompleteCallback[GroupT, ChoiceT]], AutocompleteCallback[GroupT, ChoiceT]]:\n \"\"\"A decorator that registers a coroutine as an autocomplete prompt for a parameter.\n\n The coroutine callback must have 2 parameters, the :class:`~discord.Interaction`,\n and the current value by the user (the string currently being typed by the user).\n\n To get the values from other parameters that may be filled in, accessing\n :attr:`.Interaction.namespace` will give a :class:`Namespace` object with those\n values.\n\n Parent :func:`checks <check>` are ignored within an autocomplete. However, checks can be added\n to the autocomplete callback and the ones added will be called. If the checks fail for any reason\n then an empty list is sent as the interaction response.\n\n The coroutine decorator **must** return a list of :class:`~discord.app_commands.Choice` objects.\n Only up to 25 objects are supported.\n\n .. warning::\n The choices returned from this coroutine are suggestions. The user may ignore them and input their own value.\n\n Example:\n\n .. code-block:: python3\n\n @app_commands.command()\n async def fruits(interaction: discord.Interaction, fruit: str):\n await interaction.response.send_message(f'Your favourite fruit seems to be {fruit}')\n\n @fruits.autocomplete('fruit')\n async def fruits_autocomplete(\n interaction: discord.Interaction,\n current: str,\n ) -> List[app_commands.Choice[str]]:\n fruits = ['Banana', 'Pineapple', 'Apple', 'Watermelon', 'Melon', 'Cherry']\n return [\n app_commands.Choice(name=fruit, value=fruit)\n for fruit in fruits if current.lower() in fruit.lower()\n ]\n\n\n Parameters\n -----------\n name: :class:`str`\n The parameter name to register as autocomplete.\n\n Raises\n -------\n TypeError\n The coroutine passed is not actually a coroutine or\n the parameter is not found or of an invalid type.\n \"\"\"\n\n def decorator(coro: AutocompleteCallback[GroupT, ChoiceT]) -> AutocompleteCallback[GroupT, ChoiceT]:\n if not inspect.iscoroutinefunction(coro):\n raise TypeError('The error handler must be a coroutine.')\n\n try:\n param = self._params[name]\n except KeyError:\n raise TypeError(f'unknown parameter: {name!r}') from None\n\n if param.type not in (AppCommandOptionType.string, AppCommandOptionType.number, AppCommandOptionType.integer):\n raise TypeError('autocomplete is only supported for integer, string, or number option types')\n\n if param.is_choice_annotation():\n raise TypeError(\n 'Choice annotation unsupported for autocomplete parameters, consider using a regular annotation instead'\n )\n\n param.autocomplete = validate_auto_complete_callback(coro)\n return coro\n\n return decorator\n\n def add_check(self, func: Check, /) -> None:\n \"\"\"Adds a check to the command.\n\n This is the non-decorator interface to :func:`check`.\n\n Parameters\n -----------\n func\n The function that will be used as a check.\n \"\"\"\n\n self.checks.append(func)\n\n def remove_check(self, func: Check, /) -> None:\n \"\"\"Removes 
a check from the command.\n\n This function is idempotent and will not raise an exception\n if the function is not in the command's checks.\n\n Parameters\n -----------\n func\n The function to remove from the checks.\n \"\"\"\n\n try:\n self.checks.remove(func)\n except ValueError:\n pass\n\n\nclass ContextMenu:\n \"\"\"A class that implements a context menu application command.\n\n These are usually not created manually, instead they are created using\n one of the following decorators:\n\n - :func:`~discord.app_commands.context_menu`\n - :meth:`CommandTree.context_menu <discord.app_commands.CommandTree.context_menu>`\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n name: Union[:class:`str`, :class:`locale_str`]\n The name of the context menu.\n callback: :ref:`coroutine <coroutine>`\n The coroutine that is executed when the command is called.\n type: :class:`.AppCommandType`\n The type of context menu application command. By default, this is inferred\n by the parameter of the callback.\n auto_locale_strings: :class:`bool`\n If this is set to ``True``, then all translatable strings will implicitly\n be wrapped into :class:`locale_str` rather than :class:`str`. This could\n avoid some repetition and be more ergonomic for certain defaults such\n as default command names, command descriptions, and parameter names.\n Defaults to ``True``.\n nsfw: :class:`bool`\n Whether the command is NSFW and should only work in NSFW channels.\n Defaults to ``False``.\n extras: :class:`dict`\n A dictionary that can be used to store extraneous data.\n The library will not touch any values or keys within this dictionary.\n\n Attributes\n ------------\n name: :class:`str`\n The name of the context menu.\n type: :class:`.AppCommandType`\n The type of context menu application command. By default, this is inferred\n by the parameter of the callback.\n default_permissions: Optional[:class:`~discord.Permissions`]\n The default permissions that can execute this command on Discord. Note\n that server administrators can override this value in the client.\n Setting an empty permissions field will disallow anyone except server\n administrators from using the command in a guild.\n guild_only: :class:`bool`\n Whether the command should only be usable in guild contexts.\n Defaults to ``False``.\n nsfw: :class:`bool`\n Whether the command is NSFW and should only work in NSFW channels.\n Defaults to ``False``.\n checks\n A list of predicates that take a :class:`~discord.Interaction` parameter\n to indicate whether the command callback should be executed. If an exception\n is necessary to be thrown to signal failure, then one inherited from\n :exc:`AppCommandError` should be used. 
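A short sketch of the non-decorator check interface (`is_owner` and the id are hypothetical):

```python
import discord
from discord import app_commands

def is_owner(interaction: discord.Interaction) -> bool:
    return interaction.user.id == 80088516616269824  # hypothetical owner id

@app_commands.command()
async def shutdown(interaction: discord.Interaction):
    """Owner only"""
    await interaction.response.send_message('Shutting down...')

shutdown.add_check(is_owner)     # non-decorator form of @app_commands.check
shutdown.remove_check(is_owner)  # idempotent; missing checks are ignored
```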
If all the checks fail without\n propagating an exception, :exc:`CheckFailure` is raised.\n extras: :class:`dict`\n A dictionary that can be used to store extraneous data.\n The library will not touch any values or keys within this dictionary.\n \"\"\"\n\n def __init__(\n self,\n *,\n name: Union[str, locale_str],\n callback: ContextMenuCallback,\n type: AppCommandType = MISSING,\n nsfw: bool = False,\n guild_ids: Optional[List[int]] = None,\n auto_locale_strings: bool = True,\n extras: Dict[Any, Any] = MISSING,\n ):\n name, locale = (name.message, name) if isinstance(name, locale_str) else (name, None)\n self.name: str = validate_context_menu_name(name)\n self._locale_name: Optional[locale_str] = locale\n self._callback: ContextMenuCallback = callback\n (param, annotation, actual_type) = _get_context_menu_parameter(callback)\n if type is MISSING:\n type = actual_type\n\n if actual_type != type:\n raise ValueError(f'context menu callback implies a type of {actual_type} but {type} was passed.')\n\n self.type: AppCommandType = type\n self._param_name = param\n self._annotation = annotation\n self.module: Optional[str] = callback.__module__\n self._guild_ids = guild_ids or getattr(callback, '__discord_app_commands_default_guilds__', None)\n self.on_error: Optional[UnboundError] = None\n self.default_permissions: Optional[Permissions] = getattr(\n callback, '__discord_app_commands_default_permissions__', None\n )\n self.nsfw: bool = nsfw\n self.guild_only: bool = getattr(callback, '__discord_app_commands_guild_only__', False)\n self.checks: List[Check] = getattr(callback, '__discord_app_commands_checks__', [])\n self.extras: Dict[Any, Any] = extras or {}\n\n if auto_locale_strings:\n if self._locale_name is None:\n self._locale_name = locale_str(self.name)\n\n @property\n def callback(self) -> ContextMenuCallback:\n \"\"\":ref:`coroutine <coroutine>`: The coroutine that is executed when the context menu is called.\"\"\"\n return self._callback\n\n @property\n def qualified_name(self) -> str:\n \"\"\":class:`str`: Returns the fully qualified command name.\"\"\"\n return self.name\n\n async def get_translated_payload(self, translator: Translator) -> Dict[str, Any]:\n base = self.to_dict()\n context = TranslationContext(location=TranslationContextLocation.command_name, data=self)\n if self._locale_name:\n name_localizations: Dict[str, str] = {}\n for locale in Locale:\n translation = await translator._checked_translate(self._locale_name, locale, context)\n if translation is not None:\n name_localizations[locale.value] = translation\n\n base['name_localizations'] = name_localizations\n return base\n\n def to_dict(self) -> Dict[str, Any]:\n return {\n 'name': self.name,\n 'type': self.type.value,\n 'dm_permission': not self.guild_only,\n 'default_member_permissions': None if self.default_permissions is None else self.default_permissions.value,\n 'nsfw': self.nsfw,\n }\n\n async def _check_can_run(self, interaction: Interaction) -> bool:\n predicates = self.checks\n if not predicates:\n return True\n\n return await async_all(f(interaction) for f in predicates)\n\n def _has_any_error_handlers(self) -> bool:\n return self.on_error is not None\n\n async def _invoke(self, interaction: Interaction, arg: Any):\n try:\n if not await self._check_can_run(interaction):\n raise CheckFailure(f'The check functions for context menu {self.name!r} failed.')\n\n await self._callback(interaction, arg)\n except AppCommandError:\n raise\n except Exception as e:\n raise CommandInvokeError(self, e) from e\n\n def 
error(self, coro: UnboundError) -> UnboundError:\n \"\"\"A decorator that registers a coroutine as a local error handler.\n\n The local error handler is called whenever an exception is raised in the body\n of the command or during handling of the command. The error handler must take\n 2 parameters, the interaction and the error.\n\n The error passed will be derived from :exc:`AppCommandError`.\n\n Parameters\n -----------\n coro: :ref:`coroutine <coroutine>`\n The coroutine to register as the local error handler.\n\n Raises\n -------\n TypeError\n The coroutine passed is not actually a coroutine.\n \"\"\"\n\n if not inspect.iscoroutinefunction(coro):\n raise TypeError('The error handler must be a coroutine.')\n\n self.on_error = coro\n return coro\n\n def add_check(self, func: Check, /) -> None:\n \"\"\"Adds a check to the command.\n\n This is the non-decorator interface to :func:`check`.\n\n Parameters\n -----------\n func\n The function that will be used as a check.\n \"\"\"\n\n self.checks.append(func)\n\n def remove_check(self, func: Check, /) -> None:\n \"\"\"Removes a check from the command.\n\n This function is idempotent and will not raise an exception\n if the function is not in the command's checks.\n\n Parameters\n -----------\n func\n The function to remove from the checks.\n \"\"\"\n\n try:\n self.checks.remove(func)\n except ValueError:\n pass\n\n\nclass Group:\n \"\"\"A class that implements an application command group.\n\n These are usually inherited rather than created manually.\n\n Decorators such as :func:`guild_only`, :func:`guilds`, and :func:`default_permissions`\n will apply to the group if used on top of a subclass. For example:\n\n .. code-block:: python3\n\n from discord import app_commands\n\n @app_commands.guild_only()\n class MyGroup(app_commands.Group):\n pass\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n name: Union[:class:`str`, :class:`locale_str`]\n The name of the group. If not given, it defaults to a lower-case\n kebab-case version of the class name.\n description: Union[:class:`str`, :class:`locale_str`]\n The description of the group. This shows up in the UI to describe\n the group. If not given, it defaults to the docstring of the\n class shortened to 100 characters.\n auto_locale_strings: :class:`bool`\n If this is set to ``True``, then all translatable strings will implicitly\n be wrapped into :class:`locale_str` rather than :class:`str`. This could\n avoid some repetition and be more ergonomic for certain defaults such\n as default command names, command descriptions, and parameter names.\n Defaults to ``True``.\n default_permissions: Optional[:class:`~discord.Permissions`]\n The default permissions that can execute this group on Discord. Note\n that server administrators can override this value in the client.\n Setting an empty permissions field will disallow anyone except server\n administrators from using the command in a guild.\n\n Due to a Discord limitation, this does not work on subcommands.\n guild_only: :class:`bool`\n Whether the group should only be usable in guild contexts.\n Defaults to ``False``.\n\n Due to a Discord limitation, this does not work on subcommands.\n nsfw: :class:`bool`\n Whether the command is NSFW and should only work in NSFW channels.\n Defaults to ``False``.\n\n Due to a Discord limitation, this does not work on subcommands.\n parent: Optional[:class:`Group`]\n The parent application command. 
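A minimal sketch of the type inference performed via `_get_context_menu_parameter` (the `Bookmark` menu is hypothetical):

```python
import discord
from discord import app_commands

# The second parameter's annotation decides the AppCommandType:
# discord.Message -> message command; Member/User (or their Union) -> user
# command. Passing a mismatched explicit type= raises ValueError.
@app_commands.context_menu(name='Bookmark')
async def bookmark(interaction: discord.Interaction, message: discord.Message):
    await interaction.response.send_message('Saved!', ephemeral=True)

assert bookmark.type is discord.AppCommandType.message
```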
``None`` if there isn't one.\n extras: :class:`dict`\n A dictionary that can be used to store extraneous data.\n The library will not touch any values or keys within this dictionary.\n\n Attributes\n ------------\n name: :class:`str`\n The name of the group.\n description: :class:`str`\n The description of the group. This shows up in the UI to describe\n the group.\n default_permissions: Optional[:class:`~discord.Permissions`]\n The default permissions that can execute this group on Discord. Note\n that server administrators can override this value in the client.\n Setting an empty permissions field will disallow anyone except server\n administrators from using the command in a guild.\n\n Due to a Discord limitation, this does not work on subcommands.\n guild_only: :class:`bool`\n Whether the group should only be usable in guild contexts.\n\n Due to a Discord limitation, this does not work on subcommands.\n nsfw: :class:`bool`\n Whether the command is NSFW and should only work in NSFW channels.\n\n Due to a Discord limitation, this does not work on subcommands.\n parent: Optional[:class:`Group`]\n The parent group. ``None`` if there isn't one.\n extras: :class:`dict`\n A dictionary that can be used to store extraneous data.\n The library will not touch any values or keys within this dictionary.\n \"\"\"\n\n __discord_app_commands_group_children__: ClassVar[List[Union[Command[Any, ..., Any], Group]]] = []\n __discord_app_commands_skip_init_binding__: bool = False\n __discord_app_commands_group_name__: str = MISSING\n __discord_app_commands_group_description__: str = MISSING\n __discord_app_commands_group_locale_name__: Optional[locale_str] = None\n __discord_app_commands_group_locale_description__: Optional[locale_str] = None\n __discord_app_commands_group_nsfw__: bool = False\n __discord_app_commands_guild_only__: bool = MISSING\n __discord_app_commands_default_permissions__: Optional[Permissions] = MISSING\n __discord_app_commands_has_module__: bool = False\n __discord_app_commands_error_handler__: Optional[\n Callable[[Interaction, AppCommandError], Coroutine[Any, Any, None]]\n ] = None\n\n def __init_subclass__(\n cls,\n *,\n name: Union[str, locale_str] = MISSING,\n description: Union[str, locale_str] = MISSING,\n guild_only: bool = MISSING,\n nsfw: bool = False,\n default_permissions: Optional[Permissions] = MISSING,\n ) -> None:\n if not cls.__discord_app_commands_group_children__:\n children: List[Union[Command[Any, ..., Any], Group]] = [\n member for member in cls.__dict__.values() if isinstance(member, (Group, Command)) and member.parent is None\n ]\n\n cls.__discord_app_commands_group_children__ = children\n\n found = set()\n for child in children:\n if child.name in found:\n raise TypeError(f'Command {child.name!r} is a duplicate')\n found.add(child.name)\n\n if len(children) > 25:\n raise TypeError('groups cannot have more than 25 commands')\n\n if name is MISSING:\n cls.__discord_app_commands_group_name__ = validate_name(_to_kebab_case(cls.__name__))\n elif isinstance(name, str):\n cls.__discord_app_commands_group_name__ = validate_name(name)\n else:\n cls.__discord_app_commands_group_name__ = validate_name(name.message)\n cls.__discord_app_commands_group_locale_name__ = name\n\n if description is MISSING:\n if cls.__doc__ is None:\n cls.__discord_app_commands_group_description__ = '…'\n else:\n cls.__discord_app_commands_group_description__ = _shorten(cls.__doc__)\n elif isinstance(description, str):\n cls.__discord_app_commands_group_description__ = description\n else:\n 
cls.__discord_app_commands_group_description__ = description.message\n cls.__discord_app_commands_group_locale_description__ = description\n\n if guild_only is not MISSING:\n cls.__discord_app_commands_guild_only__ = guild_only\n\n if default_permissions is not MISSING:\n cls.__discord_app_commands_default_permissions__ = default_permissions\n\n if cls.__module__ != __name__:\n cls.__discord_app_commands_has_module__ = True\n cls.__discord_app_commands_group_nsfw__ = nsfw\n\n def __init__(\n self,\n *,\n name: Union[str, locale_str] = MISSING,\n description: Union[str, locale_str] = MISSING,\n parent: Optional[Group] = None,\n guild_ids: Optional[List[int]] = None,\n guild_only: bool = MISSING,\n nsfw: bool = MISSING,\n auto_locale_strings: bool = True,\n default_permissions: Optional[Permissions] = MISSING,\n extras: Dict[Any, Any] = MISSING,\n ):\n cls = self.__class__\n\n if name is MISSING:\n name, locale = cls.__discord_app_commands_group_name__, cls.__discord_app_commands_group_locale_name__\n elif isinstance(name, str):\n name, locale = validate_name(name), None\n else:\n name, locale = validate_name(name.message), name\n self.name: str = name\n self._locale_name: Optional[locale_str] = locale\n\n if description is MISSING:\n description, locale = (\n cls.__discord_app_commands_group_description__,\n cls.__discord_app_commands_group_locale_description__,\n )\n elif isinstance(description, str):\n description, locale = description, None\n else:\n description, locale = description.message, description\n self.description: str = description\n self._locale_description: Optional[locale_str] = locale\n\n self._attr: Optional[str] = None\n self._owner_cls: Optional[Type[Any]] = None\n self._guild_ids: Optional[List[int]] = guild_ids or getattr(cls, '__discord_app_commands_default_guilds__', None)\n\n if default_permissions is MISSING:\n if cls.__discord_app_commands_default_permissions__ is MISSING:\n default_permissions = None\n else:\n default_permissions = cls.__discord_app_commands_default_permissions__\n\n self.default_permissions: Optional[Permissions] = default_permissions\n\n if guild_only is MISSING:\n if cls.__discord_app_commands_guild_only__ is MISSING:\n guild_only = False\n else:\n guild_only = cls.__discord_app_commands_guild_only__\n\n self.guild_only: bool = guild_only\n\n if nsfw is MISSING:\n nsfw = cls.__discord_app_commands_group_nsfw__\n\n self.nsfw: bool = nsfw\n\n if not self.description:\n raise TypeError('groups must have a description')\n\n self.parent: Optional[Group] = parent\n self.module: Optional[str]\n if cls.__discord_app_commands_has_module__:\n self.module = cls.__module__\n else:\n try:\n # This is pretty hacky\n # It allows the module to be fetched if someone just constructs a bare Group object though.\n self.module = inspect.currentframe().f_back.f_globals['__name__'] # type: ignore\n except (AttributeError, IndexError, KeyError):\n self.module = None\n\n self._children: Dict[str, Union[Command, Group]] = {}\n self.extras: Dict[Any, Any] = extras or {}\n\n bindings: Dict[Group, Group] = {}\n\n for child in self.__discord_app_commands_group_children__:\n # commands and groups created directly in this class (no parent)\n copy = (\n child._copy_with(parent=self, binding=self, bindings=bindings, set_on_binding=False)\n if not cls.__discord_app_commands_skip_init_binding__\n else child\n )\n\n self._children[copy.name] = copy\n if copy._attr and not cls.__discord_app_commands_skip_init_binding__:\n setattr(self, copy._attr, copy)\n\n if parent is not 
None:\n if parent.parent is not None:\n raise ValueError('groups can only be nested at most one level')\n parent.add_command(self)\n\n if auto_locale_strings:\n self._convert_to_locale_strings()\n\n def _convert_to_locale_strings(self) -> None:\n if self._locale_name is None:\n self._locale_name = locale_str(self.name)\n if self._locale_description is None:\n self._locale_description = locale_str(self.description)\n\n # I don't know if propagating to the children is the right behaviour here.\n\n def __set_name__(self, owner: Type[Any], name: str) -> None:\n self._attr = name\n self.module = owner.__module__\n self._owner_cls = owner\n\n def _copy_with(\n self,\n *,\n parent: Optional[Group],\n binding: Binding,\n bindings: MutableMapping[Group, Group] = MISSING,\n set_on_binding: bool = True,\n ) -> Group:\n bindings = {} if bindings is MISSING else bindings\n\n copy = shallow_copy(self)\n copy.parent = parent\n copy._children = {}\n\n bindings[self] = copy\n\n for child in self._children.values():\n child_copy = child._copy_with(parent=copy, binding=binding, bindings=bindings)\n child_copy.parent = copy\n copy._children[child_copy.name] = child_copy\n\n if isinstance(child_copy, Group) and child_copy._attr and set_on_binding:\n if binding.__class__ is child_copy._owner_cls:\n setattr(binding, child_copy._attr, child_copy)\n elif child_copy._owner_cls is copy.__class__:\n setattr(copy, child_copy._attr, child_copy)\n\n if copy._attr and set_on_binding:\n setattr(parent or binding, copy._attr, copy)\n\n return copy\n\n async def get_translated_payload(self, translator: Translator) -> Dict[str, Any]:\n base = self.to_dict()\n name_localizations: Dict[str, str] = {}\n description_localizations: Dict[str, str] = {}\n\n # Prevent creating these objects in a heavy loop\n name_context = TranslationContext(location=TranslationContextLocation.group_name, data=self)\n description_context = TranslationContext(location=TranslationContextLocation.group_description, data=self)\n for locale in Locale:\n if self._locale_name:\n translation = await translator._checked_translate(self._locale_name, locale, name_context)\n if translation is not None:\n name_localizations[locale.value] = translation\n\n if self._locale_description:\n translation = await translator._checked_translate(self._locale_description, locale, description_context)\n if translation is not None:\n description_localizations[locale.value] = translation\n\n base['name_localizations'] = name_localizations\n base['description_localizations'] = description_localizations\n base['options'] = [await child.get_translated_payload(translator) for child in self._children.values()]\n return base\n\n def to_dict(self) -> Dict[str, Any]:\n # If this has a parent command then it's part of a subcommand group\n # Otherwise, it's just a regular command\n option_type = 1 if self.parent is None else AppCommandOptionType.subcommand_group.value\n base: Dict[str, Any] = {\n 'name': self.name,\n 'description': self.description,\n 'type': option_type,\n 'options': [child.to_dict() for child in self._children.values()],\n }\n\n if self.parent is None:\n base['nsfw'] = self.nsfw\n base['dm_permission'] = not self.guild_only\n base['default_member_permissions'] = None if self.default_permissions is None else self.default_permissions.value\n\n return base\n\n @property\n def root_parent(self) -> Optional[Group]:\n \"\"\"Optional[:class:`Group`]: The parent of this group.\"\"\"\n return self.parent\n\n @property\n def qualified_name(self) -> str:\n \"\"\":class:`str`: 
Returns the fully qualified group name.\n\n The qualified name includes the parent name as well. For example,\n in a group like ``/foo bar`` the qualified name is ``foo bar``.\n \"\"\"\n\n if self.parent is None:\n return self.name\n return f'{self.parent.name} {self.name}'\n\n def _get_internal_command(self, name: str) -> Optional[Union[Command[Any, ..., Any], Group]]:\n return self._children.get(name)\n\n @property\n def commands(self) -> List[Union[Command[Any, ..., Any], Group]]:\n \"\"\"List[Union[:class:`Command`, :class:`Group`]]: The commands that this group contains.\"\"\"\n return list(self._children.values())\n\n def walk_commands(self) -> Generator[Union[Command[Any, ..., Any], Group], None, None]:\n \"\"\"An iterator that recursively walks through all commands that this group contains.\n\n Yields\n ---------\n Union[:class:`Command`, :class:`Group`]\n The commands in this group.\n \"\"\"\n\n for command in self._children.values():\n yield command\n if isinstance(command, Group):\n yield from command.walk_commands()\n\n @mark_overrideable\n async def on_error(self, interaction: Interaction, error: AppCommandError, /) -> None:\n \"\"\"|coro|\n\n A callback that is called when a child's command raises an :exc:`AppCommandError`.\n\n To get the command that failed, :attr:`discord.Interaction.command` should be used.\n\n The default implementation does nothing.\n\n Parameters\n -----------\n interaction: :class:`~discord.Interaction`\n The interaction that is being handled.\n error: :exc:`AppCommandError`\n The exception that was raised.\n \"\"\"\n\n pass\n\n def error(self, coro: ErrorFunc) -> ErrorFunc:\n \"\"\"A decorator that registers a coroutine as a local error handler.\n\n The local error handler is called whenever an exception is raised in a child command.\n The error handler must take 2 parameters, the interaction and the error.\n\n The error passed will be derived from :exc:`AppCommandError`.\n\n Parameters\n -----------\n coro: :ref:`coroutine <coroutine>`\n The coroutine to register as the local error handler.\n\n Raises\n -------\n TypeError\n The coroutine passed is not actually a coroutine, or is an invalid coroutine.\n \"\"\"\n\n if not inspect.iscoroutinefunction(coro):\n raise TypeError('The error handler must be a coroutine.')\n\n params = inspect.signature(coro).parameters\n if len(params) != 2:\n raise TypeError('The error handler must have 2 parameters.')\n\n self.on_error = coro\n return coro\n\n async def interaction_check(self, interaction: Interaction, /) -> bool:\n \"\"\"|coro|\n\n A callback that is called when an interaction happens within the group\n that checks whether a command inside the group should be executed.\n\n This is useful to override if, for example, you want to ensure that the\n interaction author is a given user.\n\n The default implementation of this returns ``True``.\n\n .. note::\n\n If an exception occurs within the body then the check\n is considered a failure and error handlers such as\n :meth:`on_error` is called. 
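A hedged sketch of subclassing `Group` with a group-wide `interaction_check` (the `Moderation` group and its permission policy are hypothetical):

```python
import discord
from discord import app_commands

class Moderation(app_commands.Group):
    """Moderation commands"""

    # Runs before any child command in this group; returning False
    # fails the invocation with CheckFailure.
    async def interaction_check(self, interaction: discord.Interaction) -> bool:
        return interaction.permissions.manage_messages

    @app_commands.command()
    async def purge(self, interaction: discord.Interaction, amount: int):
        """Bulk deletes messages"""
        await interaction.response.send_message(f'Purged {amount} messages')

group = Moderation()
assert group.name == 'moderation'  # defaults to kebab-case class name
```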
See :exc:`AppCommandError`\n for more information.\n\n Parameters\n -----------\n interaction: :class:`~discord.Interaction`\n The interaction that occurred.\n\n Returns\n ---------\n :class:`bool`\n Whether the view children's callbacks should be called.\n \"\"\"\n\n return True\n\n def add_command(self, command: Union[Command[Any, ..., Any], Group], /, *, override: bool = False) -> None:\n \"\"\"Adds a command or group to this group's internal list of commands.\n\n Parameters\n -----------\n command: Union[:class:`Command`, :class:`Group`]\n The command or group to add.\n override: :class:`bool`\n Whether to override a pre-existing command or group with the same name.\n If ``False`` then an exception is raised.\n\n Raises\n -------\n CommandAlreadyRegistered\n The command or group is already registered. Note that the :attr:`CommandAlreadyRegistered.guild_id`\n attribute will always be ``None`` in this case.\n ValueError\n There are too many commands already registered or the group is too\n deeply nested.\n TypeError\n The wrong command type was passed.\n \"\"\"\n\n if not isinstance(command, (Command, Group)):\n raise TypeError(f'expected Command or Group not {command.__class__.__name__}')\n\n if isinstance(command, Group) and self.parent is not None:\n # In a tree like so:\n # <group>\n # <self>\n # <group>\n # this needs to be forbidden\n raise ValueError(f'{command.name!r} is too nested, groups can only be nested at most one level')\n\n if not override and command.name in self._children:\n raise CommandAlreadyRegistered(command.name, guild_id=None)\n\n self._children[command.name] = command\n command.parent = self\n if len(self._children) > 25:\n raise ValueError('maximum number of child commands exceeded')\n\n def remove_command(self, name: str, /) -> Optional[Union[Command[Any, ..., Any], Group]]:\n \"\"\"Removes a command or group from the internal list of commands.\n\n Parameters\n -----------\n name: :class:`str`\n The name of the command or group to remove.\n\n Returns\n --------\n Optional[Union[:class:`~discord.app_commands.Command`, :class:`~discord.app_commands.Group`]]\n The command that was removed. If nothing was removed\n then ``None`` is returned instead.\n \"\"\"\n\n self._children.pop(name, None)\n\n def get_command(self, name: str, /) -> Optional[Union[Command[Any, ..., Any], Group]]:\n \"\"\"Retrieves a command or group from its name.\n\n Parameters\n -----------\n name: :class:`str`\n The name of the command or group to retrieve.\n\n Returns\n --------\n Optional[Union[:class:`~discord.app_commands.Command`, :class:`~discord.app_commands.Group`]]\n The command or group that was retrieved. If nothing was found\n then ``None`` is returned instead.\n \"\"\"\n return self._children.get(name)\n\n def command(\n self,\n *,\n name: Union[str, locale_str] = MISSING,\n description: Union[str, locale_str] = MISSING,\n nsfw: bool = False,\n auto_locale_strings: bool = True,\n extras: Dict[Any, Any] = MISSING,\n ) -> Callable[[CommandCallback[GroupT, P, T]], Command[GroupT, P, T]]:\n \"\"\"A decorator that creates an application command from a regular function under this group.\n\n Parameters\n ------------\n name: Union[:class:`str`, :class:`locale_str`]\n The name of the application command. If not given, it defaults to a lower-case\n version of the callback name.\n description: Union[:class:`str`, :class:`locale_str`]\n The description of the application command. This shows up in the UI to describe\n the application command. 
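A small sketch of managing children on a bare `Group` instance (the names are hypothetical):

```python
import discord
from discord import app_commands

group = app_commands.Group(name='tools', description='Utility commands')

@group.command()
async def ping(interaction: discord.Interaction):
    """Pong!"""
    await interaction.response.send_message('Pong!')

assert group.get_command('ping') is ping
assert ping.qualified_name == 'tools ping'
group.remove_command('ping')
assert group.get_command('ping') is None
```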
If not given, it defaults to the first line of the docstring\n of the callback shortened to 100 characters.\n nsfw: :class:`bool`\n Whether the command is NSFW and should only work in NSFW channels. Defaults to ``False``.\n auto_locale_strings: :class:`bool`\n If this is set to ``True``, then all translatable strings will implicitly\n be wrapped into :class:`locale_str` rather than :class:`str`. This could\n avoid some repetition and be more ergonomic for certain defaults such\n as default command names, command descriptions, and parameter names.\n Defaults to ``True``.\n extras: :class:`dict`\n A dictionary that can be used to store extraneous data.\n The library will not touch any values or keys within this dictionary.\n \"\"\"\n\n def decorator(func: CommandCallback[GroupT, P, T]) -> Command[GroupT, P, T]:\n if not inspect.iscoroutinefunction(func):\n raise TypeError('command function must be a coroutine function')\n\n if description is MISSING:\n if func.__doc__ is None:\n desc = '…'\n else:\n desc = _shorten(func.__doc__)\n else:\n desc = description\n\n command = Command(\n name=name if name is not MISSING else func.__name__,\n description=desc,\n callback=func,\n nsfw=nsfw,\n parent=self,\n auto_locale_strings=auto_locale_strings,\n extras=extras,\n )\n self.add_command(command)\n return command\n\n return decorator\n\n\ndef command(\n *,\n name: Union[str, locale_str] = MISSING,\n description: Union[str, locale_str] = MISSING,\n nsfw: bool = False,\n auto_locale_strings: bool = True,\n extras: Dict[Any, Any] = MISSING,\n) -> Callable[[CommandCallback[GroupT, P, T]], Command[GroupT, P, T]]:\n \"\"\"Creates an application command from a regular function.\n\n Parameters\n ------------\n name: :class:`str`\n The name of the application command. If not given, it defaults to a lower-case\n version of the callback name.\n description: :class:`str`\n The description of the application command. This shows up in the UI to describe\n the application command. If not given, it defaults to the first line of the docstring\n of the callback shortened to 100 characters.\n nsfw: :class:`bool`\n Whether the command is NSFW and should only work in NSFW channels. Defaults to ``False``.\n\n Due to a Discord limitation, this does not work on subcommands.\n auto_locale_strings: :class:`bool`\n If this is set to ``True``, then all translatable strings will implicitly\n be wrapped into :class:`locale_str` rather than :class:`str`. 
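A quick sketch of the description fallback implemented by the decorator above (the `echo` command is hypothetical):

```python
import discord
from discord import app_commands

@app_commands.command()
async def echo(interaction: discord.Interaction, text: str):
    """Repeats your text back to you"""
    await interaction.response.send_message(text)

# With no explicit description the docstring is used, shortened to at most
# 100 characters; with no docstring the placeholder '…' is used instead.
print(echo.name, '->', echo.description)
```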
This could\n avoid some repetition and be more ergonomic for certain defaults such\n as default command names, command descriptions, and parameter names.\n Defaults to ``True``.\n extras: :class:`dict`\n A dictionary that can be used to store extraneous data.\n The library will not touch any values or keys within this dictionary.\n \"\"\"\n\n def decorator(func: CommandCallback[GroupT, P, T]) -> Command[GroupT, P, T]:\n if not inspect.iscoroutinefunction(func):\n raise TypeError('command function must be a coroutine function')\n\n if description is MISSING:\n if func.__doc__ is None:\n desc = '…'\n else:\n desc = _shorten(func.__doc__)\n else:\n desc = description\n\n return Command(\n name=name if name is not MISSING else func.__name__,\n description=desc,\n callback=func,\n parent=None,\n nsfw=nsfw,\n auto_locale_strings=auto_locale_strings,\n extras=extras,\n )\n\n return decorator\n\n\ndef context_menu(\n *,\n name: Union[str, locale_str] = MISSING,\n nsfw: bool = False,\n auto_locale_strings: bool = True,\n extras: Dict[Any, Any] = MISSING,\n) -> Callable[[ContextMenuCallback], ContextMenu]:\n \"\"\"Creates an application command context menu from a regular function.\n\n This function must have a signature of :class:`~discord.Interaction` as its first parameter\n and taking either a :class:`~discord.Member`, :class:`~discord.User`, or :class:`~discord.Message`,\n or a :obj:`typing.Union` of ``Member`` and ``User`` as its second parameter.\n\n Examples\n ---------\n\n .. code-block:: python3\n\n @app_commands.context_menu()\n async def react(interaction: discord.Interaction, message: discord.Message):\n await interaction.response.send_message('Very cool message!', ephemeral=True)\n\n @app_commands.context_menu()\n async def ban(interaction: discord.Interaction, user: discord.Member):\n await interaction.response.send_message(f'Should I actually ban {user}...', ephemeral=True)\n\n Parameters\n ------------\n name: Union[:class:`str`, :class:`locale_str`]\n The name of the context menu command. If not given, it defaults to a title-case\n version of the callback name. Note that unlike regular slash commands this can\n have spaces and upper case characters in the name.\n nsfw: :class:`bool`\n Whether the command is NSFW and should only work in NSFW channels. Defaults to ``False``.\n\n Due to a Discord limitation, this does not work on subcommands.\n auto_locale_strings: :class:`bool`\n If this is set to ``True``, then all translatable strings will implicitly\n be wrapped into :class:`locale_str` rather than :class:`str`. This could\n avoid some repetition and be more ergonomic for certain defaults such\n as default command names, command descriptions, and parameter names.\n Defaults to ``True``.\n extras: :class:`dict`\n A dictionary that can be used to store extraneous data.\n The library will not touch any values or keys within this dictionary.\n \"\"\"\n\n def decorator(func: ContextMenuCallback) -> ContextMenu:\n if not inspect.iscoroutinefunction(func):\n raise TypeError('context menu function must be a coroutine function')\n\n actual_name = func.__name__.title() if name is MISSING else name\n return ContextMenu(\n name=actual_name,\n nsfw=nsfw,\n callback=func,\n auto_locale_strings=auto_locale_strings,\n extras=extras,\n )\n\n return decorator\n\n\ndef describe(**parameters: Union[str, locale_str]) -> Callable[[T], T]:\n r'''Describes the given parameters by their name using the key of the keyword argument\n as the name.\n\n Example:\n\n .. 
code-block:: python3\n\n @app_commands.command(description='Bans a member')\n @app_commands.describe(member='the member to ban')\n async def ban(interaction: discord.Interaction, member: discord.Member):\n await interaction.response.send_message(f'Banned {member}')\n\n Alternatively, you can describe parameters using Google, Sphinx, or Numpy style docstrings.\n\n Example:\n\n .. code-block:: python3\n\n @app_commands.command()\n async def ban(interaction: discord.Interaction, member: discord.Member):\n \"\"\"Bans a member\n\n Parameters\n -----------\n member: discord.Member\n the member to ban\n \"\"\"\n await interaction.response.send_message(f'Banned {member}')\n\n Parameters\n -----------\n \\*\\*parameters: Union[:class:`str`, :class:`locale_str`]\n The description of the parameters.\n\n Raises\n --------\n TypeError\n The parameter name is not found.\n '''\n\n def decorator(inner: T) -> T:\n if isinstance(inner, Command):\n _populate_descriptions(inner._params, parameters)\n else:\n try:\n inner.__discord_app_commands_param_description__.update(parameters) # type: ignore # Runtime attribute access\n except AttributeError:\n inner.__discord_app_commands_param_description__ = parameters # type: ignore # Runtime attribute assignment\n\n return inner\n\n return decorator\n\n\ndef rename(**parameters: Union[str, locale_str]) -> Callable[[T], T]:\n r\"\"\"Renames the given parameters by their name using the key of the keyword argument\n as the name.\n\n This renames the parameter within the Discord UI. When referring to the parameter in other\n decorators, the parameter name used in the function is used instead of the renamed one.\n\n Example:\n\n .. code-block:: python3\n\n @app_commands.command()\n @app_commands.rename(the_member_to_ban='member')\n async def ban(interaction: discord.Interaction, the_member_to_ban: discord.Member):\n await interaction.response.send_message(f'Banned {the_member_to_ban}')\n\n Parameters\n -----------\n \\*\\*parameters: Union[:class:`str`, :class:`locale_str`]\n The name of the parameters.\n\n Raises\n --------\n ValueError\n The parameter name is already used by another parameter.\n TypeError\n The parameter name is not found.\n \"\"\"\n\n def decorator(inner: T) -> T:\n if isinstance(inner, Command):\n _populate_renames(inner._params, parameters)\n else:\n try:\n inner.__discord_app_commands_param_rename__.update(parameters) # type: ignore # Runtime attribute access\n except AttributeError:\n inner.__discord_app_commands_param_rename__ = parameters # type: ignore # Runtime attribute assignment\n\n return inner\n\n return decorator\n\n\ndef choices(**parameters: List[Choice[ChoiceT]]) -> Callable[[T], T]:\n r\"\"\"Instructs the given parameters by their name to use the given choices for their choices.\n\n Example:\n\n .. code-block:: python3\n\n @app_commands.command()\n @app_commands.describe(fruits='fruits to choose from')\n @app_commands.choices(fruits=[\n Choice(name='apple', value=1),\n Choice(name='banana', value=2),\n Choice(name='cherry', value=3),\n ])\n async def fruit(interaction: discord.Interaction, fruits: Choice[int]):\n await interaction.response.send_message(f'Your favourite fruit is {fruits.name}.')\n\n .. note::\n\n This is not the only way to provide choices to a command. There are two more ergonomic ways\n of doing this. The first one is to use a :obj:`typing.Literal` annotation:\n\n .. 
code-block:: python3\n\n @app_commands.command()\n @app_commands.describe(fruits='fruits to choose from')\n async def fruit(interaction: discord.Interaction, fruits: Literal['apple', 'banana', 'cherry']):\n await interaction.response.send_message(f'Your favourite fruit is {fruits}.')\n\n The second way is to use an :class:`enum.Enum`:\n\n .. code-block:: python3\n\n class Fruits(enum.Enum):\n apple = 1\n banana = 2\n cherry = 3\n\n @app_commands.command()\n @app_commands.describe(fruits='fruits to choose from')\n async def fruit(interaction: discord.Interaction, fruits: Fruits):\n await interaction.response.send_message(f'Your favourite fruit is {fruits}.')\n\n\n Parameters\n -----------\n \\*\\*parameters\n The choices of the parameters.\n\n Raises\n --------\n TypeError\n The parameter name is not found or the parameter type was incorrect.\n \"\"\"\n\n def decorator(inner: T) -> T:\n if isinstance(inner, Command):\n _populate_choices(inner._params, parameters)\n else:\n try:\n inner.__discord_app_commands_param_choices__.update(parameters) # type: ignore # Runtime attribute access\n except AttributeError:\n inner.__discord_app_commands_param_choices__ = parameters # type: ignore # Runtime attribute assignment\n\n return inner\n\n return decorator\n\n\ndef autocomplete(**parameters: AutocompleteCallback[GroupT, ChoiceT]) -> Callable[[T], T]:\n r\"\"\"Associates the given parameters with the given autocomplete callback.\n\n Autocomplete is only supported on types that have :class:`str`, :class:`int`, or :class:`float`\n values.\n\n :func:`Checks <check>` are supported, however they must be attached to the autocomplete\n callback in order to work. Checks attached to the command are ignored when invoking the autocomplete\n callback.\n\n For more information, see the :meth:`Command.autocomplete` documentation.\n\n .. warning::\n The choices returned from this coroutine are suggestions. The user may ignore them and input their own value.\n\n Example:\n\n .. code-block:: python3\n\n async def fruit_autocomplete(\n interaction: discord.Interaction,\n current: str,\n ) -> List[app_commands.Choice[str]]:\n fruits = ['Banana', 'Pineapple', 'Apple', 'Watermelon', 'Melon', 'Cherry']\n return [\n app_commands.Choice(name=fruit, value=fruit)\n for fruit in fruits if current.lower() in fruit.lower()\n ]\n\n @app_commands.command()\n @app_commands.autocomplete(fruit=fruit_autocomplete)\n async def fruits(interaction: discord.Interaction, fruit: str):\n await interaction.response.send_message(f'Your favourite fruit seems to be {fruit}')\n\n Parameters\n -----------\n \\*\\*parameters\n The parameters to mark as autocomplete.\n\n Raises\n --------\n TypeError\n The parameter name is not found or the parameter type was incorrect.\n \"\"\"\n\n def decorator(inner: T) -> T:\n if isinstance(inner, Command):\n _populate_autocomplete(inner._params, parameters)\n else:\n try:\n inner.__discord_app_commands_param_autocomplete__.update(parameters) # type: ignore # Runtime attribute access\n except AttributeError:\n inner.__discord_app_commands_param_autocomplete__ = parameters # type: ignore # Runtime attribute assignment\n\n return inner\n\n return decorator\n\n\ndef guilds(*guild_ids: Union[Snowflake, int]) -> Callable[[T], T]:\n r\"\"\"Associates the given guilds with the command.\n\n When the command instance is added to a :class:`CommandTree`, the guilds that are\n specified by this decorator become the default guilds that it's added to rather\n than being a global command.\n\n .. 
note::\n\n Due to an implementation quirk and Python limitation, if this is used in conjunction\n with the :meth:`CommandTree.command` or :meth:`CommandTree.context_menu` decorator\n then this must go below that decorator.\n\n Example:\n\n .. code-block:: python3\n\n MY_GUILD_ID = discord.Object(...) # Guild ID here\n\n @app_commands.command()\n @app_commands.guilds(MY_GUILD_ID)\n async def bonk(interaction: discord.Interaction):\n await interaction.response.send_message('Bonk', ephemeral=True)\n\n Parameters\n -----------\n \\*guild_ids: Union[:class:`int`, :class:`~discord.abc.Snowflake`]\n The guilds to associate this command with. The command tree will\n use this as the default when added rather than adding it as a global\n command.\n \"\"\"\n\n defaults: List[int] = [g if isinstance(g, int) else g.id for g in guild_ids]\n\n def decorator(inner: T) -> T:\n if isinstance(inner, (Group, ContextMenu)):\n inner._guild_ids = defaults\n elif isinstance(inner, Command):\n if inner.parent is not None:\n raise ValueError('child commands of a group cannot have default guilds set')\n\n inner._guild_ids = defaults\n else:\n # Runtime attribute assignment\n inner.__discord_app_commands_default_guilds__ = defaults # type: ignore\n\n return inner\n\n return decorator\n\n\ndef check(predicate: Check) -> Callable[[T], T]:\n r\"\"\"A decorator that adds a check to an application command.\n\n These checks should be predicates that take in a single parameter taking\n a :class:`~discord.Interaction`. If the check returns a ``False``\\-like value then\n during invocation a :exc:`CheckFailure` exception is raised and sent to\n the appropriate error handlers.\n\n These checks can be either a coroutine or not.\n\n Examples\n ---------\n\n Creating a basic check to see if the command invoker is you.\n\n .. code-block:: python3\n\n def check_if_it_is_me(interaction: discord.Interaction) -> bool:\n return interaction.user.id == 85309593344815104\n\n @tree.command()\n @app_commands.check(check_if_it_is_me)\n async def only_for_me(interaction: discord.Interaction):\n await interaction.response.send_message('I know you!', ephemeral=True)\n\n Transforming common checks into its own decorator:\n\n .. code-block:: python3\n\n def is_me():\n def predicate(interaction: discord.Interaction) -> bool:\n return interaction.user.id == 85309593344815104\n return app_commands.check(predicate)\n\n @tree.command()\n @is_me()\n async def only_me(interaction: discord.Interaction):\n await interaction.response.send_message('Only you!')\n\n Parameters\n -----------\n predicate: Callable[[:class:`~discord.Interaction`], :class:`bool`]\n The predicate to check if the command should be invoked.\n \"\"\"\n\n def decorator(func: CheckInputParameter) -> CheckInputParameter:\n if isinstance(func, (Command, ContextMenu)):\n func.checks.append(predicate)\n else:\n if not hasattr(func, '__discord_app_commands_checks__'):\n func.__discord_app_commands_checks__ = []\n\n func.__discord_app_commands_checks__.append(predicate)\n\n return func\n\n return decorator # type: ignore\n\n\n@overload\ndef guild_only(func: None = ...) 
-> Callable[[T], T]:\n ...\n\n\n@overload\ndef guild_only(func: T) -> T:\n ...\n\n\ndef guild_only(func: Optional[T] = None) -> Union[T, Callable[[T], T]]:\n \"\"\"A decorator that indicates this command can only be used in a guild context.\n\n This is **not** implemented as a :func:`check`, and is instead verified by Discord server side.\n Therefore, there is no error handler called when a command is used within a private message.\n\n This decorator can be called with or without parentheses.\n\n Due to a Discord limitation, this decorator does nothing in subcommands and is ignored.\n\n Examples\n ---------\n\n .. code-block:: python3\n\n @app_commands.command()\n @app_commands.guild_only()\n async def my_guild_only_command(interaction: discord.Interaction) -> None:\n await interaction.response.send_message('I am only available in guilds!')\n \"\"\"\n\n def inner(f: T) -> T:\n if isinstance(f, (Command, Group, ContextMenu)):\n f.guild_only = True\n else:\n f.__discord_app_commands_guild_only__ = True # type: ignore # Runtime attribute assignment\n return f\n\n # Check if called with parentheses or not\n if func is None:\n # Called with parentheses\n return inner\n else:\n return inner(func)\n\n\ndef default_permissions(**perms: bool) -> Callable[[T], T]:\n r\"\"\"A decorator that sets the default permissions needed to execute this command.\n\n When this decorator is used, by default users must have these permissions to execute the command.\n However, an administrator can change the permissions needed to execute this command using the official\n client. Therefore, this only serves as a hint.\n\n Setting an empty permissions field, including via calling this with no arguments, will disallow anyone\n except server administrators from using the command in a guild.\n\n This is sent to Discord server side, and is not a :func:`check`. Therefore, error handlers are not called.\n\n Due to a Discord limitation, this decorator does nothing in subcommands and is ignored.\n\n .. warning::\n\n This serves as a *hint* and members are *not* required to have the permissions given to actually\n execute this command. If you want to ensure that members have the permissions needed, consider using\n :func:`~discord.app_commands.checks.has_permissions` instead.\n\n Parameters\n -----------\n \\*\\*perms: :class:`bool`\n Keyword arguments denoting the permissions to set as the default.\n\n Example\n ---------\n\n .. code-block:: python3\n\n @app_commands.command()\n @app_commands.default_permissions(manage_messages=True)\n async def test(interaction: discord.Interaction):\n await interaction.response.send_message('You may or may not have manage messages.')\n \"\"\"\n\n permissions = Permissions(**perms)\n\n def decorator(func: T) -> T:\n if isinstance(func, (Command, Group, ContextMenu)):\n func.default_permissions = permissions\n else:\n func.__discord_app_commands_default_permissions__ = permissions # type: ignore # Runtime attribute assignment\n\n return func\n\n return decorator\n",
"path": "discord/app_commands/commands.py"
}
] | 10_5 | python | import unittest
import sys


class TestCommandGroupInitialization(unittest.TestCase):
    def test_group_initialization_without_name(self):
        from discord.app_commands import Group
        with self.assertRaises(TypeError):
            Group(description="Test Description", parent=None)

    def test_group_initialization_without_description(self):
        from discord.app_commands import Group
        with self.assertRaises(TypeError):
            Group(name="test", parent=None)

    def test_group_initialization_with_name_and_description(self):
        from discord.app_commands import Group
        try:
            Group(name="test", description="Test Description", parent=None)
        except TypeError:
            self.fail("Group initialization raised TypeError unexpectedly.")


def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCommandGroupInitialization))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == '__main__':
    main()
|
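The test script above exercises direct Group construction: both name and description must be supplied as keyword arguments, or TypeError is raised. A minimal sketch of the behavior the tests assert, assuming a discord.py install patched per this row (values are illustrative):

    from discord import app_commands

    # Missing name: the patched Group rejects construction outright.
    try:
        app_commands.Group(description="Test Description", parent=None)
    except TypeError as exc:
        print(f"rejected as expected: {exc}")

    # Both provided: construction succeeds.
    group = app_commands.Group(name="test", description="Test Description", parent=None)
    print(group.name, group.description)
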
https://github.com/teamqurrent/discord.py | Enhance the discord.py library by adding a remove_dynamic_items method to the `Client`, `ConnectionState`, and `ViewStore` classes, enabling the removal of registered DynamicItem classes from persistent listening. In `client.py`, implement remove_dynamic_items in the `Client` class to validate the given classes and pass them to the _connection object's corresponding method. In `state.py`, add a matching remove_dynamic_items method to the `ConnectionState` class, which forwards the request to the ViewStore. Finally, in `view.py`, update the `ViewStore` class with a remove_dynamic_items method that removes the specified dynamic items from its internal storage. Together, these methods ensure that dynamic items can be cleanly unregistered from the system. | 4182306 | discord | python3.9 | 7c3868ef | diff --git a/discord/client.py b/discord/client.py
--- a/discord/client.py
+++ b/discord/client.py
@@ -2681,7 +2681,7 @@ class Client:
         return state.add_dm_channel(data)
 
     def add_dynamic_items(self, *items: Type[DynamicItem[Item[Any]]]) -> None:
-        r"""Registers a :class:`~discord.ui.DynamicItem` class for persistent listening.
+        r"""Registers :class:`~discord.ui.DynamicItem` classes for persistent listening.
 
         This method accepts *class types* rather than instances.
 
@@ -2695,7 +2695,7 @@ class Client:
         Raises
         -------
         TypeError
-            The class is not a subclass of :class:`~discord.ui.DynamicItem`.
+            A class is not a subclass of :class:`~discord.ui.DynamicItem`.
         """
 
         for item in items:
@@ -2704,6 +2704,30 @@ class Client:
 
         self._connection.store_dynamic_items(*items)
 
+    def remove_dynamic_items(self, *items: Type[DynamicItem[Item[Any]]]) -> None:
+        r"""Removes :class:`~discord.ui.DynamicItem` classes from persistent listening.
+
+        This method accepts *class types* rather than instances.
+
+        .. versionadded:: 2.4
+
+        Parameters
+        -----------
+        \*items: Type[:class:`~discord.ui.DynamicItem`]
+            The classes of dynamic items to remove.
+
+        Raises
+        -------
+        TypeError
+            A class is not a subclass of :class:`~discord.ui.DynamicItem`.
+        """
+
+        for item in items:
+            if not issubclass(item, DynamicItem):
+                raise TypeError(f'expected subclass of DynamicItem not {item.__name__}')
+
+        self._connection.remove_dynamic_items(*items)
+
     def add_view(self, view: View, *, message_id: Optional[int] = None) -> None:
         """Registers a :class:`~discord.ui.View` for persistent listening.
 
diff --git a/discord/state.py b/discord/state.py
--- a/discord/state.py
+++ b/discord/state.py
@@ -401,6 +401,9 @@ class ConnectionState(Generic[ClientT]):
     def store_dynamic_items(self, *items: Type[DynamicItem[Item[Any]]]) -> None:
         self._view_store.add_dynamic_items(*items)
 
+    def remove_dynamic_items(self, *items: Type[DynamicItem[Item[Any]]]) -> None:
+        self._view_store.remove_dynamic_items(*items)
+
     @property
     def persistent_views(self) -> Sequence[View]:
         return self._view_store.persistent_views
diff --git a/discord/ui/view.py b/discord/ui/view.py
--- a/discord/ui/view.py
+++ b/discord/ui/view.py
@@ -557,6 +557,11 @@ class ViewStore:
             pattern = item.__discord_ui_compiled_template__
             self._dynamic_items[pattern] = item
 
+    def remove_dynamic_items(self, *items: Type[DynamicItem[Item[Any]]]) -> None:
+        for item in items:
+            pattern = item.__discord_ui_compiled_template__
+            self._dynamic_items.pop(pattern, None)
+
     def add_view(self, view: View, message_id: Optional[int] = None) -> None:
         view._start_listening_from_store(self)
         if view.__discord_ui_modal__:
| [
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nimport datetime\nimport logging\nfrom typing import (\n TYPE_CHECKING,\n Any,\n AsyncIterator,\n Callable,\n Coroutine,\n Dict,\n Generator,\n List,\n Literal,\n Optional,\n Sequence,\n Tuple,\n Type,\n TypeVar,\n Union,\n overload,\n)\n\nimport aiohttp\n\nfrom .user import User, ClientUser\nfrom .invite import Invite\nfrom .template import Template\nfrom .widget import Widget\nfrom .guild import Guild\nfrom .emoji import Emoji\nfrom .channel import _threaded_channel_factory, PartialMessageable\nfrom .enums import ChannelType\nfrom .mentions import AllowedMentions\nfrom .errors import *\nfrom .enums import Status\nfrom .flags import ApplicationFlags, Intents\nfrom .gateway import *\nfrom .activity import ActivityTypes, BaseActivity, create_activity\nfrom .voice_client import VoiceClient\nfrom .http import HTTPClient\nfrom .state import ConnectionState\nfrom . 
import utils\nfrom .utils import MISSING, time_snowflake\nfrom .object import Object\nfrom .backoff import ExponentialBackoff\nfrom .webhook import Webhook\nfrom .appinfo import AppInfo\nfrom .ui.view import View\nfrom .ui.dynamic import DynamicItem\nfrom .stage_instance import StageInstance\nfrom .threads import Thread\nfrom .sticker import GuildSticker, StandardSticker, StickerPack, _sticker_factory\n\nif TYPE_CHECKING:\n from types import TracebackType\n\n from typing_extensions import Self\n\n from .abc import Messageable, PrivateChannel, Snowflake, SnowflakeTime\n from .app_commands import Command, ContextMenu\n from .automod import AutoModAction, AutoModRule\n from .channel import DMChannel, GroupChannel\n from .ext.commands import AutoShardedBot, Bot, Context, CommandError\n from .guild import GuildChannel\n from .integrations import Integration\n from .interactions import Interaction\n from .member import Member, VoiceState\n from .message import Message\n from .raw_models import (\n RawAppCommandPermissionsUpdateEvent,\n RawBulkMessageDeleteEvent,\n RawIntegrationDeleteEvent,\n RawMemberRemoveEvent,\n RawMessageDeleteEvent,\n RawMessageUpdateEvent,\n RawReactionActionEvent,\n RawReactionClearEmojiEvent,\n RawReactionClearEvent,\n RawThreadDeleteEvent,\n RawThreadMembersUpdate,\n RawThreadUpdateEvent,\n RawTypingEvent,\n )\n from .reaction import Reaction\n from .role import Role\n from .scheduled_event import ScheduledEvent\n from .threads import ThreadMember\n from .types.guild import Guild as GuildPayload\n from .ui.item import Item\n from .voice_client import VoiceProtocol\n from .audit_logs import AuditLogEntry\n\n\n# fmt: off\n__all__ = (\n 'Client',\n)\n# fmt: on\n\nT = TypeVar('T')\nCoro = Coroutine[Any, Any, T]\nCoroT = TypeVar('CoroT', bound=Callable[..., Coro[Any]])\n\n_log = logging.getLogger(__name__)\n\n\nclass _LoopSentinel:\n __slots__ = ()\n\n def __getattr__(self, attr: str) -> None:\n msg = (\n 'loop attribute cannot be accessed in non-async contexts. '\n 'Consider using either an asynchronous main function and passing it to asyncio.run or '\n 'using asynchronous initialisation hooks such as Client.setup_hook'\n )\n raise AttributeError(msg)\n\n\n_loop: Any = _LoopSentinel()\n\n\nclass Client:\n r\"\"\"Represents a client connection that connects to Discord.\n This class is used to interact with the Discord WebSocket and API.\n\n .. container:: operations\n\n .. describe:: async with x\n\n Asynchronously initialises the client and automatically cleans up.\n\n .. versionadded:: 2.0\n\n A number of options can be passed to the :class:`Client`.\n\n Parameters\n -----------\n max_messages: Optional[:class:`int`]\n The maximum number of messages to store in the internal message cache.\n This defaults to ``1000``. Passing in ``None`` disables the message cache.\n\n .. versionchanged:: 1.3\n Allow disabling the message cache and change the default size to ``1000``.\n proxy: Optional[:class:`str`]\n Proxy URL.\n proxy_auth: Optional[:class:`aiohttp.BasicAuth`]\n An object that represents proxy HTTP Basic Authorization.\n shard_id: Optional[:class:`int`]\n Integer starting at ``0`` and less than :attr:`.shard_count`.\n shard_count: Optional[:class:`int`]\n The total number of shards.\n application_id: :class:`int`\n The client's application ID.\n intents: :class:`Intents`\n The intents that you want to enable for the session. This is a way of\n disabling and enabling certain gateway events from triggering and being sent.\n\n .. versionadded:: 1.5\n\n .. 
versionchanged:: 2.0\n Parameter is now required.\n member_cache_flags: :class:`MemberCacheFlags`\n Allows for finer control over how the library caches members.\n If not given, defaults to cache as much as possible with the\n currently selected intents.\n\n .. versionadded:: 1.5\n chunk_guilds_at_startup: :class:`bool`\n Indicates if :func:`.on_ready` should be delayed to chunk all guilds\n at start-up if necessary. This operation is incredibly slow for large\n amounts of guilds. The default is ``True`` if :attr:`Intents.members`\n is ``True``.\n\n .. versionadded:: 1.5\n status: Optional[:class:`.Status`]\n A status to start your presence with upon logging on to Discord.\n activity: Optional[:class:`.BaseActivity`]\n An activity to start your presence with upon logging on to Discord.\n allowed_mentions: Optional[:class:`AllowedMentions`]\n Control how the client handles mentions by default on every message sent.\n\n .. versionadded:: 1.4\n heartbeat_timeout: :class:`float`\n The maximum numbers of seconds before timing out and restarting the\n WebSocket in the case of not receiving a HEARTBEAT_ACK. Useful if\n processing the initial packets take too long to the point of disconnecting\n you. The default timeout is 60 seconds.\n guild_ready_timeout: :class:`float`\n The maximum number of seconds to wait for the GUILD_CREATE stream to end before\n preparing the member cache and firing READY. The default timeout is 2 seconds.\n\n .. versionadded:: 1.4\n assume_unsync_clock: :class:`bool`\n Whether to assume the system clock is unsynced. This applies to the ratelimit handling\n code. If this is set to ``True``, the default, then the library uses the time to reset\n a rate limit bucket given by Discord. If this is ``False`` then your system clock is\n used to calculate how long to sleep for. If this is set to ``False`` it is recommended to\n sync your system clock to Google's NTP server.\n\n .. versionadded:: 1.3\n enable_debug_events: :class:`bool`\n Whether to enable events that are useful only for debugging gateway related information.\n\n Right now this involves :func:`on_socket_raw_receive` and :func:`on_socket_raw_send`. If\n this is ``False`` then those events will not be dispatched (due to performance considerations).\n To enable these events, this must be set to ``True``. Defaults to ``False``.\n\n .. versionadded:: 2.0\n http_trace: :class:`aiohttp.TraceConfig`\n The trace configuration to use for tracking HTTP requests the library does using ``aiohttp``.\n This allows you to check requests the library is using. For more information, check the\n `aiohttp documentation <https://docs.aiohttp.org/en/stable/client_advanced.html#client-tracing>`_.\n\n .. versionadded:: 2.0\n max_ratelimit_timeout: Optional[:class:`float`]\n The maximum number of seconds to wait when a non-global rate limit is encountered.\n If a request requires sleeping for more than the seconds passed in, then\n :exc:`~discord.RateLimited` will be raised. By default, there is no timeout limit.\n In order to prevent misuse and unnecessary bans, the minimum value this can be\n set to is ``30.0`` seconds.\n\n .. versionadded:: 2.0\n\n Attributes\n -----------\n ws\n The websocket gateway the client is currently connected to. 
Could be ``None``.\n \"\"\"\n\n def __init__(self, *, intents: Intents, **options: Any) -> None:\n self.loop: asyncio.AbstractEventLoop = _loop\n # self.ws is set in the connect method\n self.ws: DiscordWebSocket = None # type: ignore\n self._listeners: Dict[str, List[Tuple[asyncio.Future, Callable[..., bool]]]] = {}\n self.shard_id: Optional[int] = options.get('shard_id')\n self.shard_count: Optional[int] = options.get('shard_count')\n\n proxy: Optional[str] = options.pop('proxy', None)\n proxy_auth: Optional[aiohttp.BasicAuth] = options.pop('proxy_auth', None)\n unsync_clock: bool = options.pop('assume_unsync_clock', True)\n http_trace: Optional[aiohttp.TraceConfig] = options.pop('http_trace', None)\n max_ratelimit_timeout: Optional[float] = options.pop('max_ratelimit_timeout', None)\n self.http: HTTPClient = HTTPClient(\n self.loop,\n proxy=proxy,\n proxy_auth=proxy_auth,\n unsync_clock=unsync_clock,\n http_trace=http_trace,\n max_ratelimit_timeout=max_ratelimit_timeout,\n )\n\n self._handlers: Dict[str, Callable[..., None]] = {\n 'ready': self._handle_ready,\n }\n\n self._hooks: Dict[str, Callable[..., Coroutine[Any, Any, Any]]] = {\n 'before_identify': self._call_before_identify_hook,\n }\n\n self._enable_debug_events: bool = options.pop('enable_debug_events', False)\n self._connection: ConnectionState[Self] = self._get_state(intents=intents, **options)\n self._connection.shard_count = self.shard_count\n self._closed: bool = False\n self._ready: asyncio.Event = MISSING\n self._application: Optional[AppInfo] = None\n self._connection._get_websocket = self._get_websocket\n self._connection._get_client = lambda: self\n\n if VoiceClient.warn_nacl:\n VoiceClient.warn_nacl = False\n _log.warning(\"PyNaCl is not installed, voice will NOT be supported\")\n\n async def __aenter__(self) -> Self:\n await self._async_setup_hook()\n return self\n\n async def __aexit__(\n self,\n exc_type: Optional[Type[BaseException]],\n exc_value: Optional[BaseException],\n traceback: Optional[TracebackType],\n ) -> None:\n if not self.is_closed():\n await self.close()\n\n # internals\n\n def _get_websocket(self, guild_id: Optional[int] = None, *, shard_id: Optional[int] = None) -> DiscordWebSocket:\n return self.ws\n\n def _get_state(self, **options: Any) -> ConnectionState:\n return ConnectionState(dispatch=self.dispatch, handlers=self._handlers, hooks=self._hooks, http=self.http, **options)\n\n def _handle_ready(self) -> None:\n self._ready.set()\n\n @property\n def latency(self) -> float:\n \"\"\":class:`float`: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds.\n\n This could be referred to as the Discord WebSocket protocol latency.\n \"\"\"\n ws = self.ws\n return float('nan') if not ws else ws.latency\n\n def is_ws_ratelimited(self) -> bool:\n \"\"\":class:`bool`: Whether the websocket is currently rate limited.\n\n This can be useful to know when deciding whether you should query members\n using HTTP or via the gateway.\n\n .. versionadded:: 1.6\n \"\"\"\n if self.ws:\n return self.ws.is_ratelimited()\n return False\n\n @property\n def user(self) -> Optional[ClientUser]:\n \"\"\"Optional[:class:`.ClientUser`]: Represents the connected client. 
``None`` if not logged in.\"\"\"\n return self._connection.user\n\n @property\n def guilds(self) -> Sequence[Guild]:\n \"\"\"Sequence[:class:`.Guild`]: The guilds that the connected client is a member of.\"\"\"\n return self._connection.guilds\n\n @property\n def emojis(self) -> Sequence[Emoji]:\n \"\"\"Sequence[:class:`.Emoji`]: The emojis that the connected client has.\"\"\"\n return self._connection.emojis\n\n @property\n def stickers(self) -> Sequence[GuildSticker]:\n \"\"\"Sequence[:class:`.GuildSticker`]: The stickers that the connected client has.\n\n .. versionadded:: 2.0\n \"\"\"\n return self._connection.stickers\n\n @property\n def cached_messages(self) -> Sequence[Message]:\n \"\"\"Sequence[:class:`.Message`]: Read-only list of messages the connected client has cached.\n\n .. versionadded:: 1.1\n \"\"\"\n return utils.SequenceProxy(self._connection._messages or [])\n\n @property\n def private_channels(self) -> Sequence[PrivateChannel]:\n \"\"\"Sequence[:class:`.abc.PrivateChannel`]: The private channels that the connected client is participating on.\n\n .. note::\n\n This returns only up to 128 most recent private channels due to an internal working\n on how Discord deals with private channels.\n \"\"\"\n return self._connection.private_channels\n\n @property\n def voice_clients(self) -> List[VoiceProtocol]:\n \"\"\"List[:class:`.VoiceProtocol`]: Represents a list of voice connections.\n\n These are usually :class:`.VoiceClient` instances.\n \"\"\"\n return self._connection.voice_clients\n\n @property\n def application_id(self) -> Optional[int]:\n \"\"\"Optional[:class:`int`]: The client's application ID.\n\n If this is not passed via ``__init__`` then this is retrieved\n through the gateway when an event contains the data or after a call\n to :meth:`~discord.Client.login`. Usually after :func:`~discord.on_connect`\n is called.\n\n .. versionadded:: 2.0\n \"\"\"\n return self._connection.application_id\n\n @property\n def application_flags(self) -> ApplicationFlags:\n \"\"\":class:`~discord.ApplicationFlags`: The client's application flags.\n\n .. versionadded:: 2.0\n \"\"\"\n return self._connection.application_flags\n\n @property\n def application(self) -> Optional[AppInfo]:\n \"\"\"Optional[:class:`~discord.AppInfo`]: The client's application info.\n\n This is retrieved on :meth:`~discord.Client.login` and is not updated\n afterwards. This allows populating the application_id without requiring a\n gateway connection.\n\n This is ``None`` if accessed before :meth:`~discord.Client.login` is called.\n\n .. seealso:: The :meth:`~discord.Client.application_info` API call\n\n .. 
versionadded:: 2.0\n \"\"\"\n return self._application\n\n def is_ready(self) -> bool:\n \"\"\":class:`bool`: Specifies if the client's internal cache is ready for use.\"\"\"\n return self._ready is not MISSING and self._ready.is_set()\n\n async def _run_event(\n self,\n coro: Callable[..., Coroutine[Any, Any, Any]],\n event_name: str,\n *args: Any,\n **kwargs: Any,\n ) -> None:\n try:\n await coro(*args, **kwargs)\n except asyncio.CancelledError:\n pass\n except Exception:\n try:\n await self.on_error(event_name, *args, **kwargs)\n except asyncio.CancelledError:\n pass\n\n def _schedule_event(\n self,\n coro: Callable[..., Coroutine[Any, Any, Any]],\n event_name: str,\n *args: Any,\n **kwargs: Any,\n ) -> asyncio.Task:\n wrapped = self._run_event(coro, event_name, *args, **kwargs)\n # Schedules the task\n return self.loop.create_task(wrapped, name=f'discord.py: {event_name}')\n\n def dispatch(self, event: str, /, *args: Any, **kwargs: Any) -> None:\n _log.debug('Dispatching event %s', event)\n method = 'on_' + event\n\n listeners = self._listeners.get(event)\n if listeners:\n removed = []\n for i, (future, condition) in enumerate(listeners):\n if future.cancelled():\n removed.append(i)\n continue\n\n try:\n result = condition(*args)\n except Exception as exc:\n future.set_exception(exc)\n removed.append(i)\n else:\n if result:\n if len(args) == 0:\n future.set_result(None)\n elif len(args) == 1:\n future.set_result(args[0])\n else:\n future.set_result(args)\n removed.append(i)\n\n if len(removed) == len(listeners):\n self._listeners.pop(event)\n else:\n for idx in reversed(removed):\n del listeners[idx]\n\n try:\n coro = getattr(self, method)\n except AttributeError:\n pass\n else:\n self._schedule_event(coro, method, *args, **kwargs)\n\n async def on_error(self, event_method: str, /, *args: Any, **kwargs: Any) -> None:\n \"\"\"|coro|\n\n The default error handler provided by the client.\n\n By default this logs to the library logger however it could be\n overridden to have a different implementation.\n Check :func:`~discord.on_error` for more details.\n\n .. versionchanged:: 2.0\n\n ``event_method`` parameter is now positional-only\n and instead of writing to ``sys.stderr`` it logs instead.\n \"\"\"\n _log.exception('Ignoring exception in %s', event_method)\n\n # hooks\n\n async def _call_before_identify_hook(self, shard_id: Optional[int], *, initial: bool = False) -> None:\n # This hook is an internal hook that actually calls the public one.\n # It allows the library to have its own hook without stepping on the\n # toes of those who need to override their own hook.\n await self.before_identify_hook(shard_id, initial=initial)\n\n async def before_identify_hook(self, shard_id: Optional[int], *, initial: bool = False) -> None:\n \"\"\"|coro|\n\n A hook that is called before IDENTIFYing a session. This is useful\n if you wish to have more control over the synchronization of multiple\n IDENTIFYing clients.\n\n The default implementation sleeps for 5 seconds.\n\n .. 
versionadded:: 1.4\n\n Parameters\n ------------\n shard_id: :class:`int`\n The shard ID that requested being IDENTIFY'd\n initial: :class:`bool`\n Whether this IDENTIFY is the first initial IDENTIFY.\n \"\"\"\n\n if not initial:\n await asyncio.sleep(5.0)\n\n async def _async_setup_hook(self) -> None:\n # Called whenever the client needs to initialise asyncio objects with a running loop\n loop = asyncio.get_running_loop()\n self.loop = loop\n self.http.loop = loop\n self._connection.loop = loop\n\n self._ready = asyncio.Event()\n\n async def setup_hook(self) -> None:\n \"\"\"|coro|\n\n A coroutine to be called to setup the bot, by default this is blank.\n\n To perform asynchronous setup after the bot is logged in but before\n it has connected to the Websocket, overwrite this coroutine.\n\n This is only called once, in :meth:`login`, and will be called before\n any events are dispatched, making it a better solution than doing such\n setup in the :func:`~discord.on_ready` event.\n\n .. warning::\n\n Since this is called *before* the websocket connection is made therefore\n anything that waits for the websocket will deadlock, this includes things\n like :meth:`wait_for` and :meth:`wait_until_ready`.\n\n .. versionadded:: 2.0\n \"\"\"\n pass\n\n # login state management\n\n async def login(self, token: str) -> None:\n \"\"\"|coro|\n\n Logs in the client with the specified credentials and\n calls the :meth:`setup_hook`.\n\n\n Parameters\n -----------\n token: :class:`str`\n The authentication token. Do not prefix this token with\n anything as the library will do it for you.\n\n Raises\n ------\n LoginFailure\n The wrong credentials are passed.\n HTTPException\n An unknown HTTP related error occurred,\n usually when it isn't 200 or the known incorrect credentials\n passing status code.\n \"\"\"\n\n _log.info('logging in using static token')\n\n if self.loop is _loop:\n await self._async_setup_hook()\n\n if not isinstance(token, str):\n raise TypeError(f'expected token to be a str, received {token.__class__.__name__} instead')\n token = token.strip()\n\n data = await self.http.static_login(token)\n self._connection.user = ClientUser(state=self._connection, data=data)\n self._application = await self.application_info()\n if self._connection.application_id is None:\n self._connection.application_id = self._application.id\n\n if not self._connection.application_flags:\n self._connection.application_flags = self._application.flags\n\n await self.setup_hook()\n\n async def connect(self, *, reconnect: bool = True) -> None:\n \"\"\"|coro|\n\n Creates a websocket connection and lets the websocket listen\n to messages from Discord. This is a loop that runs the entire\n event system and miscellaneous aspects of the library. Control\n is not resumed until the WebSocket connection is terminated.\n\n Parameters\n -----------\n reconnect: :class:`bool`\n If we should attempt reconnecting, either due to internet\n failure or a specific failure on Discord's part. Certain\n disconnects that lead to bad state will not be handled (such as\n invalid sharding payloads or bad tokens).\n\n Raises\n -------\n GatewayNotFound\n If the gateway to connect to Discord is not found. 
Usually if this\n is thrown then there is a Discord API outage.\n ConnectionClosed\n The websocket connection has been terminated.\n \"\"\"\n\n backoff = ExponentialBackoff()\n ws_params = {\n 'initial': True,\n 'shard_id': self.shard_id,\n }\n while not self.is_closed():\n try:\n coro = DiscordWebSocket.from_client(self, **ws_params)\n self.ws = await asyncio.wait_for(coro, timeout=60.0)\n ws_params['initial'] = False\n while True:\n await self.ws.poll_event()\n except ReconnectWebSocket as e:\n _log.debug('Got a request to %s the websocket.', e.op)\n self.dispatch('disconnect')\n ws_params.update(sequence=self.ws.sequence, resume=e.resume, session=self.ws.session_id)\n if e.resume:\n ws_params['gateway'] = self.ws.gateway\n continue\n except (\n OSError,\n HTTPException,\n GatewayNotFound,\n ConnectionClosed,\n aiohttp.ClientError,\n asyncio.TimeoutError,\n ) as exc:\n\n self.dispatch('disconnect')\n if not reconnect:\n await self.close()\n if isinstance(exc, ConnectionClosed) and exc.code == 1000:\n # clean close, don't re-raise this\n return\n raise\n\n if self.is_closed():\n return\n\n # If we get connection reset by peer then try to RESUME\n if isinstance(exc, OSError) and exc.errno in (54, 10054):\n ws_params.update(\n sequence=self.ws.sequence,\n gateway=self.ws.gateway,\n initial=False,\n resume=True,\n session=self.ws.session_id,\n )\n continue\n\n # We should only get this when an unhandled close code happens,\n # such as a clean disconnect (1000) or a bad state (bad token, no sharding, etc)\n # sometimes, discord sends us 1000 for unknown reasons so we should reconnect\n # regardless and rely on is_closed instead\n if isinstance(exc, ConnectionClosed):\n if exc.code == 4014:\n raise PrivilegedIntentsRequired(exc.shard_id) from None\n if exc.code != 1000:\n await self.close()\n raise\n\n retry = backoff.delay()\n _log.exception(\"Attempting a reconnect in %.2fs\", retry)\n await asyncio.sleep(retry)\n # Always try to RESUME the connection\n # If the connection is not RESUME-able then the gateway will invalidate the session.\n # This is apparently what the official Discord client does.\n ws_params.update(\n sequence=self.ws.sequence,\n gateway=self.ws.gateway,\n resume=True,\n session=self.ws.session_id,\n )\n\n async def close(self) -> None:\n \"\"\"|coro|\n\n Closes the connection to Discord.\n \"\"\"\n if self._closed:\n return\n\n self._closed = True\n\n await self._connection.close()\n\n if self.ws is not None and self.ws.open:\n await self.ws.close(code=1000)\n\n await self.http.close()\n\n if self._ready is not MISSING:\n self._ready.clear()\n\n self.loop = MISSING\n\n def clear(self) -> None:\n \"\"\"Clears the internal state of the bot.\n\n After this, the bot can be considered \"re-opened\", i.e. :meth:`is_closed`\n and :meth:`is_ready` both return ``False`` along with the bot's internal\n cache cleared.\n \"\"\"\n self._closed = False\n self._ready.clear()\n self._connection.clear()\n self.http.clear()\n\n async def start(self, token: str, *, reconnect: bool = True) -> None:\n \"\"\"|coro|\n\n A shorthand coroutine for :meth:`login` + :meth:`connect`.\n\n Parameters\n -----------\n token: :class:`str`\n The authentication token. Do not prefix this token with\n anything as the library will do it for you.\n reconnect: :class:`bool`\n If we should attempt reconnecting, either due to internet\n failure or a specific failure on Discord's part. 
Certain\n disconnects that lead to bad state will not be handled (such as\n invalid sharding payloads or bad tokens).\n\n Raises\n -------\n TypeError\n An unexpected keyword argument was received.\n \"\"\"\n await self.login(token)\n await self.connect(reconnect=reconnect)\n\n def run(\n self,\n token: str,\n *,\n reconnect: bool = True,\n log_handler: Optional[logging.Handler] = MISSING,\n log_formatter: logging.Formatter = MISSING,\n log_level: int = MISSING,\n root_logger: bool = False,\n ) -> None:\n \"\"\"A blocking call that abstracts away the event loop\n initialisation from you.\n\n If you want more control over the event loop then this\n function should not be used. Use :meth:`start` coroutine\n or :meth:`connect` + :meth:`login`.\n\n This function also sets up the logging library to make it easier\n for beginners to know what is going on with the library. For more\n advanced users, this can be disabled by passing ``None`` to\n the ``log_handler`` parameter.\n\n .. warning::\n\n This function must be the last function to call due to the fact that it\n is blocking. That means that registration of events or anything being\n called after this function call will not execute until it returns.\n\n Parameters\n -----------\n token: :class:`str`\n The authentication token. Do not prefix this token with\n anything as the library will do it for you.\n reconnect: :class:`bool`\n If we should attempt reconnecting, either due to internet\n failure or a specific failure on Discord's part. Certain\n disconnects that lead to bad state will not be handled (such as\n invalid sharding payloads or bad tokens).\n log_handler: Optional[:class:`logging.Handler`]\n The log handler to use for the library's logger. If this is ``None``\n then the library will not set up anything logging related. Logging\n will still work if ``None`` is passed, though it is your responsibility\n to set it up.\n\n The default log handler if not provided is :class:`logging.StreamHandler`.\n\n .. versionadded:: 2.0\n log_formatter: :class:`logging.Formatter`\n The formatter to use with the given log handler. If not provided then it\n defaults to a colour based logging formatter (if available).\n\n .. versionadded:: 2.0\n log_level: :class:`int`\n The default log level for the library's logger. This is only applied if the\n ``log_handler`` parameter is not ``None``. Defaults to ``logging.INFO``.\n\n .. versionadded:: 2.0\n root_logger: :class:`bool`\n Whether to set up the root logger rather than the library logger.\n By default, only the library logger (``'discord'``) is set up. If this\n is set to ``True`` then the root logger is set up as well.\n\n Defaults to ``False``.\n\n .. 
versionadded:: 2.0\n \"\"\"\n\n async def runner():\n async with self:\n await self.start(token, reconnect=reconnect)\n\n if log_handler is not None:\n utils.setup_logging(\n handler=log_handler,\n formatter=log_formatter,\n level=log_level,\n root=root_logger,\n )\n\n try:\n asyncio.run(runner())\n except KeyboardInterrupt:\n # nothing to do here\n # `asyncio.run` handles the loop cleanup\n # and `self.start` closes all sockets and the HTTPClient instance.\n return\n\n # properties\n\n def is_closed(self) -> bool:\n \"\"\":class:`bool`: Indicates if the websocket connection is closed.\"\"\"\n return self._closed\n\n @property\n def activity(self) -> Optional[ActivityTypes]:\n \"\"\"Optional[:class:`.BaseActivity`]: The activity being used upon\n logging in.\n \"\"\"\n return create_activity(self._connection._activity, self._connection)\n\n @activity.setter\n def activity(self, value: Optional[ActivityTypes]) -> None:\n if value is None:\n self._connection._activity = None\n elif isinstance(value, BaseActivity):\n # ConnectionState._activity is typehinted as ActivityPayload, we're passing Dict[str, Any]\n self._connection._activity = value.to_dict() # type: ignore\n else:\n raise TypeError('activity must derive from BaseActivity.')\n\n @property\n def status(self) -> Status:\n \"\"\":class:`.Status`:\n The status being used upon logging on to Discord.\n\n .. versionadded: 2.0\n \"\"\"\n if self._connection._status in set(state.value for state in Status):\n return Status(self._connection._status)\n return Status.online\n\n @status.setter\n def status(self, value: Status) -> None:\n if value is Status.offline:\n self._connection._status = 'invisible'\n elif isinstance(value, Status):\n self._connection._status = str(value)\n else:\n raise TypeError('status must derive from Status.')\n\n @property\n def allowed_mentions(self) -> Optional[AllowedMentions]:\n \"\"\"Optional[:class:`~discord.AllowedMentions`]: The allowed mention configuration.\n\n .. versionadded:: 1.4\n \"\"\"\n return self._connection.allowed_mentions\n\n @allowed_mentions.setter\n def allowed_mentions(self, value: Optional[AllowedMentions]) -> None:\n if value is None or isinstance(value, AllowedMentions):\n self._connection.allowed_mentions = value\n else:\n raise TypeError(f'allowed_mentions must be AllowedMentions not {value.__class__.__name__}')\n\n @property\n def intents(self) -> Intents:\n \"\"\":class:`~discord.Intents`: The intents configured for this connection.\n\n .. versionadded:: 1.5\n \"\"\"\n return self._connection.intents\n\n # helpers/getters\n\n @property\n def users(self) -> List[User]:\n \"\"\"List[:class:`~discord.User`]: Returns a list of all the users the bot can see.\"\"\"\n return list(self._connection._users.values())\n\n def get_channel(self, id: int, /) -> Optional[Union[GuildChannel, Thread, PrivateChannel]]:\n \"\"\"Returns a channel or thread with the given ID.\n\n .. 
versionchanged:: 2.0\n\n ``id`` parameter is now positional-only.\n\n Parameters\n -----------\n id: :class:`int`\n The ID to search for.\n\n Returns\n --------\n Optional[Union[:class:`.abc.GuildChannel`, :class:`.Thread`, :class:`.abc.PrivateChannel`]]\n The returned channel or ``None`` if not found.\n \"\"\"\n return self._connection.get_channel(id) # type: ignore # The cache contains all channel types\n\n def get_partial_messageable(\n self, id: int, *, guild_id: Optional[int] = None, type: Optional[ChannelType] = None\n ) -> PartialMessageable:\n \"\"\"Returns a partial messageable with the given channel ID.\n\n This is useful if you have a channel_id but don't want to do an API call\n to send messages to it.\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n id: :class:`int`\n The channel ID to create a partial messageable for.\n guild_id: Optional[:class:`int`]\n The optional guild ID to create a partial messageable for.\n\n This is not required to actually send messages, but it does allow the\n :meth:`~discord.PartialMessageable.jump_url` and\n :attr:`~discord.PartialMessageable.guild` properties to function properly.\n type: Optional[:class:`.ChannelType`]\n The underlying channel type for the partial messageable.\n\n Returns\n --------\n :class:`.PartialMessageable`\n The partial messageable\n \"\"\"\n return PartialMessageable(state=self._connection, id=id, guild_id=guild_id, type=type)\n\n def get_stage_instance(self, id: int, /) -> Optional[StageInstance]:\n \"\"\"Returns a stage instance with the given stage channel ID.\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n id: :class:`int`\n The ID to search for.\n\n Returns\n --------\n Optional[:class:`.StageInstance`]\n The stage instance or ``None`` if not found.\n \"\"\"\n from .channel import StageChannel\n\n channel = self._connection.get_channel(id)\n\n if isinstance(channel, StageChannel):\n return channel.instance\n\n def get_guild(self, id: int, /) -> Optional[Guild]:\n \"\"\"Returns a guild with the given ID.\n\n .. versionchanged:: 2.0\n\n ``id`` parameter is now positional-only.\n\n Parameters\n -----------\n id: :class:`int`\n The ID to search for.\n\n Returns\n --------\n Optional[:class:`.Guild`]\n The guild or ``None`` if not found.\n \"\"\"\n return self._connection._get_guild(id)\n\n def get_user(self, id: int, /) -> Optional[User]:\n \"\"\"Returns a user with the given ID.\n\n .. versionchanged:: 2.0\n\n ``id`` parameter is now positional-only.\n\n Parameters\n -----------\n id: :class:`int`\n The ID to search for.\n\n Returns\n --------\n Optional[:class:`~discord.User`]\n The user or ``None`` if not found.\n \"\"\"\n return self._connection.get_user(id)\n\n def get_emoji(self, id: int, /) -> Optional[Emoji]:\n \"\"\"Returns an emoji with the given ID.\n\n .. versionchanged:: 2.0\n\n ``id`` parameter is now positional-only.\n\n Parameters\n -----------\n id: :class:`int`\n The ID to search for.\n\n Returns\n --------\n Optional[:class:`.Emoji`]\n The custom emoji or ``None`` if not found.\n \"\"\"\n return self._connection.get_emoji(id)\n\n def get_sticker(self, id: int, /) -> Optional[GuildSticker]:\n \"\"\"Returns a guild sticker with the given ID.\n\n .. versionadded:: 2.0\n\n .. 
note::\n\n To retrieve standard stickers, use :meth:`.fetch_sticker`.\n or :meth:`.fetch_premium_sticker_packs`.\n\n Returns\n --------\n Optional[:class:`.GuildSticker`]\n The sticker or ``None`` if not found.\n \"\"\"\n return self._connection.get_sticker(id)\n\n def get_all_channels(self) -> Generator[GuildChannel, None, None]:\n \"\"\"A generator that retrieves every :class:`.abc.GuildChannel` the client can 'access'.\n\n This is equivalent to: ::\n\n for guild in client.guilds:\n for channel in guild.channels:\n yield channel\n\n .. note::\n\n Just because you receive a :class:`.abc.GuildChannel` does not mean that\n you can communicate in said channel. :meth:`.abc.GuildChannel.permissions_for` should\n be used for that.\n\n Yields\n ------\n :class:`.abc.GuildChannel`\n A channel the client can 'access'.\n \"\"\"\n\n for guild in self.guilds:\n yield from guild.channels\n\n def get_all_members(self) -> Generator[Member, None, None]:\n \"\"\"Returns a generator with every :class:`.Member` the client can see.\n\n This is equivalent to: ::\n\n for guild in client.guilds:\n for member in guild.members:\n yield member\n\n Yields\n ------\n :class:`.Member`\n A member the client can see.\n \"\"\"\n for guild in self.guilds:\n yield from guild.members\n\n # listeners/waiters\n\n async def wait_until_ready(self) -> None:\n \"\"\"|coro|\n\n Waits until the client's internal cache is all ready.\n\n .. warning::\n\n Calling this inside :meth:`setup_hook` can lead to a deadlock.\n \"\"\"\n if self._ready is not MISSING:\n await self._ready.wait()\n else:\n raise RuntimeError(\n 'Client has not been properly initialised. '\n 'Please use the login method or asynchronous context manager before calling this method'\n )\n\n # App Commands\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_app_command_permissions_update'],\n /,\n *,\n check: Optional[Callable[[RawAppCommandPermissionsUpdateEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawAppCommandPermissionsUpdateEvent:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['app_command_completion'],\n /,\n *,\n check: Optional[Callable[[Interaction[Self], Union[Command[Any, ..., Any], ContextMenu]], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Interaction[Self], Union[Command[Any, ..., Any], ContextMenu]]:\n ...\n\n # AutoMod\n\n @overload\n async def wait_for(\n self,\n event: Literal['automod_rule_create', 'automod_rule_update', 'automod_rule_delete'],\n /,\n *,\n check: Optional[Callable[[AutoModRule], bool]],\n timeout: Optional[float] = None,\n ) -> AutoModRule:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['automod_action'],\n /,\n *,\n check: Optional[Callable[[AutoModAction], bool]],\n timeout: Optional[float] = None,\n ) -> AutoModAction:\n ...\n\n # Channels\n\n @overload\n async def wait_for(\n self,\n event: Literal['private_channel_update'],\n /,\n *,\n check: Optional[Callable[[GroupChannel, GroupChannel], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[GroupChannel, GroupChannel]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['private_channel_pins_update'],\n /,\n *,\n check: Optional[Callable[[PrivateChannel, datetime.datetime], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[PrivateChannel, datetime.datetime]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['guild_channel_delete', 'guild_channel_create'],\n /,\n *,\n check: Optional[Callable[[GuildChannel], bool]],\n timeout: Optional[float] = None,\n ) -> 
GuildChannel:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['guild_channel_update'],\n /,\n *,\n check: Optional[Callable[[GuildChannel, GuildChannel], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[GuildChannel, GuildChannel]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['guild_channel_pins_update'],\n /,\n *,\n check: Optional[\n Callable[\n [Union[GuildChannel, Thread], Optional[datetime.datetime]],\n bool,\n ]\n ],\n timeout: Optional[float] = None,\n ) -> Tuple[Union[GuildChannel, Thread], Optional[datetime.datetime]]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['typing'],\n /,\n *,\n check: Optional[Callable[[Messageable, Union[User, Member], datetime.datetime], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Messageable, Union[User, Member], datetime.datetime]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_typing'],\n /,\n *,\n check: Optional[Callable[[RawTypingEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawTypingEvent:\n ...\n\n # Debug & Gateway events\n\n @overload\n async def wait_for(\n self,\n event: Literal['connect', 'disconnect', 'ready', 'resumed'],\n /,\n *,\n check: Optional[Callable[[], bool]],\n timeout: Optional[float] = None,\n ) -> None:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['shard_connect', 'shard_disconnect', 'shard_ready', 'shard_resumed'],\n /,\n *,\n check: Optional[Callable[[int], bool]],\n timeout: Optional[float] = None,\n ) -> int:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['socket_event_type', 'socket_raw_receive'],\n /,\n *,\n check: Optional[Callable[[str], bool]],\n timeout: Optional[float] = None,\n ) -> str:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['socket_raw_send'],\n /,\n *,\n check: Optional[Callable[[Union[str, bytes]], bool]],\n timeout: Optional[float] = None,\n ) -> Union[str, bytes]:\n ...\n\n # Guilds\n\n @overload\n async def wait_for(\n self,\n event: Literal[\n 'guild_available',\n 'guild_unavailable',\n 'guild_join',\n 'guild_remove',\n ],\n /,\n *,\n check: Optional[Callable[[Guild], bool]],\n timeout: Optional[float] = None,\n ) -> Guild:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['guild_update'],\n /,\n *,\n check: Optional[Callable[[Guild, Guild], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Guild, Guild]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['guild_emojis_update'],\n /,\n *,\n check: Optional[Callable[[Guild, Sequence[Emoji], Sequence[Emoji]], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Guild, Sequence[Emoji], Sequence[Emoji]]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['guild_stickers_update'],\n /,\n *,\n check: Optional[Callable[[Guild, Sequence[GuildSticker], Sequence[GuildSticker]], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Guild, Sequence[GuildSticker], Sequence[GuildSticker]]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['invite_create', 'invite_delete'],\n /,\n *,\n check: Optional[Callable[[Invite], bool]],\n timeout: Optional[float] = None,\n ) -> Invite:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['audit_log_entry_create'],\n /,\n *,\n check: Optional[Callable[[AuditLogEntry], bool]],\n timeout: Optional[float] = None,\n ) -> AuditLogEntry:\n ...\n\n # Integrations\n\n @overload\n async def wait_for(\n self,\n event: Literal['integration_create', 
'integration_update'],\n /,\n *,\n check: Optional[Callable[[Integration], bool]],\n timeout: Optional[float] = None,\n ) -> Integration:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['guild_integrations_update'],\n /,\n *,\n check: Optional[Callable[[Guild], bool]],\n timeout: Optional[float] = None,\n ) -> Guild:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['webhooks_update'],\n /,\n *,\n check: Optional[Callable[[GuildChannel], bool]],\n timeout: Optional[float] = None,\n ) -> GuildChannel:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_integration_delete'],\n /,\n *,\n check: Optional[Callable[[RawIntegrationDeleteEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawIntegrationDeleteEvent:\n ...\n\n # Interactions\n\n @overload\n async def wait_for(\n self,\n event: Literal['interaction'],\n /,\n *,\n check: Optional[Callable[[Interaction[Self]], bool]],\n timeout: Optional[float] = None,\n ) -> Interaction[Self]:\n ...\n\n # Members\n\n @overload\n async def wait_for(\n self,\n event: Literal['member_join', 'member_remove'],\n /,\n *,\n check: Optional[Callable[[Member], bool]],\n timeout: Optional[float] = None,\n ) -> Member:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_member_remove'],\n /,\n *,\n check: Optional[Callable[[RawMemberRemoveEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawMemberRemoveEvent:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['member_update', 'presence_update'],\n /,\n *,\n check: Optional[Callable[[Member, Member], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Member, Member]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['user_update'],\n /,\n *,\n check: Optional[Callable[[User, User], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[User, User]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['member_ban'],\n /,\n *,\n check: Optional[Callable[[Guild, Union[User, Member]], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Guild, Union[User, Member]]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['member_unban'],\n /,\n *,\n check: Optional[Callable[[Guild, User], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Guild, User]:\n ...\n\n # Messages\n\n @overload\n async def wait_for(\n self,\n event: Literal['message', 'message_delete'],\n /,\n *,\n check: Optional[Callable[[Message], bool]],\n timeout: Optional[float] = None,\n ) -> Message:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['message_edit'],\n /,\n *,\n check: Optional[Callable[[Message, Message], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Message, Message]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['bulk_message_delete'],\n /,\n *,\n check: Optional[Callable[[List[Message]], bool]],\n timeout: Optional[float] = None,\n ) -> List[Message]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_message_edit'],\n /,\n *,\n check: Optional[Callable[[RawMessageUpdateEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawMessageUpdateEvent:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_message_delete'],\n /,\n *,\n check: Optional[Callable[[RawMessageDeleteEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawMessageDeleteEvent:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_bulk_message_delete'],\n /,\n *,\n check: 
Optional[Callable[[RawBulkMessageDeleteEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawBulkMessageDeleteEvent:\n ...\n\n # Reactions\n\n @overload\n async def wait_for(\n self,\n event: Literal['reaction_add', 'reaction_remove'],\n /,\n *,\n check: Optional[Callable[[Reaction, Union[Member, User]], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Reaction, Union[Member, User]]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['reaction_clear'],\n /,\n *,\n check: Optional[Callable[[Message, List[Reaction]], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Message, List[Reaction]]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['reaction_clear_emoji'],\n /,\n *,\n check: Optional[Callable[[Reaction], bool]],\n timeout: Optional[float] = None,\n ) -> Reaction:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_reaction_add', 'raw_reaction_remove'],\n /,\n *,\n check: Optional[Callable[[RawReactionActionEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawReactionActionEvent:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_reaction_clear'],\n /,\n *,\n check: Optional[Callable[[RawReactionClearEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawReactionClearEvent:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_reaction_clear_emoji'],\n /,\n *,\n check: Optional[Callable[[RawReactionClearEmojiEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawReactionClearEmojiEvent:\n ...\n\n # Roles\n\n @overload\n async def wait_for(\n self,\n event: Literal['guild_role_create', 'guild_role_delete'],\n /,\n *,\n check: Optional[Callable[[Role], bool]],\n timeout: Optional[float] = None,\n ) -> Role:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['guild_role_update'],\n /,\n *,\n check: Optional[Callable[[Role, Role], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Role, Role]:\n ...\n\n # Scheduled Events\n\n @overload\n async def wait_for(\n self,\n event: Literal['scheduled_event_create', 'scheduled_event_delete'],\n /,\n *,\n check: Optional[Callable[[ScheduledEvent], bool]],\n timeout: Optional[float] = None,\n ) -> ScheduledEvent:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['scheduled_event_user_add', 'scheduled_event_user_remove'],\n /,\n *,\n check: Optional[Callable[[ScheduledEvent, User], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[ScheduledEvent, User]:\n ...\n\n # Stages\n\n @overload\n async def wait_for(\n self,\n event: Literal['stage_instance_create', 'stage_instance_delete'],\n /,\n *,\n check: Optional[Callable[[StageInstance], bool]],\n timeout: Optional[float] = None,\n ) -> StageInstance:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['stage_instance_update'],\n /,\n *,\n check: Optional[Callable[[StageInstance, StageInstance], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[StageInstance, StageInstance]:\n ...\n\n # Threads\n\n @overload\n async def wait_for(\n self,\n event: Literal['thread_create', 'thread_join', 'thread_remove', 'thread_delete'],\n /,\n *,\n check: Optional[Callable[[Thread], bool]],\n timeout: Optional[float] = None,\n ) -> Thread:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['thread_update'],\n /,\n *,\n check: Optional[Callable[[Thread, Thread], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Thread, Thread]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: 
Literal['raw_thread_update'],\n /,\n *,\n check: Optional[Callable[[RawThreadUpdateEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawThreadUpdateEvent:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_thread_delete'],\n /,\n *,\n check: Optional[Callable[[RawThreadDeleteEvent], bool]],\n timeout: Optional[float] = None,\n ) -> RawThreadDeleteEvent:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['thread_member_join', 'thread_member_remove'],\n /,\n *,\n check: Optional[Callable[[ThreadMember], bool]],\n timeout: Optional[float] = None,\n ) -> ThreadMember:\n ...\n\n @overload\n async def wait_for(\n self,\n event: Literal['raw_thread_member_remove'],\n /,\n *,\n check: Optional[Callable[[RawThreadMembersUpdate], bool]],\n timeout: Optional[float] = None,\n ) -> RawThreadMembersUpdate:\n ...\n\n # Voice\n\n @overload\n async def wait_for(\n self,\n event: Literal['voice_state_update'],\n /,\n *,\n check: Optional[Callable[[Member, VoiceState, VoiceState], bool]],\n timeout: Optional[float] = None,\n ) -> Tuple[Member, VoiceState, VoiceState]:\n ...\n\n # Commands\n\n @overload\n async def wait_for(\n self: Union[Bot, AutoShardedBot],\n event: Literal[\"command\", \"command_completion\"],\n /,\n *,\n check: Optional[Callable[[Context[Any]], bool]] = None,\n timeout: Optional[float] = None,\n ) -> Context[Any]:\n ...\n\n @overload\n async def wait_for(\n self: Union[Bot, AutoShardedBot],\n event: Literal[\"command_error\"],\n /,\n *,\n check: Optional[Callable[[Context[Any], CommandError], bool]] = None,\n timeout: Optional[float] = None,\n ) -> Tuple[Context[Any], CommandError]:\n ...\n\n @overload\n async def wait_for(\n self,\n event: str,\n /,\n *,\n check: Optional[Callable[..., bool]] = None,\n timeout: Optional[float] = None,\n ) -> Any:\n ...\n\n def wait_for(\n self,\n event: str,\n /,\n *,\n check: Optional[Callable[..., bool]] = None,\n timeout: Optional[float] = None,\n ) -> Coro[Any]:\n \"\"\"|coro|\n\n Waits for a WebSocket event to be dispatched.\n\n This could be used to wait for a user to reply to a message,\n or to react to a message, or to edit a message in a self-contained\n way.\n\n The ``timeout`` parameter is passed onto :func:`asyncio.wait_for`. By default,\n it does not timeout. Note that this does propagate the\n :exc:`asyncio.TimeoutError` for you in case of timeout and is provided for\n ease of use.\n\n In case the event returns multiple arguments, a :class:`tuple` containing those\n arguments is returned instead. 
Please check the\n :ref:`documentation <discord-api-events>` for a list of events and their\n parameters.\n\n This function returns the **first event that meets the requirements**.\n\n Examples\n ---------\n\n Waiting for a user reply: ::\n\n @client.event\n async def on_message(message):\n if message.content.startswith('$greet'):\n channel = message.channel\n await channel.send('Say hello!')\n\n def check(m):\n return m.content == 'hello' and m.channel == channel\n\n msg = await client.wait_for('message', check=check)\n await channel.send(f'Hello {msg.author}!')\n\n Waiting for a thumbs up reaction from the message author: ::\n\n @client.event\n async def on_message(message):\n if message.content.startswith('$thumb'):\n channel = message.channel\n await channel.send('Send me that \\N{THUMBS UP SIGN} reaction, mate')\n\n def check(reaction, user):\n return user == message.author and str(reaction.emoji) == '\\N{THUMBS UP SIGN}'\n\n try:\n reaction, user = await client.wait_for('reaction_add', timeout=60.0, check=check)\n except asyncio.TimeoutError:\n await channel.send('\\N{THUMBS DOWN SIGN}')\n else:\n await channel.send('\\N{THUMBS UP SIGN}')\n\n .. versionchanged:: 2.0\n\n ``event`` parameter is now positional-only.\n\n\n Parameters\n ------------\n event: :class:`str`\n The event name, similar to the :ref:`event reference <discord-api-events>`,\n but without the ``on_`` prefix, to wait for.\n check: Optional[Callable[..., :class:`bool`]]\n A predicate to check what to wait for. The arguments must meet the\n parameters of the event being waited for.\n timeout: Optional[:class:`float`]\n The number of seconds to wait before timing out and raising\n :exc:`asyncio.TimeoutError`.\n\n Raises\n -------\n asyncio.TimeoutError\n If a timeout is provided and it was reached.\n\n Returns\n --------\n Any\n Returns no arguments, a single argument, or a :class:`tuple` of multiple\n arguments that mirrors the parameters passed in the\n :ref:`event reference <discord-api-events>`.\n \"\"\"\n\n future = self.loop.create_future()\n if check is None:\n\n def _check(*args):\n return True\n\n check = _check\n\n ev = event.lower()\n try:\n listeners = self._listeners[ev]\n except KeyError:\n listeners = []\n self._listeners[ev] = listeners\n\n listeners.append((future, check))\n return asyncio.wait_for(future, timeout)\n\n # event registration\n\n def event(self, coro: CoroT, /) -> CoroT:\n \"\"\"A decorator that registers an event to listen to.\n\n You can find more info about the events on the :ref:`documentation below <discord-api-events>`.\n\n The events must be a :ref:`coroutine <coroutine>`, if not, :exc:`TypeError` is raised.\n\n Example\n ---------\n\n .. code-block:: python3\n\n @client.event\n async def on_ready():\n print('Ready!')\n\n .. versionchanged:: 2.0\n\n ``coro`` parameter is now positional-only.\n\n Raises\n --------\n TypeError\n The coroutine passed is not actually a coroutine.\n \"\"\"\n\n if not asyncio.iscoroutinefunction(coro):\n raise TypeError('event registered must be a coroutine function')\n\n setattr(self, coro.__name__, coro)\n _log.debug('%s has successfully been registered as an event', coro.__name__)\n return coro\n\n async def change_presence(\n self,\n *,\n activity: Optional[BaseActivity] = None,\n status: Optional[Status] = None,\n ) -> None:\n \"\"\"|coro|\n\n Changes the client's presence.\n\n Example\n ---------\n\n .. code-block:: python3\n\n game = discord.Game(\"with the API\")\n await client.change_presence(status=discord.Status.idle, activity=game)\n\n .. 
versionchanged:: 2.0\n Removed the ``afk`` keyword-only parameter.\n\n .. versionchanged:: 2.0\n This function will now raise :exc:`TypeError` instead of\n ``InvalidArgument``.\n\n Parameters\n ----------\n activity: Optional[:class:`.BaseActivity`]\n The activity being done. ``None`` if no currently active activity is done.\n status: Optional[:class:`.Status`]\n Indicates what status to change to. If ``None``, then\n :attr:`.Status.online` is used.\n\n Raises\n ------\n TypeError\n If the ``activity`` parameter is not the proper type.\n \"\"\"\n\n if status is None:\n status_str = 'online'\n status = Status.online\n elif status is Status.offline:\n status_str = 'invisible'\n status = Status.offline\n else:\n status_str = str(status)\n\n await self.ws.change_presence(activity=activity, status=status_str)\n\n for guild in self._connection.guilds:\n me = guild.me\n if me is None:\n continue\n\n if activity is not None:\n me.activities = (activity,) # type: ignore # Type checker does not understand the downcast here\n else:\n me.activities = ()\n\n me.status = status\n\n # Guild stuff\n\n async def fetch_guilds(\n self,\n *,\n limit: Optional[int] = 200,\n before: Optional[SnowflakeTime] = None,\n after: Optional[SnowflakeTime] = None,\n with_counts: bool = True,\n ) -> AsyncIterator[Guild]:\n \"\"\"Retrieves an :term:`asynchronous iterator` that enables receiving your guilds.\n\n .. note::\n\n Using this, you will only receive :attr:`.Guild.owner`, :attr:`.Guild.icon`,\n :attr:`.Guild.id`, :attr:`.Guild.name`, :attr:`.Guild.approximate_member_count`,\n and :attr:`.Guild.approximate_presence_count` per :class:`.Guild`.\n\n .. note::\n\n This method is an API call. For general usage, consider :attr:`guilds` instead.\n\n Examples\n ---------\n\n Usage ::\n\n async for guild in client.fetch_guilds(limit=150):\n print(guild.name)\n\n Flattening into a list ::\n\n guilds = [guild async for guild in client.fetch_guilds(limit=150)]\n # guilds is now a list of Guild...\n\n All parameters are optional.\n\n Parameters\n -----------\n limit: Optional[:class:`int`]\n The number of guilds to retrieve.\n If ``None``, it retrieves every guild you have access to. Note, however,\n that this would make it a slow operation.\n Defaults to ``200``.\n\n .. versionchanged:: 2.0\n\n The default has been changed to 200.\n\n before: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]\n Retrieves guilds before this date or object.\n If a datetime is provided, it is recommended to use a UTC aware datetime.\n If the datetime is naive, it is assumed to be local time.\n after: Union[:class:`.abc.Snowflake`, :class:`datetime.datetime`]\n Retrieve guilds after this date or object.\n If a datetime is provided, it is recommended to use a UTC aware datetime.\n If the datetime is naive, it is assumed to be local time.\n with_counts: :class:`bool`\n Whether to include count information in the guilds. This fills the\n :attr:`.Guild.approximate_member_count` and :attr:`.Guild.approximate_presence_count`\n attributes without needing any privileged intents. Defaults to ``True``.\n\n .. 
versionadded:: 2.3\n\n Raises\n ------\n HTTPException\n Getting the guilds failed.\n\n Yields\n --------\n :class:`.Guild`\n The guild with the guild data parsed.\n \"\"\"\n\n async def _before_strategy(retrieve: int, before: Optional[Snowflake], limit: Optional[int]):\n before_id = before.id if before else None\n data = await self.http.get_guilds(retrieve, before=before_id, with_counts=with_counts)\n\n if data:\n if limit is not None:\n limit -= len(data)\n\n before = Object(id=int(data[0]['id']))\n\n return data, before, limit\n\n async def _after_strategy(retrieve: int, after: Optional[Snowflake], limit: Optional[int]):\n after_id = after.id if after else None\n data = await self.http.get_guilds(retrieve, after=after_id, with_counts=with_counts)\n\n if data:\n if limit is not None:\n limit -= len(data)\n\n after = Object(id=int(data[-1]['id']))\n\n return data, after, limit\n\n if isinstance(before, datetime.datetime):\n before = Object(id=time_snowflake(before, high=False))\n if isinstance(after, datetime.datetime):\n after = Object(id=time_snowflake(after, high=True))\n\n predicate: Optional[Callable[[GuildPayload], bool]] = None\n strategy, state = _after_strategy, after\n\n if before:\n strategy, state = _before_strategy, before\n\n if before and after:\n predicate = lambda m: int(m['id']) > after.id\n\n while True:\n retrieve = 200 if limit is None else min(limit, 200)\n if retrieve < 1:\n return\n\n data, state, limit = await strategy(retrieve, state, limit)\n\n if predicate:\n data = filter(predicate, data)\n\n count = 0\n\n for count, raw_guild in enumerate(data, 1):\n yield Guild(state=self._connection, data=raw_guild)\n\n if count < 200:\n # There's no data left after this\n break\n\n async def fetch_template(self, code: Union[Template, str]) -> Template:\n \"\"\"|coro|\n\n Gets a :class:`.Template` from a discord.new URL or code.\n\n Parameters\n -----------\n code: Union[:class:`.Template`, :class:`str`]\n The Discord Template Code or URL (must be a discord.new URL).\n\n Raises\n -------\n NotFound\n The template is invalid.\n HTTPException\n Getting the template failed.\n\n Returns\n --------\n :class:`.Template`\n The template from the URL/code.\n \"\"\"\n code = utils.resolve_template(code)\n data = await self.http.get_template(code)\n return Template(data=data, state=self._connection)\n\n async def fetch_guild(self, guild_id: int, /, *, with_counts: bool = True) -> Guild:\n \"\"\"|coro|\n\n Retrieves a :class:`.Guild` from an ID.\n\n .. note::\n\n Using this, you will **not** receive :attr:`.Guild.channels`, :attr:`.Guild.members`,\n :attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`.\n\n .. note::\n\n This method is an API call. For general usage, consider :meth:`get_guild` instead.\n\n .. versionchanged:: 2.0\n\n ``guild_id`` parameter is now positional-only.\n\n\n Parameters\n -----------\n guild_id: :class:`int`\n The guild's ID to fetch from.\n with_counts: :class:`bool`\n Whether to include count information in the guild. This fills the\n :attr:`.Guild.approximate_member_count` and :attr:`.Guild.approximate_presence_count`\n attributes without needing any privileged intents. Defaults to ``True``.\n\n .. 
versionadded:: 2.0\n\n Raises\n ------\n Forbidden\n You do not have access to the guild.\n HTTPException\n Getting the guild failed.\n\n Returns\n --------\n :class:`.Guild`\n The guild from the ID.\n \"\"\"\n data = await self.http.get_guild(guild_id, with_counts=with_counts)\n return Guild(data=data, state=self._connection)\n\n async def create_guild(\n self,\n *,\n name: str,\n icon: bytes = MISSING,\n code: str = MISSING,\n ) -> Guild:\n \"\"\"|coro|\n\n Creates a :class:`.Guild`.\n\n Bot accounts in more than 10 guilds are not allowed to create guilds.\n\n .. versionchanged:: 2.0\n ``name`` and ``icon`` parameters are now keyword-only. The ``region`` parameter has been removed.\n\n .. versionchanged:: 2.0\n This function will now raise :exc:`ValueError` instead of\n ``InvalidArgument``.\n\n Parameters\n ----------\n name: :class:`str`\n The name of the guild.\n icon: Optional[:class:`bytes`]\n The :term:`py:bytes-like object` representing the icon. See :meth:`.ClientUser.edit`\n for more details on what is expected.\n code: :class:`str`\n The code for a template to create the guild with.\n\n .. versionadded:: 1.4\n\n Raises\n ------\n HTTPException\n Guild creation failed.\n ValueError\n Invalid icon image format given. Must be PNG or JPG.\n\n Returns\n -------\n :class:`.Guild`\n The guild created. This is not the same guild that is\n added to cache.\n \"\"\"\n if icon is not MISSING:\n icon_base64 = utils._bytes_to_base64_data(icon)\n else:\n icon_base64 = None\n\n if code:\n data = await self.http.create_from_template(code, name, icon_base64)\n else:\n data = await self.http.create_guild(name, icon_base64)\n return Guild(data=data, state=self._connection)\n\n async def fetch_stage_instance(self, channel_id: int, /) -> StageInstance:\n \"\"\"|coro|\n\n Gets a :class:`.StageInstance` for a stage channel id.\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n channel_id: :class:`int`\n The stage channel ID.\n\n Raises\n -------\n NotFound\n The stage instance or channel could not be found.\n HTTPException\n Getting the stage instance failed.\n\n Returns\n --------\n :class:`.StageInstance`\n The stage instance from the stage channel ID.\n \"\"\"\n data = await self.http.get_stage_instance(channel_id)\n guild = self.get_guild(int(data['guild_id']))\n # Guild can technically be None here but this is being explicitly silenced right now.\n return StageInstance(guild=guild, state=self._connection, data=data) # type: ignore\n\n # Invite management\n\n async def fetch_invite(\n self,\n url: Union[Invite, str],\n *,\n with_counts: bool = True,\n with_expiration: bool = True,\n scheduled_event_id: Optional[int] = None,\n ) -> Invite:\n \"\"\"|coro|\n\n Gets an :class:`.Invite` from a discord.gg URL or ID.\n\n .. note::\n\n If the invite is for a guild you have not joined, the guild and channel\n attributes of the returned :class:`.Invite` will be :class:`.PartialInviteGuild` and\n :class:`.PartialInviteChannel` respectively.\n\n Parameters\n -----------\n url: Union[:class:`.Invite`, :class:`str`]\n The Discord invite ID or URL (must be a discord.gg URL).\n with_counts: :class:`bool`\n Whether to include count information in the invite. This fills the\n :attr:`.Invite.approximate_member_count` and :attr:`.Invite.approximate_presence_count`\n fields.\n with_expiration: :class:`bool`\n Whether to include the expiration date of the invite. This fills the\n :attr:`.Invite.expires_at` field.\n\n .. 
versionadded:: 2.0\n scheduled_event_id: Optional[:class:`int`]\n The ID of the scheduled event this invite is for.\n\n .. note::\n\n It is not possible to provide a url that contains an ``event_id`` parameter\n when using this parameter.\n\n .. versionadded:: 2.0\n\n Raises\n -------\n ValueError\n The url contains an ``event_id``, but ``scheduled_event_id`` has also been provided.\n NotFound\n The invite has expired or is invalid.\n HTTPException\n Getting the invite failed.\n\n Returns\n --------\n :class:`.Invite`\n The invite from the URL/ID.\n \"\"\"\n\n resolved = utils.resolve_invite(url)\n\n if scheduled_event_id and resolved.event:\n raise ValueError('Cannot specify scheduled_event_id and contain an event_id in the url.')\n\n scheduled_event_id = scheduled_event_id or resolved.event\n\n data = await self.http.get_invite(\n resolved.code,\n with_counts=with_counts,\n with_expiration=with_expiration,\n guild_scheduled_event_id=scheduled_event_id,\n )\n return Invite.from_incomplete(state=self._connection, data=data)\n\n async def delete_invite(self, invite: Union[Invite, str], /) -> None:\n \"\"\"|coro|\n\n Revokes an :class:`.Invite`, URL, or ID to an invite.\n\n You must have :attr:`~.Permissions.manage_channels` in\n the associated guild to do this.\n\n .. versionchanged:: 2.0\n\n ``invite`` parameter is now positional-only.\n\n Parameters\n ----------\n invite: Union[:class:`.Invite`, :class:`str`]\n The invite to revoke.\n\n Raises\n -------\n Forbidden\n You do not have permissions to revoke invites.\n NotFound\n The invite is invalid or expired.\n HTTPException\n Revoking the invite failed.\n \"\"\"\n\n resolved = utils.resolve_invite(invite)\n await self.http.delete_invite(resolved.code)\n\n # Miscellaneous stuff\n\n async def fetch_widget(self, guild_id: int, /) -> Widget:\n \"\"\"|coro|\n\n Gets a :class:`.Widget` from a guild ID.\n\n .. note::\n\n The guild must have the widget enabled to get this information.\n\n .. versionchanged:: 2.0\n\n ``guild_id`` parameter is now positional-only.\n\n Parameters\n -----------\n guild_id: :class:`int`\n The ID of the guild.\n\n Raises\n -------\n Forbidden\n The widget for this guild is disabled.\n HTTPException\n Retrieving the widget failed.\n\n Returns\n --------\n :class:`.Widget`\n The guild's widget.\n \"\"\"\n data = await self.http.get_widget(guild_id)\n\n return Widget(state=self._connection, data=data)\n\n async def application_info(self) -> AppInfo:\n \"\"\"|coro|\n\n Retrieves the bot's application information.\n\n Raises\n -------\n HTTPException\n Retrieving the information failed somehow.\n\n Returns\n --------\n :class:`.AppInfo`\n The bot's application information.\n \"\"\"\n data = await self.http.application_info()\n return AppInfo(self._connection, data)\n\n async def fetch_user(self, user_id: int, /) -> User:\n \"\"\"|coro|\n\n Retrieves a :class:`~discord.User` based on their ID.\n You do not have to share any guilds with the user to get this information,\n however many operations do require that you do.\n\n .. note::\n\n This method is an API call. If you have :attr:`discord.Intents.members` and member cache enabled, consider :meth:`get_user` instead.\n\n .. 
versionchanged:: 2.0\n\n ``user_id`` parameter is now positional-only.\n\n Parameters\n -----------\n user_id: :class:`int`\n The user's ID to fetch from.\n\n Raises\n -------\n NotFound\n A user with this ID does not exist.\n HTTPException\n Fetching the user failed.\n\n Returns\n --------\n :class:`~discord.User`\n The user you requested.\n \"\"\"\n data = await self.http.get_user(user_id)\n return User(state=self._connection, data=data)\n\n async def fetch_channel(self, channel_id: int, /) -> Union[GuildChannel, PrivateChannel, Thread]:\n \"\"\"|coro|\n\n Retrieves a :class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`, or :class:`.Thread` with the specified ID.\n\n .. note::\n\n This method is an API call. For general usage, consider :meth:`get_channel` instead.\n\n .. versionadded:: 1.2\n\n .. versionchanged:: 2.0\n\n ``channel_id`` parameter is now positional-only.\n\n Raises\n -------\n InvalidData\n An unknown channel type was received from Discord.\n HTTPException\n Retrieving the channel failed.\n NotFound\n Invalid Channel ID.\n Forbidden\n You do not have permission to fetch this channel.\n\n Returns\n --------\n Union[:class:`.abc.GuildChannel`, :class:`.abc.PrivateChannel`, :class:`.Thread`]\n The channel from the ID.\n \"\"\"\n data = await self.http.get_channel(channel_id)\n\n factory, ch_type = _threaded_channel_factory(data['type'])\n if factory is None:\n raise InvalidData('Unknown channel type {type} for channel ID {id}.'.format_map(data))\n\n if ch_type in (ChannelType.group, ChannelType.private):\n # the factory will be a DMChannel or GroupChannel here\n channel = factory(me=self.user, data=data, state=self._connection) # type: ignore\n else:\n # the factory can't be a DMChannel or GroupChannel here\n guild_id = int(data['guild_id']) # type: ignore\n guild = self._connection._get_or_create_unavailable_guild(guild_id)\n # the factory should be a GuildChannel or Thread\n channel = factory(guild=guild, state=self._connection, data=data) # type: ignore\n\n return channel\n\n async def fetch_webhook(self, webhook_id: int, /) -> Webhook:\n \"\"\"|coro|\n\n Retrieves a :class:`.Webhook` with the specified ID.\n\n .. versionchanged:: 2.0\n\n ``webhook_id`` parameter is now positional-only.\n\n Raises\n --------\n HTTPException\n Retrieving the webhook failed.\n NotFound\n Invalid webhook ID.\n Forbidden\n You do not have permission to fetch this webhook.\n\n Returns\n ---------\n :class:`.Webhook`\n The webhook you requested.\n \"\"\"\n data = await self.http.get_webhook(webhook_id)\n return Webhook.from_state(data, state=self._connection)\n\n async def fetch_sticker(self, sticker_id: int, /) -> Union[StandardSticker, GuildSticker]:\n \"\"\"|coro|\n\n Retrieves a :class:`.Sticker` with the specified ID.\n\n .. versionadded:: 2.0\n\n Raises\n --------\n HTTPException\n Retrieving the sticker failed.\n NotFound\n Invalid sticker ID.\n\n Returns\n --------\n Union[:class:`.StandardSticker`, :class:`.GuildSticker`]\n The sticker you requested.\n \"\"\"\n data = await self.http.get_sticker(sticker_id)\n cls, _ = _sticker_factory(data['type'])\n # The type checker is not smart enough to figure out the constructor is correct\n return cls(state=self._connection, data=data) # type: ignore\n\n async def fetch_premium_sticker_packs(self) -> List[StickerPack]:\n \"\"\"|coro|\n\n Retrieves all available premium sticker packs.\n\n .. 
versionadded:: 2.0\n\n Raises\n -------\n HTTPException\n Retrieving the sticker packs failed.\n\n Returns\n ---------\n List[:class:`.StickerPack`]\n All available premium sticker packs.\n \"\"\"\n data = await self.http.list_premium_sticker_packs()\n return [StickerPack(state=self._connection, data=pack) for pack in data['sticker_packs']]\n\n async def create_dm(self, user: Snowflake) -> DMChannel:\n \"\"\"|coro|\n\n Creates a :class:`.DMChannel` with this user.\n\n This should be rarely called, as this is done transparently for most\n people.\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n user: :class:`~discord.abc.Snowflake`\n The user to create a DM with.\n\n Returns\n -------\n :class:`.DMChannel`\n The channel that was created.\n \"\"\"\n state = self._connection\n found = state._get_private_channel_by_user(user.id)\n if found:\n return found\n\n data = await state.http.start_private_message(user.id)\n return state.add_dm_channel(data)\n\n def add_dynamic_items(self, *items: Type[DynamicItem[Item[Any]]]) -> None:\n r\"\"\"Registers a :class:`~discord.ui.DynamicItem` class for persistent listening.\n\n This method accepts *class types* rather than instances.\n\n .. versionadded:: 2.4\n\n Parameters\n -----------\n \\*items: Type[:class:`~discord.ui.DynamicItem`]\n The classes of dynamic items to add.\n\n Raises\n -------\n TypeError\n The class is not a subclass of :class:`~discord.ui.DynamicItem`.\n \"\"\"\n\n for item in items:\n if not issubclass(item, DynamicItem):\n raise TypeError(f'expected subclass of DynamicItem not {item.__name__}')\n\n self._connection.store_dynamic_items(*items)\n\n def add_view(self, view: View, *, message_id: Optional[int] = None) -> None:\n \"\"\"Registers a :class:`~discord.ui.View` for persistent listening.\n\n This method should be used for when a view is comprised of components\n that last longer than the lifecycle of the program.\n\n .. versionadded:: 2.0\n\n Parameters\n ------------\n view: :class:`discord.ui.View`\n The view to register for dispatching.\n message_id: Optional[:class:`int`]\n The message ID that the view is attached to. This is currently used to\n refresh the view's state during message update events. If not given\n then message update events are not propagated for the view.\n\n Raises\n -------\n TypeError\n A view was not passed.\n ValueError\n The view is not persistent or is already finished. A persistent view has no timeout\n and all their components have an explicitly provided custom_id.\n \"\"\"\n\n if not isinstance(view, View):\n raise TypeError(f'expected an instance of View not {view.__class__.__name__}')\n\n if not view.is_persistent():\n raise ValueError('View is not persistent. Items need to have a custom_id set and View must have no timeout')\n\n if view.is_finished():\n raise ValueError('View is already finished.')\n\n self._connection.store_view(view, message_id)\n\n @property\n def persistent_views(self) -> Sequence[View]:\n \"\"\"Sequence[:class:`.View`]: A sequence of persistent views added to the client.\n\n .. versionadded:: 2.0\n \"\"\"\n return self._connection.persistent_views\n",
"path": "discord/client.py"
},
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nimport asyncio\nfrom collections import deque, OrderedDict\nimport copy\nimport logging\nfrom typing import (\n Dict,\n Optional,\n TYPE_CHECKING,\n Type,\n Union,\n Callable,\n Any,\n List,\n TypeVar,\n Coroutine,\n Sequence,\n Generic,\n Tuple,\n Deque,\n Literal,\n overload,\n)\nimport weakref\nimport inspect\n\nimport os\n\nfrom .guild import Guild\nfrom .activity import BaseActivity\nfrom .user import User, ClientUser\nfrom .emoji import Emoji\nfrom .mentions import AllowedMentions\nfrom .partial_emoji import PartialEmoji\nfrom .message import Message\nfrom .channel import *\nfrom .channel import _channel_factory\nfrom .raw_models import *\nfrom .member import Member\nfrom .role import Role\nfrom .enums import ChannelType, try_enum, Status\nfrom . 
import utils\nfrom .flags import ApplicationFlags, Intents, MemberCacheFlags\nfrom .invite import Invite\nfrom .integrations import _integration_factory\nfrom .interactions import Interaction\nfrom .ui.view import ViewStore, View\nfrom .scheduled_event import ScheduledEvent\nfrom .stage_instance import StageInstance\nfrom .threads import Thread, ThreadMember\nfrom .sticker import GuildSticker\nfrom .automod import AutoModRule, AutoModAction\nfrom .audit_logs import AuditLogEntry\nfrom ._types import ClientT\n\nif TYPE_CHECKING:\n from .abc import PrivateChannel\n from .message import MessageableChannel\n from .guild import GuildChannel\n from .http import HTTPClient\n from .voice_client import VoiceProtocol\n from .gateway import DiscordWebSocket\n from .ui.item import Item\n from .ui.dynamic import DynamicItem\n from .app_commands import CommandTree, Translator\n\n from .types.automod import AutoModerationRule, AutoModerationActionExecution\n from .types.snowflake import Snowflake\n from .types.activity import Activity as ActivityPayload\n from .types.channel import DMChannel as DMChannelPayload\n from .types.user import User as UserPayload, PartialUser as PartialUserPayload\n from .types.emoji import Emoji as EmojiPayload, PartialEmoji as PartialEmojiPayload\n from .types.sticker import GuildSticker as GuildStickerPayload\n from .types.guild import Guild as GuildPayload\n from .types.message import Message as MessagePayload, PartialMessage as PartialMessagePayload\n from .types import gateway as gw\n from .types.command import GuildApplicationCommandPermissions as GuildApplicationCommandPermissionsPayload\n\n T = TypeVar('T')\n Channel = Union[GuildChannel, PrivateChannel, PartialMessageable]\n\n\nclass ChunkRequest:\n def __init__(\n self,\n guild_id: int,\n loop: asyncio.AbstractEventLoop,\n resolver: Callable[[int], Any],\n *,\n cache: bool = True,\n ) -> None:\n self.guild_id: int = guild_id\n self.resolver: Callable[[int], Any] = resolver\n self.loop: asyncio.AbstractEventLoop = loop\n self.cache: bool = cache\n self.nonce: str = os.urandom(16).hex()\n self.buffer: List[Member] = []\n self.waiters: List[asyncio.Future[List[Member]]] = []\n\n def add_members(self, members: List[Member]) -> None:\n self.buffer.extend(members)\n if self.cache:\n guild = self.resolver(self.guild_id)\n if guild is None:\n return\n\n for member in members:\n existing = guild.get_member(member.id)\n if existing is None or existing.joined_at is None:\n guild._add_member(member)\n\n async def wait(self) -> List[Member]:\n future = self.loop.create_future()\n self.waiters.append(future)\n try:\n return await future\n finally:\n self.waiters.remove(future)\n\n def get_future(self) -> asyncio.Future[List[Member]]:\n future = self.loop.create_future()\n self.waiters.append(future)\n return future\n\n def done(self) -> None:\n for future in self.waiters:\n if not future.done():\n future.set_result(self.buffer)\n\n\n_log = logging.getLogger(__name__)\n\n\nasync def logging_coroutine(coroutine: Coroutine[Any, Any, T], *, info: str) -> Optional[T]:\n try:\n await coroutine\n except Exception:\n _log.exception('Exception occurred during %s', info)\n\n\nclass ConnectionState(Generic[ClientT]):\n if TYPE_CHECKING:\n _get_websocket: Callable[..., DiscordWebSocket]\n _get_client: Callable[..., ClientT]\n _parsers: Dict[str, Callable[[Dict[str, Any]], None]]\n\n def __init__(\n self,\n *,\n dispatch: Callable[..., Any],\n handlers: Dict[str, Callable[..., Any]],\n hooks: Dict[str, Callable[..., Coroutine[Any, Any, 
Any]]],\n http: HTTPClient,\n **options: Any,\n ) -> None:\n # Set later, after Client.login\n self.loop: asyncio.AbstractEventLoop = utils.MISSING\n self.http: HTTPClient = http\n self.max_messages: Optional[int] = options.get('max_messages', 1000)\n if self.max_messages is not None and self.max_messages <= 0:\n self.max_messages = 1000\n\n self.dispatch: Callable[..., Any] = dispatch\n self.handlers: Dict[str, Callable[..., Any]] = handlers\n self.hooks: Dict[str, Callable[..., Coroutine[Any, Any, Any]]] = hooks\n self.shard_count: Optional[int] = None\n self._ready_task: Optional[asyncio.Task] = None\n self.application_id: Optional[int] = utils._get_as_snowflake(options, 'application_id')\n self.application_flags: ApplicationFlags = utils.MISSING\n self.heartbeat_timeout: float = options.get('heartbeat_timeout', 60.0)\n self.guild_ready_timeout: float = options.get('guild_ready_timeout', 2.0)\n if self.guild_ready_timeout < 0:\n raise ValueError('guild_ready_timeout cannot be negative')\n\n allowed_mentions = options.get('allowed_mentions')\n\n if allowed_mentions is not None and not isinstance(allowed_mentions, AllowedMentions):\n raise TypeError('allowed_mentions parameter must be AllowedMentions')\n\n self.allowed_mentions: Optional[AllowedMentions] = allowed_mentions\n self._chunk_requests: Dict[Union[int, str], ChunkRequest] = {}\n\n activity = options.get('activity', None)\n if activity:\n if not isinstance(activity, BaseActivity):\n raise TypeError('activity parameter must derive from BaseActivity.')\n\n activity = activity.to_dict()\n\n status = options.get('status', None)\n if status:\n if status is Status.offline:\n status = 'invisible'\n else:\n status = str(status)\n\n intents = options.get('intents', None)\n if intents is not None:\n if not isinstance(intents, Intents):\n raise TypeError(f'intents parameter must be Intents not {type(intents)!r}')\n else:\n intents = Intents.default()\n\n if not intents.guilds:\n _log.warning('Guilds intent seems to be disabled. 
This may cause state related issues.')\n\n self._chunk_guilds: bool = options.get('chunk_guilds_at_startup', intents.members)\n\n # Ensure these two are set properly\n if not intents.members and self._chunk_guilds:\n raise ValueError('Intents.members must be enabled to chunk guilds at startup.')\n\n cache_flags = options.get('member_cache_flags', None)\n if cache_flags is None:\n cache_flags = MemberCacheFlags.from_intents(intents)\n else:\n if not isinstance(cache_flags, MemberCacheFlags):\n raise TypeError(f'member_cache_flags parameter must be MemberCacheFlags not {type(cache_flags)!r}')\n\n cache_flags._verify_intents(intents)\n\n self.member_cache_flags: MemberCacheFlags = cache_flags\n self._activity: Optional[ActivityPayload] = activity\n self._status: Optional[str] = status\n self._intents: Intents = intents\n self._command_tree: Optional[CommandTree] = None\n self._translator: Optional[Translator] = None\n\n if not intents.members or cache_flags._empty:\n self.store_user = self.store_user_no_intents\n\n self.parsers: Dict[str, Callable[[Any], None]]\n self.parsers = parsers = {}\n for attr, func in inspect.getmembers(self):\n if attr.startswith('parse_'):\n parsers[attr[6:].upper()] = func\n\n self.clear()\n\n # For some reason Discord still sends emoji/sticker data in payloads\n # This makes it hard to actually swap out the appropriate store methods\n # So this is checked instead, it's a small penalty to pay\n @property\n def cache_guild_expressions(self) -> bool:\n return self._intents.emojis_and_stickers\n\n async def close(self) -> None:\n for voice in self.voice_clients:\n try:\n await voice.disconnect(force=True)\n except Exception:\n # if an error happens during disconnects, disregard it.\n pass\n\n if self._translator:\n await self._translator.unload()\n\n # Purposefully don't call `clear` because users rely on cache being available post-close\n\n def clear(self, *, views: bool = True) -> None:\n self.user: Optional[ClientUser] = None\n self._users: weakref.WeakValueDictionary[int, User] = weakref.WeakValueDictionary()\n self._emojis: Dict[int, Emoji] = {}\n self._stickers: Dict[int, GuildSticker] = {}\n self._guilds: Dict[int, Guild] = {}\n if views:\n self._view_store: ViewStore = ViewStore(self)\n\n self._voice_clients: Dict[int, VoiceProtocol] = {}\n\n # LRU of max size 128\n self._private_channels: OrderedDict[int, PrivateChannel] = OrderedDict()\n # extra dict to look up private channels by user id\n self._private_channels_by_user: Dict[int, DMChannel] = {}\n if self.max_messages is not None:\n self._messages: Optional[Deque[Message]] = deque(maxlen=self.max_messages)\n else:\n self._messages: Optional[Deque[Message]] = None\n\n def process_chunk_requests(self, guild_id: int, nonce: Optional[str], members: List[Member], complete: bool) -> None:\n removed = []\n for key, request in self._chunk_requests.items():\n if request.guild_id == guild_id and request.nonce == nonce:\n request.add_members(members)\n if complete:\n request.done()\n removed.append(key)\n\n for key in removed:\n del self._chunk_requests[key]\n\n def call_handlers(self, key: str, *args: Any, **kwargs: Any) -> None:\n try:\n func = self.handlers[key]\n except KeyError:\n pass\n else:\n func(*args, **kwargs)\n\n async def call_hooks(self, key: str, *args: Any, **kwargs: Any) -> None:\n try:\n coro = self.hooks[key]\n except KeyError:\n pass\n else:\n await coro(*args, **kwargs)\n\n @property\n def self_id(self) -> Optional[int]:\n u = self.user\n return u.id if u else None\n\n @property\n def 
intents(self) -> Intents:\n ret = Intents.none()\n ret.value = self._intents.value\n return ret\n\n @property\n def voice_clients(self) -> List[VoiceProtocol]:\n return list(self._voice_clients.values())\n\n def _get_voice_client(self, guild_id: Optional[int]) -> Optional[VoiceProtocol]:\n # the keys of self._voice_clients are ints\n return self._voice_clients.get(guild_id) # type: ignore\n\n def _add_voice_client(self, guild_id: int, voice: VoiceProtocol) -> None:\n self._voice_clients[guild_id] = voice\n\n def _remove_voice_client(self, guild_id: int) -> None:\n self._voice_clients.pop(guild_id, None)\n\n def _update_references(self, ws: DiscordWebSocket) -> None:\n for vc in self.voice_clients:\n vc.main_ws = ws # type: ignore # Silencing the unknown attribute (ok at runtime).\n\n def store_user(self, data: Union[UserPayload, PartialUserPayload], *, cache: bool = True) -> User:\n # this way is 300% faster than `dict.setdefault`.\n user_id = int(data['id'])\n try:\n return self._users[user_id]\n except KeyError:\n user = User(state=self, data=data)\n if cache:\n self._users[user_id] = user\n return user\n\n def store_user_no_intents(self, data: Union[UserPayload, PartialUserPayload], *, cache: bool = True) -> User:\n return User(state=self, data=data)\n\n def create_user(self, data: Union[UserPayload, PartialUserPayload]) -> User:\n return User(state=self, data=data)\n\n def get_user(self, id: int) -> Optional[User]:\n return self._users.get(id)\n\n def store_emoji(self, guild: Guild, data: EmojiPayload) -> Emoji:\n # the id will be present here\n emoji_id = int(data['id']) # type: ignore\n self._emojis[emoji_id] = emoji = Emoji(guild=guild, state=self, data=data)\n return emoji\n\n def store_sticker(self, guild: Guild, data: GuildStickerPayload) -> GuildSticker:\n sticker_id = int(data['id'])\n self._stickers[sticker_id] = sticker = GuildSticker(state=self, data=data)\n return sticker\n\n def store_view(self, view: View, message_id: Optional[int] = None, interaction_id: Optional[int] = None) -> None:\n if interaction_id is not None:\n self._view_store.remove_interaction_mapping(interaction_id)\n self._view_store.add_view(view, message_id)\n\n def prevent_view_updates_for(self, message_id: int) -> Optional[View]:\n return self._view_store.remove_message_tracking(message_id)\n\n def store_dynamic_items(self, *items: Type[DynamicItem[Item[Any]]]) -> None:\n self._view_store.add_dynamic_items(*items)\n\n @property\n def persistent_views(self) -> Sequence[View]:\n return self._view_store.persistent_views\n\n @property\n def guilds(self) -> Sequence[Guild]:\n return utils.SequenceProxy(self._guilds.values())\n\n def _get_guild(self, guild_id: Optional[int]) -> Optional[Guild]:\n # the keys of self._guilds are ints\n return self._guilds.get(guild_id) # type: ignore\n\n def _get_or_create_unavailable_guild(self, guild_id: int) -> Guild:\n return self._guilds.get(guild_id) or Guild._create_unavailable(state=self, guild_id=guild_id)\n\n def _add_guild(self, guild: Guild) -> None:\n self._guilds[guild.id] = guild\n\n def _remove_guild(self, guild: Guild) -> None:\n self._guilds.pop(guild.id, None)\n\n for emoji in guild.emojis:\n self._emojis.pop(emoji.id, None)\n\n for sticker in guild.stickers:\n self._stickers.pop(sticker.id, None)\n\n del guild\n\n @property\n def emojis(self) -> Sequence[Emoji]:\n return utils.SequenceProxy(self._emojis.values())\n\n @property\n def stickers(self) -> Sequence[GuildSticker]:\n return utils.SequenceProxy(self._stickers.values())\n\n def get_emoji(self, 
emoji_id: Optional[int]) -> Optional[Emoji]:\n # the keys of self._emojis are ints\n return self._emojis.get(emoji_id) # type: ignore\n\n def get_sticker(self, sticker_id: Optional[int]) -> Optional[GuildSticker]:\n # the keys of self._stickers are ints\n return self._stickers.get(sticker_id) # type: ignore\n\n @property\n def private_channels(self) -> Sequence[PrivateChannel]:\n return utils.SequenceProxy(self._private_channels.values())\n\n def _get_private_channel(self, channel_id: Optional[int]) -> Optional[PrivateChannel]:\n try:\n # the keys of self._private_channels are ints\n value = self._private_channels[channel_id] # type: ignore\n except KeyError:\n return None\n else:\n # Type narrowing can't figure out that channel_id isn't None here\n self._private_channels.move_to_end(channel_id) # type: ignore\n return value\n\n def _get_private_channel_by_user(self, user_id: Optional[int]) -> Optional[DMChannel]:\n # the keys of self._private_channels are ints\n return self._private_channels_by_user.get(user_id) # type: ignore\n\n def _add_private_channel(self, channel: PrivateChannel) -> None:\n channel_id = channel.id\n self._private_channels[channel_id] = channel\n\n if len(self._private_channels) > 128:\n _, to_remove = self._private_channels.popitem(last=False)\n if isinstance(to_remove, DMChannel) and to_remove.recipient:\n self._private_channels_by_user.pop(to_remove.recipient.id, None)\n\n if isinstance(channel, DMChannel) and channel.recipient:\n self._private_channels_by_user[channel.recipient.id] = channel\n\n def add_dm_channel(self, data: DMChannelPayload) -> DMChannel:\n # self.user is *always* cached when this is called\n channel = DMChannel(me=self.user, state=self, data=data) # type: ignore\n self._add_private_channel(channel)\n return channel\n\n def _remove_private_channel(self, channel: PrivateChannel) -> None:\n self._private_channels.pop(channel.id, None)\n if isinstance(channel, DMChannel):\n recipient = channel.recipient\n if recipient is not None:\n self._private_channels_by_user.pop(recipient.id, None)\n\n def _get_message(self, msg_id: Optional[int]) -> Optional[Message]:\n return utils.find(lambda m: m.id == msg_id, reversed(self._messages)) if self._messages else None\n\n def _add_guild_from_data(self, data: GuildPayload) -> Guild:\n guild = Guild(data=data, state=self)\n self._add_guild(guild)\n return guild\n\n def _guild_needs_chunking(self, guild: Guild) -> bool:\n # If presences are enabled then we get back the old guild.large behaviour\n return self._chunk_guilds and not guild.chunked and not (self._intents.presences and not guild.large)\n\n def _get_guild_channel(\n self, data: PartialMessagePayload, guild_id: Optional[int] = None\n ) -> Tuple[Union[Channel, Thread], Optional[Guild]]:\n channel_id = int(data['channel_id'])\n try:\n guild_id = guild_id or int(data['guild_id'])\n guild = self._get_guild(guild_id)\n except KeyError:\n channel = DMChannel._from_message(self, channel_id)\n guild = None\n else:\n channel = guild and guild._resolve_channel(channel_id)\n\n return channel or PartialMessageable(state=self, guild_id=guild_id, id=channel_id), guild\n\n async def chunker(\n self, guild_id: int, query: str = '', limit: int = 0, presences: bool = False, *, nonce: Optional[str] = None\n ) -> None:\n ws = self._get_websocket(guild_id) # This is ignored upstream\n await ws.request_chunks(guild_id, query=query, limit=limit, presences=presences, nonce=nonce)\n\n async def query_members(\n self, guild: Guild, query: Optional[str], limit: int, user_ids: 
Optional[List[int]], cache: bool, presences: bool\n ) -> List[Member]:\n guild_id = guild.id\n ws = self._get_websocket(guild_id)\n if ws is None:\n raise RuntimeError('Somehow do not have a websocket for this guild_id')\n\n request = ChunkRequest(guild.id, self.loop, self._get_guild, cache=cache)\n self._chunk_requests[request.nonce] = request\n\n try:\n # start the query operation\n await ws.request_chunks(\n guild_id, query=query, limit=limit, user_ids=user_ids, presences=presences, nonce=request.nonce\n )\n return await asyncio.wait_for(request.wait(), timeout=30.0)\n except asyncio.TimeoutError:\n _log.warning('Timed out waiting for chunks with query %r and limit %d for guild_id %d', query, limit, guild_id)\n raise\n\n async def _delay_ready(self) -> None:\n try:\n states = []\n while True:\n # this snippet of code is basically waiting N seconds\n # until the last GUILD_CREATE was sent\n try:\n guild = await asyncio.wait_for(self._ready_state.get(), timeout=self.guild_ready_timeout)\n except asyncio.TimeoutError:\n break\n else:\n if self._guild_needs_chunking(guild):\n future = await self.chunk_guild(guild, wait=False)\n states.append((guild, future))\n else:\n if guild.unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n for guild, future in states:\n timeout = self._chunk_timeout(guild)\n\n try:\n await asyncio.wait_for(future, timeout=timeout)\n except asyncio.TimeoutError:\n _log.warning('Shard ID %s timed out waiting for chunks for guild_id %s.', guild.shard_id, guild.id)\n\n if guild.unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n # remove the state\n try:\n del self._ready_state\n except AttributeError:\n pass # already been deleted somehow\n\n except asyncio.CancelledError:\n pass\n else:\n # dispatch the event\n self.call_handlers('ready')\n self.dispatch('ready')\n finally:\n self._ready_task = None\n\n def parse_ready(self, data: gw.ReadyEvent) -> None:\n if self._ready_task is not None:\n self._ready_task.cancel()\n\n self._ready_state: asyncio.Queue[Guild] = asyncio.Queue()\n self.clear(views=False)\n self.user = user = ClientUser(state=self, data=data['user'])\n self._users[user.id] = user # type: ignore\n\n if self.application_id is None:\n try:\n application = data['application']\n except KeyError:\n pass\n else:\n self.application_id = utils._get_as_snowflake(application, 'id')\n self.application_flags: ApplicationFlags = ApplicationFlags._from_value(application['flags'])\n\n for guild_data in data['guilds']:\n self._add_guild_from_data(guild_data) # type: ignore\n\n self.dispatch('connect')\n self._ready_task = asyncio.create_task(self._delay_ready())\n\n def parse_resumed(self, data: gw.ResumedEvent) -> None:\n self.dispatch('resumed')\n\n def parse_message_create(self, data: gw.MessageCreateEvent) -> None:\n channel, _ = self._get_guild_channel(data)\n # channel would be the correct type here\n message = Message(channel=channel, data=data, state=self) # type: ignore\n self.dispatch('message', message)\n if self._messages is not None:\n self._messages.append(message)\n # we ensure that the channel is either a TextChannel, VoiceChannel, or Thread\n if channel and channel.__class__ in (TextChannel, VoiceChannel, Thread, StageChannel):\n channel.last_message_id = message.id # type: ignore\n\n def parse_message_delete(self, data: gw.MessageDeleteEvent) -> None:\n raw = RawMessageDeleteEvent(data)\n found = self._get_message(raw.message_id)\n 
raw.cached_message = found\n self.dispatch('raw_message_delete', raw)\n if self._messages is not None and found is not None:\n self.dispatch('message_delete', found)\n self._messages.remove(found)\n\n def parse_message_delete_bulk(self, data: gw.MessageDeleteBulkEvent) -> None:\n raw = RawBulkMessageDeleteEvent(data)\n if self._messages:\n found_messages = [message for message in self._messages if message.id in raw.message_ids]\n else:\n found_messages = []\n raw.cached_messages = found_messages\n self.dispatch('raw_bulk_message_delete', raw)\n if found_messages:\n self.dispatch('bulk_message_delete', found_messages)\n for msg in found_messages:\n # self._messages won't be None here\n self._messages.remove(msg) # type: ignore\n\n def parse_message_update(self, data: gw.MessageUpdateEvent) -> None:\n raw = RawMessageUpdateEvent(data)\n message = self._get_message(raw.message_id)\n if message is not None:\n older_message = copy.copy(message)\n raw.cached_message = older_message\n self.dispatch('raw_message_edit', raw)\n message._update(data)\n # Coerce the `after` parameter to take the new updated Member\n # ref: #5999\n older_message.author = message.author\n self.dispatch('message_edit', older_message, message)\n else:\n self.dispatch('raw_message_edit', raw)\n\n if 'components' in data:\n try:\n entity_id = int(data['interaction']['id'])\n except (KeyError, ValueError):\n entity_id = raw.message_id\n\n if self._view_store.is_message_tracked(entity_id):\n self._view_store.update_from_message(entity_id, data['components'])\n\n def parse_message_reaction_add(self, data: gw.MessageReactionAddEvent) -> None:\n emoji = PartialEmoji.from_dict(data['emoji'])\n emoji._state = self\n raw = RawReactionActionEvent(data, emoji, 'REACTION_ADD')\n\n member_data = data.get('member')\n if member_data:\n guild = self._get_guild(raw.guild_id)\n if guild is not None:\n raw.member = Member(data=member_data, guild=guild, state=self)\n else:\n raw.member = None\n else:\n raw.member = None\n self.dispatch('raw_reaction_add', raw)\n\n # rich interface here\n message = self._get_message(raw.message_id)\n if message is not None:\n emoji = self._upgrade_partial_emoji(emoji)\n reaction = message._add_reaction(data, emoji, raw.user_id)\n user = raw.member or self._get_reaction_user(message.channel, raw.user_id)\n\n if user:\n self.dispatch('reaction_add', reaction, user)\n\n def parse_message_reaction_remove_all(self, data: gw.MessageReactionRemoveAllEvent) -> None:\n raw = RawReactionClearEvent(data)\n self.dispatch('raw_reaction_clear', raw)\n\n message = self._get_message(raw.message_id)\n if message is not None:\n old_reactions = message.reactions.copy()\n message.reactions.clear()\n self.dispatch('reaction_clear', message, old_reactions)\n\n def parse_message_reaction_remove(self, data: gw.MessageReactionRemoveEvent) -> None:\n emoji = PartialEmoji.from_dict(data['emoji'])\n emoji._state = self\n raw = RawReactionActionEvent(data, emoji, 'REACTION_REMOVE')\n self.dispatch('raw_reaction_remove', raw)\n\n message = self._get_message(raw.message_id)\n if message is not None:\n emoji = self._upgrade_partial_emoji(emoji)\n try:\n reaction = message._remove_reaction(data, emoji, raw.user_id)\n except (AttributeError, ValueError): # eventual consistency lol\n pass\n else:\n user = self._get_reaction_user(message.channel, raw.user_id)\n if user:\n self.dispatch('reaction_remove', reaction, user)\n\n def parse_message_reaction_remove_emoji(self, data: gw.MessageReactionRemoveEmojiEvent) -> None:\n emoji = 
PartialEmoji.from_dict(data['emoji'])\n emoji._state = self\n raw = RawReactionClearEmojiEvent(data, emoji)\n self.dispatch('raw_reaction_clear_emoji', raw)\n\n message = self._get_message(raw.message_id)\n if message is not None:\n try:\n reaction = message._clear_emoji(emoji)\n except (AttributeError, ValueError): # eventual consistency lol\n pass\n else:\n if reaction:\n self.dispatch('reaction_clear_emoji', reaction)\n\n def parse_interaction_create(self, data: gw.InteractionCreateEvent) -> None:\n interaction = Interaction(data=data, state=self)\n if data['type'] in (2, 4) and self._command_tree: # application command and auto complete\n self._command_tree._from_interaction(interaction)\n elif data['type'] == 3: # interaction component\n # These keys are always there for this interaction type\n inner_data = data['data']\n custom_id = inner_data['custom_id']\n component_type = inner_data['component_type']\n self._view_store.dispatch_view(component_type, custom_id, interaction)\n elif data['type'] == 5: # modal submit\n # These keys are always there for this interaction type\n inner_data = data['data']\n custom_id = inner_data['custom_id']\n components = inner_data['components']\n self._view_store.dispatch_modal(custom_id, interaction, components)\n self.dispatch('interaction', interaction)\n\n def parse_presence_update(self, data: gw.PresenceUpdateEvent) -> None:\n guild_id = utils._get_as_snowflake(data, 'guild_id')\n # guild_id won't be None here\n guild = self._get_guild(guild_id)\n if guild is None:\n _log.debug('PRESENCE_UPDATE referencing an unknown guild ID: %s. Discarding.', guild_id)\n return\n\n user = data['user']\n member_id = int(user['id'])\n member = guild.get_member(member_id)\n if member is None:\n _log.debug('PRESENCE_UPDATE referencing an unknown member ID: %s. 
Discarding', member_id)\n return\n\n old_member = Member._copy(member)\n user_update = member._presence_update(data=data, user=user)\n if user_update:\n self.dispatch('user_update', user_update[0], user_update[1])\n\n self.dispatch('presence_update', old_member, member)\n\n def parse_user_update(self, data: gw.UserUpdateEvent) -> None:\n if self.user:\n self.user._update(data)\n\n def parse_invite_create(self, data: gw.InviteCreateEvent) -> None:\n invite = Invite.from_gateway(state=self, data=data)\n self.dispatch('invite_create', invite)\n\n def parse_invite_delete(self, data: gw.InviteDeleteEvent) -> None:\n invite = Invite.from_gateway(state=self, data=data)\n self.dispatch('invite_delete', invite)\n\n def parse_channel_delete(self, data: gw.ChannelDeleteEvent) -> None:\n guild = self._get_guild(utils._get_as_snowflake(data, 'guild_id'))\n channel_id = int(data['id'])\n if guild is not None:\n channel = guild.get_channel(channel_id)\n if channel is not None:\n guild._remove_channel(channel)\n self.dispatch('guild_channel_delete', channel)\n\n if channel.type in (ChannelType.voice, ChannelType.stage_voice):\n for s in guild.scheduled_events:\n if s.channel_id == channel.id:\n guild._scheduled_events.pop(s.id)\n self.dispatch('scheduled_event_delete', s)\n\n def parse_channel_update(self, data: gw.ChannelUpdateEvent) -> None:\n channel_type = try_enum(ChannelType, data.get('type'))\n channel_id = int(data['id'])\n if channel_type is ChannelType.group:\n channel = self._get_private_channel(channel_id)\n if channel is not None:\n old_channel = copy.copy(channel)\n # the channel is a GroupChannel rather than PrivateChannel\n channel._update_group(data) # type: ignore\n self.dispatch('private_channel_update', old_channel, channel)\n return\n else:\n _log.debug('CHANNEL_UPDATE referencing an unknown channel ID: %s. Discarding.', channel_id)\n\n guild_id = utils._get_as_snowflake(data, 'guild_id')\n guild = self._get_guild(guild_id)\n if guild is not None:\n channel = guild.get_channel(channel_id)\n if channel is not None:\n old_channel = copy.copy(channel)\n channel._update(guild, data) # type: ignore # the data payload varies based on the channel type.\n self.dispatch('guild_channel_update', old_channel, channel)\n else:\n _log.debug('CHANNEL_UPDATE referencing an unknown channel ID: %s. Discarding.', channel_id)\n else:\n _log.debug('CHANNEL_UPDATE referencing an unknown guild ID: %s. Discarding.', guild_id)\n\n def parse_channel_create(self, data: gw.ChannelCreateEvent) -> None:\n factory, ch_type = _channel_factory(data['type'])\n if factory is None:\n _log.debug('CHANNEL_CREATE referencing an unknown channel type %s. Discarding.', data['type'])\n return\n\n guild_id = utils._get_as_snowflake(data, 'guild_id')\n guild = self._get_guild(guild_id)\n if guild is not None:\n # the factory can't be a DMChannel or GroupChannel here\n channel = factory(guild=guild, state=self, data=data) # type: ignore\n guild._add_channel(channel) # type: ignore\n self.dispatch('guild_channel_create', channel)\n else:\n _log.debug('CHANNEL_CREATE referencing an unknown guild ID: %s. 
Discarding.', guild_id)\n return\n\n def parse_channel_pins_update(self, data: gw.ChannelPinsUpdateEvent) -> None:\n channel_id = int(data['channel_id'])\n try:\n guild = self._get_guild(int(data['guild_id']))\n except KeyError:\n guild = None\n channel = self._get_private_channel(channel_id)\n else:\n channel = guild and guild._resolve_channel(channel_id)\n\n if channel is None:\n _log.debug('CHANNEL_PINS_UPDATE referencing an unknown channel ID: %s. Discarding.', channel_id)\n return\n\n last_pin = utils.parse_time(data.get('last_pin_timestamp'))\n\n if guild is None:\n self.dispatch('private_channel_pins_update', channel, last_pin)\n else:\n self.dispatch('guild_channel_pins_update', channel, last_pin)\n\n def parse_thread_create(self, data: gw.ThreadCreateEvent) -> None:\n guild_id = int(data['guild_id'])\n guild: Optional[Guild] = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_CREATE referencing an unknown guild ID: %s. Discarding', guild_id)\n return\n\n thread = Thread(guild=guild, state=guild._state, data=data)\n has_thread = guild.get_thread(thread.id)\n guild._add_thread(thread)\n if not has_thread:\n if data.get('newly_created'):\n if thread.parent.__class__ is ForumChannel:\n thread.parent.last_message_id = thread.id # type: ignore\n\n self.dispatch('thread_create', thread)\n else:\n self.dispatch('thread_join', thread)\n\n def parse_thread_update(self, data: gw.ThreadUpdateEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_UPDATE referencing an unknown guild ID: %s. Discarding', guild_id)\n return\n\n raw = RawThreadUpdateEvent(data)\n raw.thread = thread = guild.get_thread(raw.thread_id)\n self.dispatch('raw_thread_update', raw)\n if thread is not None:\n old = copy.copy(thread)\n thread._update(data)\n if thread.archived:\n guild._remove_thread(thread)\n self.dispatch('thread_update', old, thread)\n else:\n thread = Thread(guild=guild, state=guild._state, data=data)\n if not thread.archived:\n guild._add_thread(thread)\n self.dispatch('thread_join', thread)\n\n def parse_thread_delete(self, data: gw.ThreadDeleteEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_DELETE referencing an unknown guild ID: %s. Discarding', guild_id)\n return\n\n raw = RawThreadDeleteEvent(data)\n raw.thread = thread = guild.get_thread(raw.thread_id)\n self.dispatch('raw_thread_delete', raw)\n\n if thread is not None:\n guild._remove_thread(thread)\n self.dispatch('thread_delete', thread)\n\n def parse_thread_list_sync(self, data: gw.ThreadListSyncEvent) -> None:\n guild_id = int(data['guild_id'])\n guild: Optional[Guild] = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_LIST_SYNC referencing an unknown guild ID: %s. 
Discarding', guild_id)\n return\n\n try:\n channel_ids = {int(i) for i in data['channel_ids']}\n except KeyError:\n # If not provided, then the entire guild is being synced\n # So all previous thread data should be overwritten\n previous_threads = guild._threads.copy()\n guild._clear_threads()\n else:\n previous_threads = guild._filter_threads(channel_ids)\n\n threads = {d['id']: guild._store_thread(d) for d in data.get('threads', [])}\n\n for member in data.get('members', []):\n try:\n # note: member['id'] is the thread_id\n thread = threads[member['id']]\n except KeyError:\n continue\n else:\n thread._add_member(ThreadMember(thread, member))\n\n for thread in threads.values():\n old = previous_threads.pop(thread.id, None)\n if old is None:\n self.dispatch('thread_join', thread)\n\n for thread in previous_threads.values():\n self.dispatch('thread_remove', thread)\n\n def parse_thread_member_update(self, data: gw.ThreadMemberUpdate) -> None:\n guild_id = int(data['guild_id'])\n guild: Optional[Guild] = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_MEMBER_UPDATE referencing an unknown guild ID: %s. Discarding', guild_id)\n return\n\n thread_id = int(data['id'])\n thread: Optional[Thread] = guild.get_thread(thread_id)\n if thread is None:\n _log.debug('THREAD_MEMBER_UPDATE referencing an unknown thread ID: %s. Discarding', thread_id)\n return\n\n member = ThreadMember(thread, data)\n thread.me = member\n\n def parse_thread_members_update(self, data: gw.ThreadMembersUpdate) -> None:\n guild_id = int(data['guild_id'])\n guild: Optional[Guild] = self._get_guild(guild_id)\n if guild is None:\n _log.debug('THREAD_MEMBERS_UPDATE referencing an unknown guild ID: %s. Discarding', guild_id)\n return\n\n thread_id = int(data['id'])\n thread: Optional[Thread] = guild.get_thread(thread_id)\n raw = RawThreadMembersUpdate(data)\n if thread is None:\n _log.debug('THREAD_MEMBERS_UPDATE referencing an unknown thread ID: %s. Discarding', thread_id)\n return\n\n added_members = [ThreadMember(thread, d) for d in data.get('added_members', [])]\n removed_member_ids = [int(x) for x in data.get('removed_member_ids', [])]\n self_id = self.self_id\n for member in added_members:\n if member.id != self_id:\n thread._add_member(member)\n self.dispatch('thread_member_join', member)\n else:\n thread.me = member\n self.dispatch('thread_join', thread)\n\n for member_id in removed_member_ids:\n if member_id != self_id:\n member = thread._pop_member(member_id)\n self.dispatch('raw_thread_member_remove', raw)\n if member is not None:\n self.dispatch('thread_member_remove', member)\n else:\n self.dispatch('thread_remove', thread)\n\n def parse_guild_member_add(self, data: gw.GuildMemberAddEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('GUILD_MEMBER_ADD referencing an unknown guild ID: %s. 
Discarding.', data['guild_id'])\n return\n\n member = Member(guild=guild, data=data, state=self)\n if self.member_cache_flags.joined:\n guild._add_member(member)\n\n if guild._member_count is not None:\n guild._member_count += 1\n\n self.dispatch('member_join', member)\n\n def parse_guild_member_remove(self, data: gw.GuildMemberRemoveEvent) -> None:\n user = self.store_user(data['user'])\n raw = RawMemberRemoveEvent(data, user)\n\n guild = self._get_guild(raw.guild_id)\n if guild is not None:\n if guild._member_count is not None:\n guild._member_count -= 1\n\n member = guild.get_member(user.id)\n if member is not None:\n raw.user = member\n guild._remove_member(member)\n self.dispatch('member_remove', member)\n else:\n _log.debug('GUILD_MEMBER_REMOVE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n\n self.dispatch('raw_member_remove', raw)\n\n def parse_guild_member_update(self, data: gw.GuildMemberUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n user = data['user']\n user_id = int(user['id'])\n if guild is None:\n _log.debug('GUILD_MEMBER_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n member = guild.get_member(user_id)\n if member is not None:\n old_member = Member._copy(member)\n member._update(data)\n user_update = member._update_inner_user(user)\n if user_update:\n self.dispatch('user_update', user_update[0], user_update[1])\n\n self.dispatch('member_update', old_member, member)\n else:\n if self.member_cache_flags.joined:\n member = Member(data=data, guild=guild, state=self) # type: ignore # the data is not complete, contains a delta of values\n\n # Force an update on the inner user if necessary\n user_update = member._update_inner_user(user)\n if user_update:\n self.dispatch('user_update', user_update[0], user_update[1])\n\n guild._add_member(member)\n _log.debug('GUILD_MEMBER_UPDATE referencing an unknown member ID: %s. Discarding.', user_id)\n\n def parse_guild_emojis_update(self, data: gw.GuildEmojisUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('GUILD_EMOJIS_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n before_emojis = guild.emojis\n for emoji in before_emojis:\n self._emojis.pop(emoji.id, None)\n # guild won't be None here\n guild.emojis = tuple(map(lambda d: self.store_emoji(guild, d), data['emojis']))\n self.dispatch('guild_emojis_update', guild, before_emojis, guild.emojis)\n\n def parse_guild_stickers_update(self, data: gw.GuildStickersUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('GUILD_STICKERS_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n before_stickers = guild.stickers\n for emoji in before_stickers:\n self._stickers.pop(emoji.id, None)\n\n guild.stickers = tuple(map(lambda d: self.store_sticker(guild, d), data['stickers']))\n self.dispatch('guild_stickers_update', guild, before_stickers, guild.stickers)\n\n def parse_guild_audit_log_entry_create(self, data: gw.GuildAuditLogEntryCreate) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('GUILD_AUDIT_LOG_ENTRY_CREATE referencing an unknown guild ID: %s. 
Discarding.', data['guild_id'])\n return\n\n entry = AuditLogEntry(\n users=self._users,\n integrations={},\n app_commands={},\n automod_rules={},\n webhooks={},\n data=data,\n guild=guild,\n )\n\n self.dispatch('audit_log_entry_create', entry)\n\n def parse_auto_moderation_rule_create(self, data: AutoModerationRule) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('AUTO_MODERATION_RULE_CREATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n rule = AutoModRule(data=data, guild=guild, state=self)\n\n self.dispatch('automod_rule_create', rule)\n\n def parse_auto_moderation_rule_update(self, data: AutoModerationRule) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('AUTO_MODERATION_RULE_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n rule = AutoModRule(data=data, guild=guild, state=self)\n\n self.dispatch('automod_rule_update', rule)\n\n def parse_auto_moderation_rule_delete(self, data: AutoModerationRule) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('AUTO_MODERATION_RULE_DELETE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n rule = AutoModRule(data=data, guild=guild, state=self)\n\n self.dispatch('automod_rule_delete', rule)\n\n def parse_auto_moderation_action_execution(self, data: AutoModerationActionExecution) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('AUTO_MODERATION_ACTION_EXECUTION referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n execution = AutoModAction(data=data, state=self)\n\n self.dispatch('automod_action', execution)\n\n def _get_create_guild(self, data: gw.GuildCreateEvent) -> Guild:\n if data.get('unavailable') is False:\n # GUILD_CREATE with unavailable in the response\n # usually means that the guild has become available\n # and is therefore in the cache\n guild = self._get_guild(int(data['id']))\n if guild is not None:\n guild.unavailable = False\n guild._from_data(data)\n return guild\n\n return self._add_guild_from_data(data)\n\n def is_guild_evicted(self, guild: Guild) -> bool:\n return guild.id not in self._guilds\n\n @overload\n async def chunk_guild(self, guild: Guild, *, wait: Literal[True] = ..., cache: Optional[bool] = ...) 
-> List[Member]:\n ...\n\n @overload\n async def chunk_guild(\n self, guild: Guild, *, wait: Literal[False] = ..., cache: Optional[bool] = ...\n ) -> asyncio.Future[List[Member]]:\n ...\n\n async def chunk_guild(\n self, guild: Guild, *, wait: bool = True, cache: Optional[bool] = None\n ) -> Union[List[Member], asyncio.Future[List[Member]]]:\n cache = cache or self.member_cache_flags.joined\n request = self._chunk_requests.get(guild.id)\n if request is None:\n self._chunk_requests[guild.id] = request = ChunkRequest(guild.id, self.loop, self._get_guild, cache=cache)\n await self.chunker(guild.id, nonce=request.nonce)\n\n if wait:\n return await request.wait()\n return request.get_future()\n\n def _chunk_timeout(self, guild: Guild) -> float:\n return max(5.0, (guild.member_count or 0) / 10000)\n\n async def _chunk_and_dispatch(self, guild, unavailable):\n timeout = self._chunk_timeout(guild)\n\n try:\n await asyncio.wait_for(self.chunk_guild(guild), timeout=timeout)\n except asyncio.TimeoutError:\n _log.warning('Somehow timed out waiting for chunks for guild ID %s.', guild.id)\n\n if unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n def _add_ready_state(self, guild: Guild) -> bool:\n try:\n # Notify the on_ready state, if any, that this guild is complete.\n self._ready_state.put_nowait(guild)\n except AttributeError:\n return False\n else:\n return True\n\n def parse_guild_create(self, data: gw.GuildCreateEvent) -> None:\n unavailable = data.get('unavailable')\n if unavailable is True:\n # joined a guild with unavailable == True so..\n return\n\n guild = self._get_create_guild(data)\n\n if self._add_ready_state(guild):\n return # We're waiting for the ready event, put the rest on hold\n\n # check if it requires chunking\n if self._guild_needs_chunking(guild):\n asyncio.create_task(self._chunk_and_dispatch(guild, unavailable))\n return\n\n # Dispatch available if newly available\n if unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n def parse_guild_update(self, data: gw.GuildUpdateEvent) -> None:\n guild = self._get_guild(int(data['id']))\n if guild is not None:\n old_guild = copy.copy(guild)\n guild._from_data(data)\n self.dispatch('guild_update', old_guild, guild)\n else:\n _log.debug('GUILD_UPDATE referencing an unknown guild ID: %s. Discarding.', data['id'])\n\n def parse_guild_delete(self, data: gw.GuildDeleteEvent) -> None:\n guild = self._get_guild(int(data['id']))\n if guild is None:\n _log.debug('GUILD_DELETE referencing an unknown guild ID: %s. 
Discarding.', data['id'])\n return\n\n if data.get('unavailable', False):\n # GUILD_DELETE with unavailable being True means that the\n # guild that was available is now currently unavailable\n guild.unavailable = True\n self.dispatch('guild_unavailable', guild)\n return\n\n # do a cleanup of the messages cache\n if self._messages is not None:\n self._messages: Optional[Deque[Message]] = deque(\n (msg for msg in self._messages if msg.guild != guild), maxlen=self.max_messages\n )\n\n self._remove_guild(guild)\n self.dispatch('guild_remove', guild)\n\n def parse_guild_ban_add(self, data: gw.GuildBanAddEvent) -> None:\n # we make the assumption that GUILD_BAN_ADD is done\n # before GUILD_MEMBER_REMOVE is called\n # hence we don't remove it from cache or do anything\n # strange with it, the main purpose of this event\n # is mainly to dispatch to another event worth listening to for logging\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n try:\n user = User(data=data['user'], state=self)\n except KeyError:\n pass\n else:\n member = guild.get_member(user.id) or user\n self.dispatch('member_ban', guild, member)\n\n def parse_guild_ban_remove(self, data: gw.GuildBanRemoveEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None and 'user' in data:\n user = self.store_user(data['user'])\n self.dispatch('member_unban', guild, user)\n\n def parse_guild_role_create(self, data: gw.GuildRoleCreateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('GUILD_ROLE_CREATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n return\n\n role_data = data['role']\n role = Role(guild=guild, data=role_data, state=self)\n guild._add_role(role)\n self.dispatch('guild_role_create', role)\n\n def parse_guild_role_delete(self, data: gw.GuildRoleDeleteEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n role_id = int(data['role_id'])\n try:\n role = guild._remove_role(role_id)\n except KeyError:\n return\n else:\n self.dispatch('guild_role_delete', role)\n else:\n _log.debug('GUILD_ROLE_DELETE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_guild_role_update(self, data: gw.GuildRoleUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n role_data = data['role']\n role_id = int(role_data['id'])\n role = guild.get_role(role_id)\n if role is not None:\n old_role = copy.copy(role)\n role._update(role_data)\n self.dispatch('guild_role_update', old_role, role)\n else:\n _log.debug('GUILD_ROLE_UPDATE referencing an unknown guild ID: %s. 
Discarding.', data['guild_id'])\n\n def parse_guild_members_chunk(self, data: gw.GuildMembersChunkEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n presences = data.get('presences', [])\n\n if guild is None:\n return\n\n members = [Member(guild=guild, data=member, state=self) for member in data.get('members', [])]\n _log.debug('Processed a chunk for %s members in guild ID %s.', len(members), guild_id)\n\n if presences:\n member_dict: Dict[Snowflake, Member] = {str(member.id): member for member in members}\n for presence in presences:\n user = presence['user']\n member_id = user['id']\n member = member_dict.get(member_id)\n if member is not None:\n member._presence_update(presence, user)\n\n complete = data.get('chunk_index', 0) + 1 == data.get('chunk_count')\n self.process_chunk_requests(guild_id, data.get('nonce'), members, complete)\n\n def parse_guild_integrations_update(self, data: gw.GuildIntegrationsUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n self.dispatch('guild_integrations_update', guild)\n else:\n _log.debug('GUILD_INTEGRATIONS_UPDATE referencing an unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_integration_create(self, data: gw.IntegrationCreateEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n if guild is not None:\n cls, _ = _integration_factory(data['type'])\n integration = cls(data=data, guild=guild)\n self.dispatch('integration_create', integration)\n else:\n _log.debug('INTEGRATION_CREATE referencing an unknown guild ID: %s. Discarding.', guild_id)\n\n def parse_integration_update(self, data: gw.IntegrationUpdateEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n if guild is not None:\n cls, _ = _integration_factory(data['type'])\n integration = cls(data=data, guild=guild)\n self.dispatch('integration_update', integration)\n else:\n _log.debug('INTEGRATION_UPDATE referencing an unknown guild ID: %s. Discarding.', guild_id)\n\n def parse_integration_delete(self, data: gw.IntegrationDeleteEvent) -> None:\n guild_id = int(data['guild_id'])\n guild = self._get_guild(guild_id)\n if guild is not None:\n raw = RawIntegrationDeleteEvent(data)\n self.dispatch('raw_integration_delete', raw)\n else:\n _log.debug('INTEGRATION_DELETE referencing an unknown guild ID: %s. Discarding.', guild_id)\n\n def parse_webhooks_update(self, data: gw.WebhooksUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is None:\n _log.debug('WEBHOOKS_UPDATE referencing an unknown guild ID: %s. Discarding', data['guild_id'])\n return\n\n channel_id = utils._get_as_snowflake(data, 'channel_id')\n channel = guild.get_channel(channel_id) # type: ignore # None is okay here\n if channel is not None:\n self.dispatch('webhooks_update', channel)\n else:\n _log.debug('WEBHOOKS_UPDATE referencing an unknown channel ID: %s. Discarding.', data['channel_id'])\n\n def parse_stage_instance_create(self, data: gw.StageInstanceCreateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n stage_instance = StageInstance(guild=guild, state=self, data=data)\n guild._stage_instances[stage_instance.id] = stage_instance\n self.dispatch('stage_instance_create', stage_instance)\n else:\n _log.debug('STAGE_INSTANCE_CREATE referencing unknown guild ID: %s. 
Discarding.', data['guild_id'])\n\n def parse_stage_instance_update(self, data: gw.StageInstanceUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n stage_instance = guild._stage_instances.get(int(data['id']))\n if stage_instance is not None:\n old_stage_instance = copy.copy(stage_instance)\n stage_instance._update(data)\n self.dispatch('stage_instance_update', old_stage_instance, stage_instance)\n else:\n _log.debug('STAGE_INSTANCE_UPDATE referencing unknown stage instance ID: %s. Discarding.', data['id'])\n else:\n _log.debug('STAGE_INSTANCE_UPDATE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_stage_instance_delete(self, data: gw.StageInstanceDeleteEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n try:\n stage_instance = guild._stage_instances.pop(int(data['id']))\n except KeyError:\n pass\n else:\n self.dispatch('stage_instance_delete', stage_instance)\n else:\n _log.debug('STAGE_INSTANCE_DELETE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_guild_scheduled_event_create(self, data: gw.GuildScheduledEventCreateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n scheduled_event = ScheduledEvent(state=self, data=data)\n guild._scheduled_events[scheduled_event.id] = scheduled_event\n self.dispatch('scheduled_event_create', scheduled_event)\n else:\n _log.debug('SCHEDULED_EVENT_CREATE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_guild_scheduled_event_update(self, data: gw.GuildScheduledEventUpdateEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n scheduled_event = guild._scheduled_events.get(int(data['id']))\n if scheduled_event is not None:\n old_scheduled_event = copy.copy(scheduled_event)\n scheduled_event._update(data)\n self.dispatch('scheduled_event_update', old_scheduled_event, scheduled_event)\n else:\n _log.debug('SCHEDULED_EVENT_UPDATE referencing unknown scheduled event ID: %s. Discarding.', data['id'])\n else:\n _log.debug('SCHEDULED_EVENT_UPDATE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_guild_scheduled_event_delete(self, data: gw.GuildScheduledEventDeleteEvent) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n try:\n scheduled_event = guild._scheduled_events.pop(int(data['id']))\n except KeyError:\n pass\n else:\n self.dispatch('scheduled_event_delete', scheduled_event)\n else:\n _log.debug('SCHEDULED_EVENT_DELETE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_guild_scheduled_event_user_add(self, data: gw.GuildScheduledEventUserAdd) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n scheduled_event = guild._scheduled_events.get(int(data['guild_scheduled_event_id']))\n if scheduled_event is not None:\n user = self.get_user(int(data['user_id']))\n if user is not None:\n scheduled_event._add_user(user)\n self.dispatch('scheduled_event_user_add', scheduled_event, user)\n else:\n _log.debug('SCHEDULED_EVENT_USER_ADD referencing unknown user ID: %s. Discarding.', data['user_id'])\n else:\n _log.debug(\n 'SCHEDULED_EVENT_USER_ADD referencing unknown scheduled event ID: %s. Discarding.',\n data['guild_scheduled_event_id'],\n )\n else:\n _log.debug('SCHEDULED_EVENT_USER_ADD referencing unknown guild ID: %s. 
Discarding.', data['guild_id'])\n\n def parse_guild_scheduled_event_user_remove(self, data: gw.GuildScheduledEventUserRemove) -> None:\n guild = self._get_guild(int(data['guild_id']))\n if guild is not None:\n scheduled_event = guild._scheduled_events.get(int(data['guild_scheduled_event_id']))\n if scheduled_event is not None:\n user = self.get_user(int(data['user_id']))\n if user is not None:\n scheduled_event._pop_user(user.id)\n self.dispatch('scheduled_event_user_remove', scheduled_event, user)\n else:\n _log.debug('SCHEDULED_EVENT_USER_REMOVE referencing unknown user ID: %s. Discarding.', data['user_id'])\n else:\n _log.debug(\n 'SCHEDULED_EVENT_USER_REMOVE referencing unknown scheduled event ID: %s. Discarding.',\n data['guild_scheduled_event_id'],\n )\n else:\n _log.debug('SCHEDULED_EVENT_USER_REMOVE referencing unknown guild ID: %s. Discarding.', data['guild_id'])\n\n def parse_application_command_permissions_update(self, data: GuildApplicationCommandPermissionsPayload):\n raw = RawAppCommandPermissionsUpdateEvent(data=data, state=self)\n self.dispatch('raw_app_command_permissions_update', raw)\n\n def parse_voice_state_update(self, data: gw.VoiceStateUpdateEvent) -> None:\n guild = self._get_guild(utils._get_as_snowflake(data, 'guild_id'))\n channel_id = utils._get_as_snowflake(data, 'channel_id')\n flags = self.member_cache_flags\n # self.user is *always* cached when this is called\n self_id = self.user.id # type: ignore\n if guild is not None:\n if int(data['user_id']) == self_id:\n voice = self._get_voice_client(guild.id)\n if voice is not None:\n coro = voice.on_voice_state_update(data)\n asyncio.create_task(logging_coroutine(coro, info='Voice Protocol voice state update handler'))\n\n member, before, after = guild._update_voice_state(data, channel_id) # type: ignore\n if member is not None:\n if flags.voice:\n if channel_id is None and flags._voice_only and member.id != self_id:\n # Only remove from cache if we only have the voice flag enabled\n guild._remove_member(member)\n elif channel_id is not None:\n guild._add_member(member)\n\n self.dispatch('voice_state_update', member, before, after)\n else:\n _log.debug('VOICE_STATE_UPDATE referencing an unknown member ID: %s. 
Discarding.', data['user_id'])\n\n def parse_voice_server_update(self, data: gw.VoiceServerUpdateEvent) -> None:\n key_id = int(data['guild_id'])\n\n vc = self._get_voice_client(key_id)\n if vc is not None:\n coro = vc.on_voice_server_update(data)\n asyncio.create_task(logging_coroutine(coro, info='Voice Protocol voice server update handler'))\n\n def parse_typing_start(self, data: gw.TypingStartEvent) -> None:\n raw = RawTypingEvent(data)\n raw.user = self.get_user(raw.user_id)\n channel, guild = self._get_guild_channel(data)\n\n if channel is not None:\n if isinstance(channel, DMChannel):\n channel.recipient = raw.user\n elif guild is not None:\n raw.user = guild.get_member(raw.user_id)\n\n if raw.user is None:\n member_data = data.get('member')\n if member_data:\n raw.user = Member(data=member_data, state=self, guild=guild)\n\n if raw.user is not None:\n self.dispatch('typing', channel, raw.user, raw.timestamp)\n\n self.dispatch('raw_typing', raw)\n\n def _get_reaction_user(self, channel: MessageableChannel, user_id: int) -> Optional[Union[User, Member]]:\n if isinstance(channel, (TextChannel, Thread, VoiceChannel)):\n return channel.guild.get_member(user_id)\n return self.get_user(user_id)\n\n def get_reaction_emoji(self, data: PartialEmojiPayload) -> Union[Emoji, PartialEmoji, str]:\n emoji_id = utils._get_as_snowflake(data, 'id')\n\n if not emoji_id:\n # the name key will be a str\n return data['name'] # type: ignore\n\n try:\n return self._emojis[emoji_id]\n except KeyError:\n return PartialEmoji.with_state(\n self, animated=data.get('animated', False), id=emoji_id, name=data['name'] # type: ignore\n )\n\n def _upgrade_partial_emoji(self, emoji: PartialEmoji) -> Union[Emoji, PartialEmoji, str]:\n emoji_id = emoji.id\n if not emoji_id:\n return emoji.name\n try:\n return self._emojis[emoji_id]\n except KeyError:\n return emoji\n\n def get_channel(self, id: Optional[int]) -> Optional[Union[Channel, Thread]]:\n if id is None:\n return None\n\n pm = self._get_private_channel(id)\n if pm is not None:\n return pm\n\n for guild in self.guilds:\n channel = guild._resolve_channel(id)\n if channel is not None:\n return channel\n\n def create_message(self, *, channel: MessageableChannel, data: MessagePayload) -> Message:\n return Message(state=self, channel=channel, data=data)\n\n\nclass AutoShardedConnectionState(ConnectionState[ClientT]):\n def __init__(self, *args: Any, **kwargs: Any) -> None:\n super().__init__(*args, **kwargs)\n\n self.shard_ids: Union[List[int], range] = []\n\n self._ready_tasks: Dict[int, asyncio.Task[None]] = {}\n self._ready_states: Dict[int, asyncio.Queue[Guild]] = {}\n\n def _update_message_references(self) -> None:\n # self._messages won't be None when this is called\n for msg in self._messages: # type: ignore\n if not msg.guild:\n continue\n\n new_guild = self._get_guild(msg.guild.id)\n if new_guild is not None and new_guild is not msg.guild:\n channel_id = msg.channel.id\n channel = new_guild._resolve_channel(channel_id) or PartialMessageable(\n state=self, id=channel_id, guild_id=new_guild.id\n )\n msg._rebind_cached_references(new_guild, channel)\n\n async def chunker(\n self,\n guild_id: int,\n query: str = '',\n limit: int = 0,\n presences: bool = False,\n *,\n shard_id: Optional[int] = None,\n nonce: Optional[str] = None,\n ) -> None:\n ws = self._get_websocket(guild_id, shard_id=shard_id)\n await ws.request_chunks(guild_id, query=query, limit=limit, presences=presences, nonce=nonce)\n\n def _add_ready_state(self, guild: Guild) -> bool:\n try:\n # Notify the 
on_ready state, if any, that this guild is complete.\n self._ready_states[guild.shard_id].put_nowait(guild)\n except KeyError:\n return False\n else:\n return True\n\n async def _delay_ready(self) -> None:\n await asyncio.gather(*self._ready_tasks.values())\n\n # clear the current tasks\n self._ready_task = None\n self._ready_tasks = {}\n\n # dispatch the event\n self.call_handlers('ready')\n self.dispatch('ready')\n\n async def _delay_shard_ready(self, shard_id: int) -> None:\n try:\n states = []\n while True:\n # this snippet of code is basically waiting N seconds\n # until the last GUILD_CREATE was sent\n try:\n guild = await asyncio.wait_for(self._ready_states[shard_id].get(), timeout=self.guild_ready_timeout)\n except asyncio.TimeoutError:\n break\n else:\n if self._guild_needs_chunking(guild):\n future = await self.chunk_guild(guild, wait=False)\n states.append((guild, future))\n else:\n if guild.unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n for guild, future in states:\n timeout = self._chunk_timeout(guild)\n\n try:\n await asyncio.wait_for(future, timeout=timeout)\n except asyncio.TimeoutError:\n _log.warning('Shard ID %s timed out waiting for chunks for guild_id %s.', guild.shard_id, guild.id)\n\n if guild.unavailable is False:\n self.dispatch('guild_available', guild)\n else:\n self.dispatch('guild_join', guild)\n\n # remove the state\n try:\n del self._ready_states[shard_id]\n except KeyError:\n pass # already been deleted somehow\n\n except asyncio.CancelledError:\n pass\n else:\n # dispatch the event\n self.dispatch('shard_ready', shard_id)\n\n def parse_ready(self, data: gw.ReadyEvent) -> None:\n if self._ready_task is not None:\n self._ready_task.cancel()\n\n shard_id = data['shard'][0] # shard_id, num_shards\n\n if shard_id in self._ready_tasks:\n self._ready_tasks[shard_id].cancel()\n\n if shard_id not in self._ready_states:\n self._ready_states[shard_id] = asyncio.Queue()\n\n self.user: Optional[ClientUser]\n self.user = user = ClientUser(state=self, data=data['user'])\n # self._users is a list of Users, we're setting a ClientUser\n self._users[user.id] = user # type: ignore\n\n if self.application_id is None:\n try:\n application = data['application']\n except KeyError:\n pass\n else:\n self.application_id: Optional[int] = utils._get_as_snowflake(application, 'id')\n self.application_flags: ApplicationFlags = ApplicationFlags._from_value(application['flags'])\n\n for guild_data in data['guilds']:\n self._add_guild_from_data(guild_data) # type: ignore # _add_guild_from_data requires a complete Guild payload\n\n if self._messages:\n self._update_message_references()\n\n self.dispatch('connect')\n self.dispatch('shard_connect', shard_id)\n\n self._ready_tasks[shard_id] = asyncio.create_task(self._delay_shard_ready(shard_id))\n\n # The delay task for every shard has been started\n if len(self._ready_tasks) == len(self.shard_ids):\n self._ready_task = asyncio.create_task(self._delay_ready())\n\n def parse_resumed(self, data: gw.ResumedEvent) -> None:\n self.dispatch('resumed')\n self.dispatch('shard_resumed', data['__shard_id__']) # type: ignore # This is an internal discord.py key\n",
"path": "discord/state.py"
},
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\nfrom typing import Any, Callable, ClassVar, Coroutine, Dict, Iterator, List, Optional, Sequence, TYPE_CHECKING, Tuple, Type\nfrom functools import partial\nfrom itertools import groupby\n\nimport asyncio\nimport logging\nimport sys\nimport time\nimport os\nfrom .item import Item, ItemCallbackType\nfrom .dynamic import DynamicItem\nfrom ..components import (\n Component,\n ActionRow as ActionRowComponent,\n _component_factory,\n Button as ButtonComponent,\n SelectMenu as SelectComponent,\n)\n\n# fmt: off\n__all__ = (\n 'View',\n)\n# fmt: on\n\n\nif TYPE_CHECKING:\n from typing_extensions import Self\n import re\n\n from ..interactions import Interaction\n from ..message import Message\n from ..types.components import Component as ComponentPayload\n from ..types.interactions import ModalSubmitComponentInteractionData as ModalSubmitComponentInteractionDataPayload\n from ..state import ConnectionState\n from .modal import Modal\n\n\n_log = logging.getLogger(__name__)\n\n\ndef _walk_all_components(components: List[Component]) -> Iterator[Component]:\n for item in components:\n if isinstance(item, ActionRowComponent):\n yield from item.children\n else:\n yield item\n\n\ndef _component_to_item(component: Component) -> Item:\n if isinstance(component, ButtonComponent):\n from .button import Button\n\n return Button.from_component(component)\n if isinstance(component, SelectComponent):\n from .select import Select\n\n return Select.from_component(component)\n return Item.from_component(component)\n\n\nclass _ViewWeights:\n # fmt: off\n __slots__ = (\n 'weights',\n )\n # fmt: on\n\n def __init__(self, children: List[Item]):\n self.weights: List[int] = [0, 0, 0, 0, 0]\n\n key = lambda i: sys.maxsize if i.row is None else i.row\n children = sorted(children, key=key)\n for row, group in groupby(children, key=key):\n for item in group:\n self.add_item(item)\n\n def find_open_space(self, item: Item) -> int:\n for index, weight in enumerate(self.weights):\n if weight + item.width <= 5:\n return index\n\n raise ValueError('could not find open space for item')\n\n def add_item(self, item: Item) -> None:\n if item.row is not None:\n total = self.weights[item.row] + item.width\n if total > 5:\n raise ValueError(f'item would not fit at row {item.row} ({total} > 5 width)')\n self.weights[item.row] = total\n item._rendered_row = item.row\n else:\n index = 
self.find_open_space(item)\n self.weights[index] += item.width\n item._rendered_row = index\n\n def remove_item(self, item: Item) -> None:\n if item._rendered_row is not None:\n self.weights[item._rendered_row] -= item.width\n item._rendered_row = None\n\n def clear(self) -> None:\n self.weights = [0, 0, 0, 0, 0]\n\n\nclass _ViewCallback:\n __slots__ = ('view', 'callback', 'item')\n\n def __init__(self, callback: ItemCallbackType[Any, Any], view: View, item: Item[View]) -> None:\n self.callback: ItemCallbackType[Any, Any] = callback\n self.view: View = view\n self.item: Item[View] = item\n\n def __call__(self, interaction: Interaction) -> Coroutine[Any, Any, Any]:\n return self.callback(self.view, interaction, self.item)\n\n\nclass View:\n \"\"\"Represents a UI view.\n\n This object must be inherited to create a UI within Discord.\n\n .. versionadded:: 2.0\n\n Parameters\n -----------\n timeout: Optional[:class:`float`]\n Timeout in seconds from last interaction with the UI before no longer accepting input.\n If ``None`` then there is no timeout.\n \"\"\"\n\n __discord_ui_view__: ClassVar[bool] = True\n __discord_ui_modal__: ClassVar[bool] = False\n __view_children_items__: ClassVar[List[ItemCallbackType[Any, Any]]] = []\n\n def __init_subclass__(cls) -> None:\n super().__init_subclass__()\n\n children: Dict[str, ItemCallbackType[Any, Any]] = {}\n for base in reversed(cls.__mro__):\n for name, member in base.__dict__.items():\n if hasattr(member, '__discord_ui_model_type__'):\n children[name] = member\n\n if len(children) > 25:\n raise TypeError('View cannot have more than 25 children')\n\n cls.__view_children_items__ = list(children.values())\n\n def _init_children(self) -> List[Item[Self]]:\n children = []\n for func in self.__view_children_items__:\n item: Item = func.__discord_ui_model_type__(**func.__discord_ui_model_kwargs__)\n item.callback = _ViewCallback(func, self, item)\n item._view = self\n setattr(self, func.__name__, item)\n children.append(item)\n return children\n\n def __init__(self, *, timeout: Optional[float] = 180.0):\n self.__timeout = timeout\n self._children: List[Item[Self]] = self._init_children()\n self.__weights = _ViewWeights(self._children)\n self.id: str = os.urandom(16).hex()\n self._cache_key: Optional[int] = None\n self.__cancel_callback: Optional[Callable[[View], None]] = None\n self.__timeout_expiry: Optional[float] = None\n self.__timeout_task: Optional[asyncio.Task[None]] = None\n self.__stopped: asyncio.Future[bool] = asyncio.get_running_loop().create_future()\n\n def __repr__(self) -> str:\n return f'<{self.__class__.__name__} timeout={self.timeout} children={len(self._children)}>'\n\n async def __timeout_task_impl(self) -> None:\n while True:\n # Guard just in case someone changes the value of the timeout at runtime\n if self.timeout is None:\n return\n\n if self.__timeout_expiry is None:\n return self._dispatch_timeout()\n\n # Check if we've elapsed our currently set timeout\n now = time.monotonic()\n if now >= self.__timeout_expiry:\n return self._dispatch_timeout()\n\n # Wait N seconds to see if timeout data has been refreshed\n await asyncio.sleep(self.__timeout_expiry - now)\n\n def to_components(self) -> List[Dict[str, Any]]:\n def key(item: Item) -> int:\n return item._rendered_row or 0\n\n children = sorted(self._children, key=key)\n components: List[Dict[str, Any]] = []\n for _, group in groupby(children, key=key):\n children = [item.to_component_dict() for item in group]\n if not children:\n continue\n\n components.append(\n {\n 'type': 
1,\n 'components': children,\n }\n )\n\n return components\n\n def _refresh_timeout(self) -> None:\n if self.__timeout:\n self.__timeout_expiry = time.monotonic() + self.__timeout\n\n @property\n def timeout(self) -> Optional[float]:\n \"\"\"Optional[:class:`float`]: The timeout in seconds from last interaction with the UI before no longer accepting input.\n If ``None`` then there is no timeout.\n \"\"\"\n return self.__timeout\n\n @timeout.setter\n def timeout(self, value: Optional[float]) -> None:\n # If the timeout task is already running this allows it to update\n # the expiry while it's running\n if self.__timeout_task is not None:\n if value is not None:\n self.__timeout_expiry = time.monotonic() + value\n else:\n self.__timeout_expiry = None\n\n self.__timeout = value\n\n @property\n def children(self) -> List[Item[Self]]:\n \"\"\"List[:class:`Item`]: The list of children attached to this view.\"\"\"\n return self._children.copy()\n\n @classmethod\n def from_message(cls, message: Message, /, *, timeout: Optional[float] = 180.0) -> View:\n \"\"\"Converts a message's components into a :class:`View`.\n\n The :attr:`.Message.components` of a message are read-only\n and separate types from those in the ``discord.ui`` namespace.\n In order to modify and edit message components they must be\n converted into a :class:`View` first.\n\n Parameters\n -----------\n message: :class:`discord.Message`\n The message with components to convert into a view.\n timeout: Optional[:class:`float`]\n The timeout of the converted view.\n\n Returns\n --------\n :class:`View`\n The converted view. This always returns a :class:`View` and not\n one of its subclasses.\n \"\"\"\n view = View(timeout=timeout)\n row = 0\n for component in message.components:\n if isinstance(component, ActionRowComponent):\n for child in component.children:\n item = _component_to_item(child)\n item.row = row\n view.add_item(item)\n row += 1\n else:\n item = _component_to_item(component)\n item.row = row\n view.add_item(item)\n\n return view\n\n def add_item(self, item: Item[Any]) -> Self:\n \"\"\"Adds an item to the view.\n\n This function returns the class instance to allow for fluent-style\n chaining.\n\n Parameters\n -----------\n item: :class:`Item`\n The item to add to the view.\n\n Raises\n --------\n TypeError\n An :class:`Item` was not passed.\n ValueError\n Maximum number of children has been exceeded (25)\n or the row the item is trying to be added to is full.\n \"\"\"\n\n if len(self._children) > 25:\n raise ValueError('maximum number of children exceeded')\n\n if not isinstance(item, Item):\n raise TypeError(f'expected Item not {item.__class__.__name__}')\n\n self.__weights.add_item(item)\n\n item._view = self\n self._children.append(item)\n return self\n\n def remove_item(self, item: Item[Any]) -> Self:\n \"\"\"Removes an item from the view.\n\n This function returns the class instance to allow for fluent-style\n chaining.\n\n Parameters\n -----------\n item: :class:`Item`\n The item to remove from the view.\n \"\"\"\n\n try:\n self._children.remove(item)\n except ValueError:\n pass\n else:\n self.__weights.remove_item(item)\n return self\n\n def clear_items(self) -> Self:\n \"\"\"Removes all items from the view.\n\n This function returns the class instance to allow for fluent-style\n chaining.\n \"\"\"\n self._children.clear()\n self.__weights.clear()\n return self\n\n async def interaction_check(self, interaction: Interaction, /) -> bool:\n \"\"\"|coro|\n\n A callback that is called when an interaction happens within 
the view\n that checks whether the view should process item callbacks for the interaction.\n\n This is useful to override if, for example, you want to ensure that the\n interaction author is a given user.\n\n The default implementation of this returns ``True``.\n\n .. note::\n\n If an exception occurs within the body then the check\n is considered a failure and :meth:`on_error` is called.\n\n Parameters\n -----------\n interaction: :class:`~discord.Interaction`\n The interaction that occurred.\n\n Returns\n ---------\n :class:`bool`\n Whether the view children's callbacks should be called.\n \"\"\"\n return True\n\n async def on_timeout(self) -> None:\n \"\"\"|coro|\n\n A callback that is called when a view's timeout elapses without being explicitly stopped.\n \"\"\"\n pass\n\n async def on_error(self, interaction: Interaction, error: Exception, item: Item[Any], /) -> None:\n \"\"\"|coro|\n\n A callback that is called when an item's callback or :meth:`interaction_check`\n fails with an error.\n\n The default implementation logs to the library logger.\n\n Parameters\n -----------\n interaction: :class:`~discord.Interaction`\n The interaction that led to the failure.\n error: :class:`Exception`\n The exception that was raised.\n item: :class:`Item`\n The item that failed the dispatch.\n \"\"\"\n _log.error('Ignoring exception in view %r for item %r', self, item, exc_info=error)\n\n async def _scheduled_task(self, item: Item, interaction: Interaction):\n try:\n item._refresh_state(interaction, interaction.data) # type: ignore\n\n allow = await item.interaction_check(interaction) and await self.interaction_check(interaction)\n if not allow:\n return\n\n if self.timeout:\n self.__timeout_expiry = time.monotonic() + self.timeout\n\n await item.callback(interaction)\n except Exception as e:\n return await self.on_error(interaction, e, item)\n\n def _start_listening_from_store(self, store: ViewStore) -> None:\n self.__cancel_callback = partial(store.remove_view)\n if self.timeout:\n if self.__timeout_task is not None:\n self.__timeout_task.cancel()\n\n self.__timeout_expiry = time.monotonic() + self.timeout\n self.__timeout_task = asyncio.create_task(self.__timeout_task_impl())\n\n def _dispatch_timeout(self):\n if self.__stopped.done():\n return\n\n if self.__cancel_callback:\n self.__cancel_callback(self)\n self.__cancel_callback = None\n\n self.__stopped.set_result(True)\n asyncio.create_task(self.on_timeout(), name=f'discord-ui-view-timeout-{self.id}')\n\n def _dispatch_item(self, item: Item, interaction: Interaction):\n if self.__stopped.done():\n return\n\n asyncio.create_task(self._scheduled_task(item, interaction), name=f'discord-ui-view-dispatch-{self.id}')\n\n def _refresh(self, components: List[Component]) -> None:\n # fmt: off\n old_state: Dict[str, Item[Any]] = {\n item.custom_id: item # type: ignore\n for item in self._children\n if item.is_dispatchable()\n }\n # fmt: on\n\n for component in _walk_all_components(components):\n custom_id = getattr(component, 'custom_id', None)\n if custom_id is None:\n continue\n\n try:\n older = old_state[custom_id]\n except KeyError:\n _log.debug('View interaction referenced an unknown item custom_id %s. 
Discarding', custom_id)\n continue\n else:\n older._refresh_component(component)\n\n def stop(self) -> None:\n \"\"\"Stops listening to interaction events from this view.\n\n This operation cannot be undone.\n \"\"\"\n if not self.__stopped.done():\n self.__stopped.set_result(False)\n\n self.__timeout_expiry = None\n if self.__timeout_task is not None:\n self.__timeout_task.cancel()\n self.__timeout_task = None\n\n if self.__cancel_callback:\n self.__cancel_callback(self)\n self.__cancel_callback = None\n\n def is_finished(self) -> bool:\n \"\"\":class:`bool`: Whether the view has finished interacting.\"\"\"\n return self.__stopped.done()\n\n def is_dispatching(self) -> bool:\n \"\"\":class:`bool`: Whether the view has been added for dispatching purposes.\"\"\"\n return self.__cancel_callback is not None\n\n def is_persistent(self) -> bool:\n \"\"\":class:`bool`: Whether the view is set up as persistent.\n\n A persistent view has all their components with a set ``custom_id`` and\n a :attr:`timeout` set to ``None``.\n \"\"\"\n return self.timeout is None and all(item.is_persistent() for item in self._children)\n\n async def wait(self) -> bool:\n \"\"\"|coro|\n\n Waits until the view has finished interacting.\n\n A view is considered finished when :meth:`stop` is called\n or it times out.\n\n Returns\n --------\n :class:`bool`\n If ``True``, then the view timed out. If ``False`` then\n the view finished normally.\n \"\"\"\n return await self.__stopped\n\n\nclass ViewStore:\n def __init__(self, state: ConnectionState):\n # entity_id: {(component_type, custom_id): Item}\n self._views: Dict[Optional[int], Dict[Tuple[int, str], Item[View]]] = {}\n # message_id: View\n self._synced_message_views: Dict[int, View] = {}\n # custom_id: Modal\n self._modals: Dict[str, Modal] = {}\n # component_type is the key\n self._dynamic_items: Dict[re.Pattern[str], Type[DynamicItem[Item[Any]]]] = {}\n self._state: ConnectionState = state\n\n @property\n def persistent_views(self) -> Sequence[View]:\n # fmt: off\n views = {\n item.view.id: item.view\n for items in self._views.values()\n for item in items.values()\n if item.view and item.view.is_persistent()\n }\n # fmt: on\n return list(views.values())\n\n def add_dynamic_items(self, *items: Type[DynamicItem[Item[Any]]]) -> None:\n for item in items:\n pattern = item.__discord_ui_compiled_template__\n self._dynamic_items[pattern] = item\n\n def add_view(self, view: View, message_id: Optional[int] = None) -> None:\n view._start_listening_from_store(self)\n if view.__discord_ui_modal__:\n self._modals[view.custom_id] = view # type: ignore\n return\n\n dispatch_info = self._views.setdefault(message_id, {})\n for item in view._children:\n if isinstance(item, DynamicItem):\n pattern = item.__discord_ui_compiled_template__\n self._dynamic_items[pattern] = item.__class__\n elif item.is_dispatchable():\n dispatch_info[(item.type.value, item.custom_id)] = item # type: ignore\n\n view._cache_key = message_id\n if message_id is not None:\n self._synced_message_views[message_id] = view\n\n def remove_view(self, view: View) -> None:\n if view.__discord_ui_modal__:\n self._modals.pop(view.custom_id, None) # type: ignore\n return\n\n dispatch_info = self._views.get(view._cache_key)\n if dispatch_info:\n for item in view._children:\n if isinstance(item, DynamicItem):\n pattern = item.__discord_ui_compiled_template__\n self._dynamic_items.pop(pattern, None)\n elif item.is_dispatchable():\n dispatch_info.pop((item.type.value, item.custom_id), None) # type: ignore\n\n if 
len(dispatch_info) == 0:\n self._views.pop(view._cache_key, None)\n\n self._synced_message_views.pop(view._cache_key, None) # type: ignore\n\n async def schedule_dynamic_item_call(\n self,\n component_type: int,\n factory: Type[DynamicItem[Item[Any]]],\n interaction: Interaction,\n match: re.Match[str],\n ) -> None:\n try:\n item = await factory.from_custom_id(interaction, match)\n except Exception:\n _log.exception('Ignoring exception in dynamic item creation for %r', factory)\n return\n\n # Unfortunately cannot set Item.view here...\n item._refresh_state(interaction, interaction.data) # type: ignore\n\n try:\n allow = await item.interaction_check(interaction)\n except Exception:\n allow = False\n\n if not allow:\n return\n\n if interaction.message is None:\n item._view = None\n else:\n item._view = view = View.from_message(interaction.message)\n\n # Find the original item and replace it with the dynamic item\n for index, child in enumerate(view._children):\n if child.type.value == component_type and getattr(child, 'custom_id', None) == item.custom_id:\n view._children[index] = item\n break\n\n try:\n await item.callback(interaction)\n except Exception:\n _log.exception('Ignoring exception in dynamic item callback for %r', item)\n\n def dispatch_dynamic_items(self, component_type: int, custom_id: str, interaction: Interaction) -> None:\n for pattern, item in self._dynamic_items.items():\n match = pattern.fullmatch(custom_id)\n if match is not None:\n asyncio.create_task(\n self.schedule_dynamic_item_call(component_type, item, interaction, match),\n name=f'discord-ui-dynamic-item-{item.__name__}-{custom_id}',\n )\n\n def dispatch_view(self, component_type: int, custom_id: str, interaction: Interaction) -> None:\n self.dispatch_dynamic_items(component_type, custom_id, interaction)\n interaction_id: Optional[int] = None\n message_id: Optional[int] = None\n # Realistically, in a component based interaction the Interaction.message will never be None\n # However, this guard is just in case Discord screws up somehow\n msg = interaction.message\n if msg is not None:\n message_id = msg.id\n if msg.interaction:\n interaction_id = msg.interaction.id\n\n key = (component_type, custom_id)\n\n # The entity_id can either be message_id, interaction_id, or None in that priority order.\n item: Optional[Item[View]] = None\n if message_id is not None:\n item = self._views.get(message_id, {}).get(key)\n\n if item is None and interaction_id is not None:\n try:\n items = self._views.pop(interaction_id)\n except KeyError:\n item = None\n else:\n item = items.get(key)\n # If we actually got the items, then these keys should probably be moved\n # to the proper message_id instead of the interaction_id as they are now.\n # An interaction_id is only used as a temporary stop gap for\n # InteractionResponse.send_message so multiple view instances do not\n # override each other.\n # NOTE: Fix this mess if /callback endpoint ever gets proper return types\n self._views.setdefault(message_id, {}).update(items)\n\n if item is None:\n # Fallback to None message_id searches in case a persistent view\n # was added without an associated message_id\n item = self._views.get(None, {}).get(key)\n\n # If 3 lookups failed at this point then just discard it\n if item is None:\n return\n\n # Note, at this point the View is *not* None\n item.view._dispatch_item(item, interaction) # type: ignore\n\n def dispatch_modal(\n self,\n custom_id: str,\n interaction: Interaction,\n components: List[ModalSubmitComponentInteractionDataPayload],\n 
) -> None:\n modal = self._modals.get(custom_id)\n if modal is None:\n _log.debug(\"Modal interaction referencing unknown custom_id %s. Discarding\", custom_id)\n return\n\n modal._dispatch_submit(interaction, components)\n\n def remove_interaction_mapping(self, interaction_id: int) -> None:\n # This is called before re-adding the view\n self._views.pop(interaction_id, None)\n self._synced_message_views.pop(interaction_id, None)\n\n def is_message_tracked(self, message_id: int) -> bool:\n return message_id in self._synced_message_views\n\n def remove_message_tracking(self, message_id: int) -> Optional[View]:\n return self._synced_message_views.pop(message_id, None)\n\n def update_from_message(self, message_id: int, data: List[ComponentPayload]) -> None:\n components: List[Component] = []\n\n for component_data in data:\n component = _component_factory(component_data)\n\n if component is not None:\n components.append(component)\n\n # pre-req: is_message_tracked == true\n view = self._synced_message_views[message_id]\n view._refresh(components)\n",
"path": "discord/ui/view.py"
}
] | 10_6 | python | import unittest
import sys
from discord.ui import DynamicItem, Item
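
# Minimal stand-in: DynamicItem subclasses declare the pattern matched against
# component custom_ids via the ``template`` class keyword; the tests below only
# pass the class object around, so no instance is ever constructed.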
class MockDynamicItem(DynamicItem, Item, template='mock_template'):
    pass


class TestClientDynamicItems(unittest.TestCase):
def setUp(self):
from discord import Client, Intents
from unittest.mock import MagicMock
        intents = Intents.default()  # default intents suffice; the tests never open a gateway connection
self.client = Client(intents=intents)
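        # add_dynamic_items/remove_dynamic_items delegate to the connection
        # state, so a MagicMock stands in for it and lets the tests assert on
        # that delegation without a real gateway connection.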
self.client._connection = MagicMock()

    def test_remove_dynamic_items_method_exists(self):
self.assertTrue(hasattr(self.client, 'remove_dynamic_items'))

    def test_remove_dynamic_items_functionality(self):
self.client.add_dynamic_items(MockDynamicItem)
self.client.remove_dynamic_items(MockDynamicItem)
# Check if remove_dynamic_items was called on the _connection object
self.client._connection.remove_dynamic_items.assert_called_with(MockDynamicItem)

    def test_remove_dynamic_items_with_invalid_type(self):
with self.assertRaises(TypeError):
self.client.remove_dynamic_items(str)


def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestClientDynamicItems))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)


if __name__ == '__main__':
main()
|
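For orientation, the `10_6` row above only exercises `Client.add_dynamic_items` / `Client.remove_dynamic_items` through a mocked connection state. The sketch below shows how that API is driven with a concrete item; the `PingButton` class, its template string, and the wrapped `Button` are illustrative assumptions, while the class-keyword `template`, the class-based registration calls, and the two-argument `from_custom_id` hook are taken from the row's test script and its `ViewStore.schedule_dynamic_item_call` payload.

```python
import re

import discord
from discord.ui import Button, DynamicItem


class PingButton(DynamicItem[Button], template=r'ping:(?P<user_id>[0-9]+)'):
    # Hypothetical item: the template is compiled to a regex and full-matched
    # against incoming component custom_ids by the ViewStore.
    def __init__(self, user_id: int) -> None:
        super().__init__(Button(label='Ping', custom_id=f'ping:{user_id}'))
        self.user_id = user_id

    @classmethod
    async def from_custom_id(cls, interaction: discord.Interaction, match: re.Match[str], /):
        # Rebuilds the item from a matched custom_id when an interaction arrives,
        # mirroring the two-argument call site in ViewStore.schedule_dynamic_item_call.
        return cls(int(match['user_id']))


client = discord.Client(intents=discord.Intents.default())

# Registration is class-based: the class object is passed, never an instance,
# which is exactly what the 10_6 tests assert against the mocked connection.
client.add_dynamic_items(PingButton)
client.remove_dynamic_items(PingButton)
```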
https://github.com/teamqurrent/discord.py | In the discord.py repository, refactor the `Template` class in `template.py` to streamline the initialization of the source_guild attribute. Remove the conditional logic that checks for an existing guild and instead always initialize source_guild from the serialized guild data. Additionally, add a cache_guild_expressions property to the `_PartialTemplateState` class that returns False. | 8b8ce55 | discord | python3.9 | 16f6466d | diff --git a/discord/template.py b/discord/template.py
--- a/discord/template.py
+++ b/discord/template.py
@@ -69,6 +69,10 @@ class _PartialTemplateState:
def member_cache_flags(self):
return self.__state.member_cache_flags
+ @property
+ def cache_guild_expressions(self):
+ return False
+
def store_emoji(self, guild, packet) -> None:
return None
@@ -146,18 +150,11 @@ class Template:
self.created_at: Optional[datetime.datetime] = parse_time(data.get('created_at'))
self.updated_at: Optional[datetime.datetime] = parse_time(data.get('updated_at'))
- guild_id = int(data['source_guild_id'])
- guild: Optional[Guild] = self._state._get_guild(guild_id)
-
- self.source_guild: Guild
- if guild is None:
- source_serialised = data['serialized_source_guild']
- source_serialised['id'] = guild_id
- state = _PartialTemplateState(state=self._state)
- # Guild expects a ConnectionState, we're passing a _PartialTemplateState
- self.source_guild = Guild(data=source_serialised, state=state) # type: ignore
- else:
- self.source_guild = guild
+ source_serialised = data['serialized_source_guild']
+ source_serialised['id'] = int(data['source_guild_id'])
+ state = _PartialTemplateState(state=self._state)
+ # Guild expects a ConnectionState, we're passing a _PartialTemplateState
+ self.source_guild = Guild(data=source_serialised, state=state) # type: ignore
self.is_dirty: Optional[bool] = data.get('is_dirty', None)
| [
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import Any, Optional, TYPE_CHECKING, List\nfrom .utils import parse_time, _bytes_to_base64_data, MISSING\nfrom .guild import Guild\n\n# fmt: off\n__all__ = (\n 'Template',\n)\n# fmt: on\n\nif TYPE_CHECKING:\n import datetime\n from .types.template import Template as TemplatePayload\n from .state import ConnectionState\n from .user import User\n\n\nclass _FriendlyHttpAttributeErrorHelper:\n __slots__ = ()\n\n def __getattr__(self, attr):\n raise AttributeError('PartialTemplateState does not support http methods.')\n\n\nclass _PartialTemplateState:\n def __init__(self, *, state) -> None:\n self.__state = state\n self.http = _FriendlyHttpAttributeErrorHelper()\n\n @property\n def shard_count(self):\n return self.__state.shard_count\n\n @property\n def user(self):\n return self.__state.user\n\n @property\n def self_id(self):\n return self.__state.user.id\n\n @property\n def member_cache_flags(self):\n return self.__state.member_cache_flags\n\n def store_emoji(self, guild, packet) -> None:\n return None\n\n def _get_voice_client(self, id) -> None:\n return None\n\n def _get_message(self, id) -> None:\n return None\n\n def _get_guild(self, id):\n return self.__state._get_guild(id)\n\n async def query_members(self, **kwargs: Any) -> List[Any]:\n return []\n\n def __getattr__(self, attr):\n raise AttributeError(f'PartialTemplateState does not support {attr!r}.')\n\n\nclass Template:\n \"\"\"Represents a Discord template.\n\n .. versionadded:: 1.4\n\n Attributes\n -----------\n code: :class:`str`\n The template code.\n uses: :class:`int`\n How many times the template has been used.\n name: :class:`str`\n The name of the template.\n description: :class:`str`\n The description of the template.\n creator: :class:`User`\n The creator of the template.\n created_at: :class:`datetime.datetime`\n An aware datetime in UTC representing when the template was created.\n updated_at: :class:`datetime.datetime`\n An aware datetime in UTC representing when the template was last updated.\n This is referred to as \"last synced\" in the official Discord client.\n source_guild: :class:`Guild`\n The guild snapshot that represents the data that this template currently holds.\n is_dirty: Optional[:class:`bool`]\n Whether the template has unsynced changes.\n\n .. 
versionadded:: 2.0\n \"\"\"\n\n __slots__ = (\n 'code',\n 'uses',\n 'name',\n 'description',\n 'creator',\n 'created_at',\n 'updated_at',\n 'source_guild',\n 'is_dirty',\n '_state',\n )\n\n def __init__(self, *, state: ConnectionState, data: TemplatePayload) -> None:\n self._state = state\n self._store(data)\n\n def _store(self, data: TemplatePayload) -> None:\n self.code: str = data['code']\n self.uses: int = data['usage_count']\n self.name: str = data['name']\n self.description: Optional[str] = data['description']\n creator_data = data.get('creator')\n self.creator: Optional[User] = None if creator_data is None else self._state.create_user(creator_data)\n\n self.created_at: Optional[datetime.datetime] = parse_time(data.get('created_at'))\n self.updated_at: Optional[datetime.datetime] = parse_time(data.get('updated_at'))\n\n guild_id = int(data['source_guild_id'])\n guild: Optional[Guild] = self._state._get_guild(guild_id)\n\n self.source_guild: Guild\n if guild is None:\n source_serialised = data['serialized_source_guild']\n source_serialised['id'] = guild_id\n state = _PartialTemplateState(state=self._state)\n # Guild expects a ConnectionState, we're passing a _PartialTemplateState\n self.source_guild = Guild(data=source_serialised, state=state) # type: ignore\n else:\n self.source_guild = guild\n\n self.is_dirty: Optional[bool] = data.get('is_dirty', None)\n\n def __repr__(self) -> str:\n return (\n f'<Template code={self.code!r} uses={self.uses} name={self.name!r}'\n f' creator={self.creator!r} source_guild={self.source_guild!r} is_dirty={self.is_dirty}>'\n )\n\n async def create_guild(self, name: str, icon: bytes = MISSING) -> Guild:\n \"\"\"|coro|\n\n Creates a :class:`.Guild` using the template.\n\n Bot accounts in more than 10 guilds are not allowed to create guilds.\n\n .. versionchanged:: 2.0\n The ``region`` parameter has been removed.\n\n .. versionchanged:: 2.0\n This function will now raise :exc:`ValueError` instead of\n ``InvalidArgument``.\n\n Parameters\n ----------\n name: :class:`str`\n The name of the guild.\n icon: :class:`bytes`\n The :term:`py:bytes-like object` representing the icon. See :meth:`.ClientUser.edit`\n for more details on what is expected.\n\n Raises\n ------\n HTTPException\n Guild creation failed.\n ValueError\n Invalid icon image format given. Must be PNG or JPG.\n\n Returns\n -------\n :class:`.Guild`\n The guild created. This is not the same guild that is\n added to cache.\n \"\"\"\n base64_icon = None\n if icon is not MISSING:\n base64_icon = _bytes_to_base64_data(icon)\n\n data = await self._state.http.create_from_template(self.code, name, base64_icon)\n return Guild(data=data, state=self._state)\n\n async def sync(self) -> Template:\n \"\"\"|coro|\n\n Sync the template to the guild's current state.\n\n You must have :attr:`~Permissions.manage_guild` in the source guild to do this.\n\n .. versionadded:: 1.7\n\n .. 
versionchanged:: 2.0\n The template is no longer edited in-place, instead it is returned.\n\n Raises\n -------\n HTTPException\n Editing the template failed.\n Forbidden\n You don't have permissions to edit the template.\n NotFound\n This template does not exist.\n\n Returns\n --------\n :class:`Template`\n The newly edited template.\n \"\"\"\n\n data = await self._state.http.sync_template(self.source_guild.id, self.code)\n return Template(state=self._state, data=data)\n\n async def edit(\n self,\n *,\n name: str = MISSING,\n description: Optional[str] = MISSING,\n ) -> Template:\n \"\"\"|coro|\n\n Edit the template metadata.\n\n You must have :attr:`~Permissions.manage_guild` in the source guild to do this.\n\n .. versionadded:: 1.7\n\n .. versionchanged:: 2.0\n The template is no longer edited in-place, instead it is returned.\n\n Parameters\n ------------\n name: :class:`str`\n The template's new name.\n description: Optional[:class:`str`]\n The template's new description.\n\n Raises\n -------\n HTTPException\n Editing the template failed.\n Forbidden\n You don't have permissions to edit the template.\n NotFound\n This template does not exist.\n\n Returns\n --------\n :class:`Template`\n The newly edited template.\n \"\"\"\n payload = {}\n\n if name is not MISSING:\n payload['name'] = name\n if description is not MISSING:\n payload['description'] = description\n\n data = await self._state.http.edit_template(self.source_guild.id, self.code, payload)\n return Template(state=self._state, data=data)\n\n async def delete(self) -> None:\n \"\"\"|coro|\n\n Delete the template.\n\n You must have :attr:`~Permissions.manage_guild` in the source guild to do this.\n\n .. versionadded:: 1.7\n\n Raises\n -------\n HTTPException\n Editing the template failed.\n Forbidden\n You don't have permissions to edit the template.\n NotFound\n This template does not exist.\n \"\"\"\n await self._state.http.delete_template(self.source_guild.id, self.code)\n\n @property\n def url(self) -> str:\n \"\"\":class:`str`: The template url.\n\n .. versionadded:: 2.0\n \"\"\"\n return f'https://discord.new/{self.code}'\n",
"path": "discord/template.py"
}
] | 10_7 | python | import unittest
import sys
import os
# Mocking _PartialTemplateState, as the real class is not easily importable
class MockPartialTemplateState:
def __init__(self, state):
from unittest.mock import MagicMock
self.__state = state
self.user = state.user
self.member_cache_flags = MagicMock()
def create_user(self, data):
from unittest.mock import MagicMock
return MagicMock()
def _get_guild(self, guild_id):
from unittest.mock import MagicMock
return MagicMock()
class TestTemplateSourceGuild(unittest.TestCase):
def setUp(self):
from unittest.mock import MagicMock
self.mock_state = MagicMock()
self.mock_state.user.id = 123456
self.partial_state = MockPartialTemplateState(state=self.mock_state)
def test_source_guild_initialization(self):
from discord.template import Template
template_data = {
'code': 'template_code',
'usage_count': 1,
'name': 'Template Name',
'description': 'Template Description',
'creator': {'id': '123', 'username': 'creator', 'discriminator': '0001'},
'created_at': '2020-01-01T00:00:00.000000+00:00',
'updated_at': '2020-01-01T00:00:00.000000+00:00',
'source_guild_id': '1234567890',
'serialized_source_guild': {'id': '1234567890', 'name': 'Test Guild'}
}
template = Template(state=self.partial_state, data=template_data)
self.assertEqual(template.source_guild.id, 1234567890)
self.assertEqual(template.source_guild.name, 'Test Guild')
def test_cache_guild_expressions_in_partial_template_state(self):
discord_py_directory = ""
template_file_path = os.path.join(discord_py_directory, "discord", "template.py")
with open(template_file_path, 'r') as file:
template_content = file.read()
# Check that the added lines appear in the source file; for such a small addition a source-level check is viable
self.assertIn("def cache_guild_expressions(self):", template_content)
self.assertIn("return False", template_content)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestTemplateSourceGuild))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
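Tying the 10_7 row together: after the patch, `Template._store` always rebuilds `source_guild` from the serialized snapshot via `_PartialTemplateState`, and the new `cache_guild_expressions` property returning `False` matters because the partial state's catch-all `__getattr__` (visible in the file content above) raises `AttributeError` for any attribute it does not define, which would otherwise break `Guild` construction if its parser consults that flag. Below is a hedged usage sketch; `connection_state` stands for an assumed, pre-built ConnectionState and is not part of the dataset.

from discord.template import Template

payload = {  # hypothetical payload mirroring the row's test data
    'code': 'abc',
    'usage_count': 0,
    'name': 'snapshot',
    'description': None,
    'source_guild_id': '1234567890',
    'serialized_source_guild': {'id': '1234567890', 'name': 'Test Guild'},
}
template = Template(state=connection_state, data=payload)  # connection_state is assumed
# Even if guild 1234567890 is already in the client's cache, source_guild is the snapshot:
assert template.source_guild.id == 1234567890
assert template.source_guild.name == 'Test Guild'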
https://github.com/teamqurrent/discord.py | To align with new guild event permissions in Discord, modify the `Permissions` class in `discord/permissions.py`. Add an `events` class method that initializes a `Permissions` object with the event-related permissions set (using the specific bit pattern 0b100000000001000000000000000000000000000000000). Also, implement a `create_events` property method that checks whether the permission to create guild events (identified by the bit 1 << 44) is active. These updates enable the `Permissions` class to handle the new event-related permissions. | 135e57c | discord | python3.9 | c69ce78a | diff --git a/discord/permissions.py b/discord/permissions.py
--- a/discord/permissions.py
+++ b/discord/permissions.py
@@ -329,6 +329,15 @@ class Permissions(BaseFlags):
"""
return cls(0b10000010001110000000000000010000000111110)
+ @classmethod
+ def events(cls) -> Self:
+ """A factory method that creates a :class:`Permissions` with all
+ "Events" permissions from the official Discord UI set to ``True``.
+
+ .. versionadded:: 2.4
+ """
+ return cls(0b100000000001000000000000000000000000000000000)
+
@classmethod
def advanced(cls) -> Self:
"""A factory method that creates a :class:`Permissions` with all
@@ -684,6 +693,14 @@ class Permissions(BaseFlags):
"""
return 1 << 43
+ @flag_value
+ def create_events(self) -> int:
+ """:class:`bool`: Returns ``True`` if a user can create guild events.
+
+ .. versionadded:: 2.4
+ """
+ return 1 << 44
+
@flag_value
def use_external_sounds(self) -> int:
""":class:`bool`: Returns ``True`` if a user can use sounds from other guilds.
@@ -819,6 +836,7 @@ class PermissionOverwrite:
use_external_sounds: Optional[bool]
send_voice_messages: Optional[bool]
create_expressions: Optional[bool]
+ create_events: Optional[bool]
def __init__(self, **kwargs: Optional[bool]):
self._values: Dict[str, Optional[bool]] = {}
| [
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import Callable, Any, ClassVar, Dict, Iterator, Set, TYPE_CHECKING, Tuple, Optional\nfrom .flags import BaseFlags, flag_value, fill_with_flags, alias_flag_value\n\n__all__ = (\n 'Permissions',\n 'PermissionOverwrite',\n)\n\nif TYPE_CHECKING:\n from typing_extensions import Self\n\n# A permission alias works like a regular flag but is marked\n# So the PermissionOverwrite knows to work with it\nclass permission_alias(alias_flag_value):\n alias: str\n\n\ndef make_permission_alias(alias: str) -> Callable[[Callable[[Any], int]], permission_alias]:\n def decorator(func: Callable[[Any], int]) -> permission_alias:\n ret = permission_alias(func)\n ret.alias = alias\n return ret\n\n return decorator\n\n\n@fill_with_flags()\nclass Permissions(BaseFlags):\n \"\"\"Wraps up the Discord permission value.\n\n The properties provided are two way. You can set and retrieve individual\n bits using the properties as if they were regular bools. This allows\n you to edit permissions.\n\n .. versionchanged:: 1.3\n You can now use keyword arguments to initialize :class:`Permissions`\n similar to :meth:`update`.\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two permissions are equal.\n .. describe:: x != y\n\n Checks if two permissions are not equal.\n .. describe:: x <= y\n\n Checks if a permission is a subset of another permission.\n .. describe:: x >= y\n\n Checks if a permission is a superset of another permission.\n .. describe:: x < y\n\n Checks if a permission is a strict subset of another permission.\n .. describe:: x > y\n\n Checks if a permission is a strict superset of another permission.\n\n .. describe:: x | y, x |= y\n\n Returns a Permissions instance with all enabled flags from\n both x and y.\n\n .. versionadded:: 2.0\n\n .. describe:: x & y, x &= y\n\n Returns a Permissions instance with only flags enabled on\n both x and y.\n\n .. versionadded:: 2.0\n\n .. describe:: x ^ y, x ^= y\n\n Returns a Permissions instance with only flags enabled on\n only one of x or y, not on both.\n\n .. versionadded:: 2.0\n\n .. describe:: ~x\n\n Returns a Permissions instance with all flags inverted from x.\n\n .. versionadded:: 2.0\n\n .. describe:: hash(x)\n\n Return the permission's hash.\n .. describe:: iter(x)\n\n Returns an iterator of ``(perm, value)`` pairs. 
This allows it\n to be, for example, constructed as a dict or a list of pairs.\n Note that aliases are not shown.\n\n .. describe:: bool(b)\n\n Returns whether the permissions object has any permissions set to ``True``.\n\n .. versionadded:: 2.0\n\n Attributes\n -----------\n value: :class:`int`\n The raw value. This value is a bit array field of a 53-bit integer\n representing the currently available permissions. You should query\n permissions via the properties rather than using this raw value.\n \"\"\"\n\n __slots__ = ()\n\n def __init__(self, permissions: int = 0, **kwargs: bool):\n if not isinstance(permissions, int):\n raise TypeError(f'Expected int parameter, received {permissions.__class__.__name__} instead.')\n\n self.value = permissions\n for key, value in kwargs.items():\n if key not in self.VALID_FLAGS:\n raise TypeError(f'{key!r} is not a valid permission name.')\n setattr(self, key, value)\n\n def is_subset(self, other: Permissions) -> bool:\n \"\"\"Returns ``True`` if self has the same or fewer permissions as other.\"\"\"\n if isinstance(other, Permissions):\n return (self.value & other.value) == self.value\n else:\n raise TypeError(f\"cannot compare {self.__class__.__name__} with {other.__class__.__name__}\")\n\n def is_superset(self, other: Permissions) -> bool:\n \"\"\"Returns ``True`` if self has the same or more permissions as other.\"\"\"\n if isinstance(other, Permissions):\n return (self.value | other.value) == self.value\n else:\n raise TypeError(f\"cannot compare {self.__class__.__name__} with {other.__class__.__name__}\")\n\n def is_strict_subset(self, other: Permissions) -> bool:\n \"\"\"Returns ``True`` if the permissions on other are a strict subset of those on self.\"\"\"\n return self.is_subset(other) and self != other\n\n def is_strict_superset(self, other: Permissions) -> bool:\n \"\"\"Returns ``True`` if the permissions on other are a strict superset of those on self.\"\"\"\n return self.is_superset(other) and self != other\n\n __le__ = is_subset\n __ge__ = is_superset\n __lt__ = is_strict_subset\n __gt__ = is_strict_superset\n\n @classmethod\n def none(cls) -> Self:\n \"\"\"A factory method that creates a :class:`Permissions` with all\n permissions set to ``False``.\"\"\"\n return cls(0)\n\n @classmethod\n def all(cls) -> Self:\n \"\"\"A factory method that creates a :class:`Permissions` with all\n permissions set to ``True``.\n \"\"\"\n return cls(0b11111111111111111111111111111111111111111111111)\n\n @classmethod\n def _timeout_mask(cls) -> int:\n p = cls.all()\n p.view_channel = False\n p.read_message_history = False\n return ~p.value\n\n @classmethod\n def _dm_permissions(cls) -> Self:\n base = cls.text()\n base.read_messages = True\n base.send_tts_messages = False\n base.manage_messages = False\n base.create_private_threads = False\n base.create_public_threads = False\n base.manage_threads = False\n base.send_messages_in_threads = False\n return base\n\n @classmethod\n def all_channel(cls) -> Self:\n \"\"\"A :class:`Permissions` with all channel-specific permissions set to\n ``True`` and the guild-specific ones set to ``False``. The guild-specific\n permissions are currently:\n\n - :attr:`manage_expressions`\n - :attr:`view_audit_log`\n - :attr:`view_guild_insights`\n - :attr:`manage_guild`\n - :attr:`change_nickname`\n - :attr:`manage_nicknames`\n - :attr:`kick_members`\n - :attr:`ban_members`\n - :attr:`administrator`\n - :attr:`create_expressions`\n\n .. 
versionchanged:: 1.7\n Added :attr:`stream`, :attr:`priority_speaker` and :attr:`use_application_commands` permissions.\n\n .. versionchanged:: 2.0\n Added :attr:`create_public_threads`, :attr:`create_private_threads`, :attr:`manage_threads`,\n :attr:`use_external_stickers`, :attr:`send_messages_in_threads` and\n :attr:`request_to_speak` permissions.\n\n .. versionchanged:: 2.3\n Added :attr:`use_soundboard`, :attr:`create_expressions` permissions.\n \"\"\"\n return cls(0b01000111110110110011111101111111111101010001)\n\n @classmethod\n def general(cls) -> Self:\n \"\"\"A factory method that creates a :class:`Permissions` with all\n \"General\" permissions from the official Discord UI set to ``True``.\n\n .. versionchanged:: 1.7\n Permission :attr:`read_messages` is now included in the general permissions, but\n permissions :attr:`administrator`, :attr:`create_instant_invite`, :attr:`kick_members`,\n :attr:`ban_members`, :attr:`change_nickname` and :attr:`manage_nicknames` are\n no longer part of the general permissions.\n\n .. versionchanged:: 2.3\n Added :attr:`create_expressions` permission.\n \"\"\"\n return cls(0b10000000000001110000000010000000010010110000)\n\n @classmethod\n def membership(cls) -> Self:\n \"\"\"A factory method that creates a :class:`Permissions` with all\n \"Membership\" permissions from the official Discord UI set to ``True``.\n\n .. versionadded:: 1.7\n \"\"\"\n return cls(0b10000000000001100000000000000000000000111)\n\n @classmethod\n def text(cls) -> Self:\n \"\"\"A factory method that creates a :class:`Permissions` with all\n \"Text\" permissions from the official Discord UI set to ``True``.\n\n .. versionchanged:: 1.7\n Permission :attr:`read_messages` is no longer part of the text permissions.\n Added :attr:`use_application_commands` permission.\n\n .. versionchanged:: 2.0\n Added :attr:`create_public_threads`, :attr:`create_private_threads`, :attr:`manage_threads`,\n :attr:`send_messages_in_threads` and :attr:`use_external_stickers` permissions.\n\n .. versionchanged:: 2.3\n Added :attr:`send_voice_messages` permission.\n \"\"\"\n return cls(0b10000000111110010000000000001111111100001000000)\n\n @classmethod\n def voice(cls) -> Self:\n \"\"\"A factory method that creates a :class:`Permissions` with all\n \"Voice\" permissions from the official Discord UI set to ``True``.\"\"\"\n return cls(0b1001001000000000000011111100000000001100000000)\n\n @classmethod\n def stage(cls) -> Self:\n \"\"\"A factory method that creates a :class:`Permissions` with all\n \"Stage Channel\" permissions from the official Discord UI set to ``True``.\n\n .. versionadded:: 1.7\n \"\"\"\n return cls(1 << 32)\n\n @classmethod\n def stage_moderator(cls) -> Self:\n \"\"\"A factory method that creates a :class:`Permissions` with all permissions\n for stage moderators set to ``True``. These permissions are currently:\n\n - :attr:`manage_channels`\n - :attr:`mute_members`\n - :attr:`move_members`\n\n .. versionadded:: 1.7\n\n .. versionchanged:: 2.0\n Added :attr:`manage_channels` permission and removed :attr:`request_to_speak` permission.\n \"\"\"\n return cls(0b1010000000000000000010000)\n\n @classmethod\n def elevated(cls) -> Self:\n \"\"\"A factory method that creates a :class:`Permissions` with all permissions\n that require 2FA set to ``True``. 
These permissions are currently:\n\n - :attr:`kick_members`\n - :attr:`ban_members`\n - :attr:`administrator`\n - :attr:`manage_channels`\n - :attr:`manage_guild`\n - :attr:`manage_messages`\n - :attr:`manage_roles`\n - :attr:`manage_webhooks`\n - :attr:`manage_expressions`\n - :attr:`manage_threads`\n - :attr:`moderate_members`\n\n .. versionadded:: 2.0\n \"\"\"\n return cls(0b10000010001110000000000000010000000111110)\n\n @classmethod\n def advanced(cls) -> Self:\n \"\"\"A factory method that creates a :class:`Permissions` with all\n \"Advanced\" permissions from the official Discord UI set to ``True``.\n\n .. versionadded:: 1.7\n \"\"\"\n return cls(1 << 3)\n\n def update(self, **kwargs: bool) -> None:\n r\"\"\"Bulk updates this permission object.\n\n Allows you to set multiple attributes by using keyword\n arguments. The names must be equivalent to the properties\n listed. Extraneous key/value pairs will be silently ignored.\n\n Parameters\n ------------\n \\*\\*kwargs\n A list of key/value pairs to bulk update permissions with.\n \"\"\"\n for key, value in kwargs.items():\n if key in self.VALID_FLAGS:\n setattr(self, key, value)\n\n def handle_overwrite(self, allow: int, deny: int) -> None:\n # Basically this is what's happening here.\n # We have an original bit array, e.g. 1010\n # Then we have another bit array that is 'denied', e.g. 1111\n # And then we have the last one which is 'allowed', e.g. 0101\n # We want original OP denied to end up resulting in\n # whatever is in denied to be set to 0.\n # So 1010 OP 1111 -> 0000\n # Then we take this value and look at the allowed values.\n # And whatever is allowed is set to 1.\n # So 0000 OP2 0101 -> 0101\n # The OP is base & ~denied.\n # The OP2 is base | allowed.\n self.value: int = (self.value & ~deny) | allow\n\n @flag_value\n def create_instant_invite(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if the user can create instant invites.\"\"\"\n return 1 << 0\n\n @flag_value\n def kick_members(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if the user can kick users from the guild.\"\"\"\n return 1 << 1\n\n @flag_value\n def ban_members(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can ban users from the guild.\"\"\"\n return 1 << 2\n\n @flag_value\n def administrator(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user is an administrator. 
This role overrides all other permissions.\n\n This also bypasses all channel-specific overrides.\n \"\"\"\n return 1 << 3\n\n @flag_value\n def manage_channels(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can edit, delete, or create channels in the guild.\n\n This also corresponds to the \"Manage Channel\" channel-specific override.\"\"\"\n return 1 << 4\n\n @flag_value\n def manage_guild(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can edit guild properties.\"\"\"\n return 1 << 5\n\n @flag_value\n def add_reactions(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can add reactions to messages.\"\"\"\n return 1 << 6\n\n @flag_value\n def view_audit_log(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can view the guild's audit log.\"\"\"\n return 1 << 7\n\n @flag_value\n def priority_speaker(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can be more easily heard while talking.\"\"\"\n return 1 << 8\n\n @flag_value\n def stream(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can stream in a voice channel.\"\"\"\n return 1 << 9\n\n @flag_value\n def read_messages(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can read messages from all or specific text channels.\"\"\"\n return 1 << 10\n\n @make_permission_alias('read_messages')\n def view_channel(self) -> int:\n \"\"\":class:`bool`: An alias for :attr:`read_messages`.\n\n .. versionadded:: 1.3\n \"\"\"\n return 1 << 10\n\n @flag_value\n def send_messages(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can send messages from all or specific text channels.\"\"\"\n return 1 << 11\n\n @flag_value\n def send_tts_messages(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can send TTS messages from all or specific text channels.\"\"\"\n return 1 << 12\n\n @flag_value\n def manage_messages(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can delete or pin messages in a text channel.\n\n .. note::\n\n Note that there are currently no ways to edit other people's messages.\n \"\"\"\n return 1 << 13\n\n @flag_value\n def embed_links(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user's messages will automatically be embedded by Discord.\"\"\"\n return 1 << 14\n\n @flag_value\n def attach_files(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can send files in their messages.\"\"\"\n return 1 << 15\n\n @flag_value\n def read_message_history(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can read a text channel's previous messages.\"\"\"\n return 1 << 16\n\n @flag_value\n def mention_everyone(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user's @everyone or @here will mention everyone in the text channel.\"\"\"\n return 1 << 17\n\n @flag_value\n def external_emojis(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can use emojis from other guilds.\"\"\"\n return 1 << 18\n\n @make_permission_alias('external_emojis')\n def use_external_emojis(self) -> int:\n \"\"\":class:`bool`: An alias for :attr:`external_emojis`.\n\n .. versionadded:: 1.3\n \"\"\"\n return 1 << 18\n\n @flag_value\n def view_guild_insights(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can view the guild's insights.\n\n .. 
versionadded:: 1.3\n \"\"\"\n return 1 << 19\n\n @flag_value\n def connect(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can connect to a voice channel.\"\"\"\n return 1 << 20\n\n @flag_value\n def speak(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can speak in a voice channel.\"\"\"\n return 1 << 21\n\n @flag_value\n def mute_members(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can mute other users.\"\"\"\n return 1 << 22\n\n @flag_value\n def deafen_members(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can deafen other users.\"\"\"\n return 1 << 23\n\n @flag_value\n def move_members(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can move users between other voice channels.\"\"\"\n return 1 << 24\n\n @flag_value\n def use_voice_activation(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can use voice activation in voice channels.\"\"\"\n return 1 << 25\n\n @flag_value\n def change_nickname(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can change their nickname in the guild.\"\"\"\n return 1 << 26\n\n @flag_value\n def manage_nicknames(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can change other user's nickname in the guild.\"\"\"\n return 1 << 27\n\n @flag_value\n def manage_roles(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can create or edit roles less than their role's position.\n\n This also corresponds to the \"Manage Permissions\" channel-specific override.\n \"\"\"\n return 1 << 28\n\n @make_permission_alias('manage_roles')\n def manage_permissions(self) -> int:\n \"\"\":class:`bool`: An alias for :attr:`manage_roles`.\n\n .. versionadded:: 1.3\n \"\"\"\n return 1 << 28\n\n @flag_value\n def manage_webhooks(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can create, edit, or delete webhooks.\"\"\"\n return 1 << 29\n\n @flag_value\n def manage_expressions(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can edit or delete emojis, stickers, and soundboard sounds.\n\n .. versionadded:: 2.3\n \"\"\"\n return 1 << 30\n\n @make_permission_alias('manage_expressions')\n def manage_emojis(self) -> int:\n \"\"\":class:`bool`: An alias for :attr:`manage_expressions`.\"\"\"\n return 1 << 30\n\n @make_permission_alias('manage_expressions')\n def manage_emojis_and_stickers(self) -> int:\n \"\"\":class:`bool`: An alias for :attr:`manage_expressions`.\n\n .. versionadded:: 2.0\n \"\"\"\n return 1 << 30\n\n @flag_value\n def use_application_commands(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can use slash commands.\n\n .. versionadded:: 1.7\n \"\"\"\n return 1 << 31\n\n @flag_value\n def request_to_speak(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can request to speak in a stage channel.\n\n .. versionadded:: 1.7\n \"\"\"\n return 1 << 32\n\n @flag_value\n def manage_events(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can manage guild events.\n\n .. versionadded:: 2.0\n \"\"\"\n return 1 << 33\n\n @flag_value\n def manage_threads(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can manage threads.\n\n .. versionadded:: 2.0\n \"\"\"\n return 1 << 34\n\n @flag_value\n def create_public_threads(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can create public threads.\n\n .. 
versionadded:: 2.0\n \"\"\"\n return 1 << 35\n\n @flag_value\n def create_private_threads(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can create private threads.\n\n .. versionadded:: 2.0\n \"\"\"\n return 1 << 36\n\n @flag_value\n def external_stickers(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can use stickers from other guilds.\n\n .. versionadded:: 2.0\n \"\"\"\n return 1 << 37\n\n @make_permission_alias('external_stickers')\n def use_external_stickers(self) -> int:\n \"\"\":class:`bool`: An alias for :attr:`external_stickers`.\n\n .. versionadded:: 2.0\n \"\"\"\n return 1 << 37\n\n @flag_value\n def send_messages_in_threads(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can send messages in threads.\n\n .. versionadded:: 2.0\n \"\"\"\n return 1 << 38\n\n @flag_value\n def use_embedded_activities(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can launch an embedded application in a Voice channel.\n\n .. versionadded:: 2.0\n \"\"\"\n return 1 << 39\n\n @flag_value\n def moderate_members(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can time out other members.\n\n .. versionadded:: 2.0\n \"\"\"\n return 1 << 40\n\n @flag_value\n def use_soundboard(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can use the soundboard.\n\n .. versionadded:: 2.3\n \"\"\"\n return 1 << 42\n\n @flag_value\n def create_expressions(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can create emojis, stickers, and soundboard sounds.\n\n .. versionadded:: 2.3\n \"\"\"\n return 1 << 43\n\n @flag_value\n def use_external_sounds(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can use sounds from other guilds.\n\n .. versionadded:: 2.3\n \"\"\"\n return 1 << 45\n\n @flag_value\n def send_voice_messages(self) -> int:\n \"\"\":class:`bool`: Returns ``True`` if a user can send voice messages.\n\n .. versionadded:: 2.3\n \"\"\"\n return 1 << 46\n\n\ndef _augment_from_permissions(cls):\n cls.VALID_NAMES = set(Permissions.VALID_FLAGS)\n aliases = set()\n\n # make descriptors for all the valid names and aliases\n for name, value in Permissions.__dict__.items():\n if isinstance(value, permission_alias):\n key = value.alias\n aliases.add(name)\n elif isinstance(value, flag_value):\n key = name\n else:\n continue\n\n # god bless Python\n def getter(self, x=key):\n return self._values.get(x)\n\n def setter(self, value, x=key):\n self._set(x, value)\n\n prop = property(getter, setter)\n setattr(cls, name, prop)\n\n cls.PURE_FLAGS = cls.VALID_NAMES - aliases\n return cls\n\n\n@_augment_from_permissions\nclass PermissionOverwrite:\n r\"\"\"A type that is used to represent a channel specific permission.\n\n Unlike a regular :class:`Permissions`\\, the default value of a\n permission is equivalent to ``None`` and not ``False``. Setting\n a value to ``False`` is **explicitly** denying that permission,\n while setting a value to ``True`` is **explicitly** allowing\n that permission.\n\n The values supported by this are the same as :class:`Permissions`\n with the added possibility of it being set to ``None``.\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two overwrites are equal.\n .. describe:: x != y\n\n Checks if two overwrites are not equal.\n .. describe:: iter(x)\n\n Returns an iterator of ``(perm, value)`` pairs. 
This allows it\n to be, for example, constructed as a dict or a list of pairs.\n Note that aliases are not shown.\n\n Parameters\n -----------\n \\*\\*kwargs\n Set the value of permissions by their name.\n \"\"\"\n\n __slots__ = ('_values',)\n\n if TYPE_CHECKING:\n VALID_NAMES: ClassVar[Set[str]]\n PURE_FLAGS: ClassVar[Set[str]]\n # I wish I didn't have to do this\n create_instant_invite: Optional[bool]\n kick_members: Optional[bool]\n ban_members: Optional[bool]\n administrator: Optional[bool]\n manage_channels: Optional[bool]\n manage_guild: Optional[bool]\n add_reactions: Optional[bool]\n view_audit_log: Optional[bool]\n priority_speaker: Optional[bool]\n stream: Optional[bool]\n read_messages: Optional[bool]\n view_channel: Optional[bool]\n send_messages: Optional[bool]\n send_tts_messages: Optional[bool]\n manage_messages: Optional[bool]\n embed_links: Optional[bool]\n attach_files: Optional[bool]\n read_message_history: Optional[bool]\n mention_everyone: Optional[bool]\n external_emojis: Optional[bool]\n use_external_emojis: Optional[bool]\n view_guild_insights: Optional[bool]\n connect: Optional[bool]\n speak: Optional[bool]\n mute_members: Optional[bool]\n deafen_members: Optional[bool]\n move_members: Optional[bool]\n use_voice_activation: Optional[bool]\n change_nickname: Optional[bool]\n manage_nicknames: Optional[bool]\n manage_roles: Optional[bool]\n manage_permissions: Optional[bool]\n manage_webhooks: Optional[bool]\n manage_expressions: Optional[bool]\n manage_emojis: Optional[bool]\n manage_emojis_and_stickers: Optional[bool]\n use_application_commands: Optional[bool]\n request_to_speak: Optional[bool]\n manage_events: Optional[bool]\n manage_threads: Optional[bool]\n create_public_threads: Optional[bool]\n create_private_threads: Optional[bool]\n send_messages_in_threads: Optional[bool]\n external_stickers: Optional[bool]\n use_external_stickers: Optional[bool]\n use_embedded_activities: Optional[bool]\n moderate_members: Optional[bool]\n use_soundboard: Optional[bool]\n use_external_sounds: Optional[bool]\n send_voice_messages: Optional[bool]\n create_expressions: Optional[bool]\n\n def __init__(self, **kwargs: Optional[bool]):\n self._values: Dict[str, Optional[bool]] = {}\n\n for key, value in kwargs.items():\n if key not in self.VALID_NAMES:\n raise ValueError(f'no permission called {key}.')\n\n setattr(self, key, value)\n\n def __eq__(self, other: object) -> bool:\n return isinstance(other, PermissionOverwrite) and self._values == other._values\n\n def _set(self, key: str, value: Optional[bool]) -> None:\n if value not in (True, None, False):\n raise TypeError(f'Expected bool or NoneType, received {value.__class__.__name__}')\n\n if value is None:\n self._values.pop(key, None)\n else:\n self._values[key] = value\n\n def pair(self) -> Tuple[Permissions, Permissions]:\n \"\"\"Tuple[:class:`Permissions`, :class:`Permissions`]: Returns the (allow, deny) pair from this overwrite.\"\"\"\n\n allow = Permissions.none()\n deny = Permissions.none()\n\n for key, value in self._values.items():\n if value is True:\n setattr(allow, key, True)\n elif value is False:\n setattr(deny, key, True)\n\n return allow, deny\n\n @classmethod\n def from_pair(cls, allow: Permissions, deny: Permissions) -> Self:\n \"\"\"Creates an overwrite from an allow/deny pair of :class:`Permissions`.\"\"\"\n ret = cls()\n for key, value in allow:\n if value is True:\n setattr(ret, key, True)\n\n for key, value in deny:\n if value is True:\n setattr(ret, key, False)\n\n return ret\n\n def is_empty(self) -> 
bool:\n \"\"\"Checks if the permission overwrite is currently empty.\n\n An empty permission overwrite is one that has no overwrites set\n to ``True`` or ``False``.\n\n Returns\n -------\n :class:`bool`\n Indicates if the overwrite is empty.\n \"\"\"\n return len(self._values) == 0\n\n def update(self, **kwargs: Optional[bool]) -> None:\n r\"\"\"Bulk updates this permission overwrite object.\n\n Allows you to set multiple attributes by using keyword\n arguments. The names must be equivalent to the properties\n listed. Extraneous key/value pairs will be silently ignored.\n\n Parameters\n ------------\n \\*\\*kwargs\n A list of key/value pairs to bulk update with.\n \"\"\"\n for key, value in kwargs.items():\n if key not in self.VALID_NAMES:\n continue\n\n setattr(self, key, value)\n\n def __iter__(self) -> Iterator[Tuple[str, Optional[bool]]]:\n for key in self.PURE_FLAGS:\n yield key, self._values.get(key)\n",
"path": "discord/permissions.py"
}
] | 10_8 | python | import unittest
import sys
class TestPermissionsEvents(unittest.TestCase):
def test_events_class_method(self):
from discord.permissions import Permissions
perms = Permissions.events()
self.assertTrue(perms.create_events)
def test_create_events_property(self):
from discord.permissions import Permissions
perms = Permissions(1 << 44) # Bit for create_events permission
self.assertTrue(perms.create_events)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestPermissionsEvents))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
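A standalone sanity check on the 10_8 bit pattern, runnable without discord.py: the `events()` value is exactly the existing `manage_events` flag (bit 33 in permissions.py) combined with the new `create_events` flag (bit 44).

# Decompose the factory's literal into the two flags it sets.
EVENTS = 0b100000000001000000000000000000000000000000000
assert EVENTS == (1 << 44) | (1 << 33)  # create_events | manage_events
assert EVENTS & (1 << 44)               # the bit the new create_events flag reads
print(bin(EVENTS).count('1'))           # -> 2 permissions enabled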
https://github.com/teamqurrent/discord.py | In the `AuditLogEntry` class within `discord/audit_logs.py`, modify the `_convert_target_user` method to handle the case where `target_id` is `None` (the member_disconnect and member_move action types carry no target id). Ensure that in such cases the method returns `None` safely instead of attempting to build a placeholder `Object` from a missing id, so that `AuditLogEntry.target` is resolved appropriately for user-related audit log entries even when `target_id` is not provided. | 0adef0e | discord | python3.9 | cb6170a7 | diff --git a/discord/audit_logs.py b/discord/audit_logs.py
--- a/discord/audit_logs.py
+++ b/discord/audit_logs.py
@@ -733,12 +733,11 @@ class AuditLogEntry(Hashable):
if self.action.target_type is None:
return None
- if self._target_id is None:
- return None
-
try:
converter = getattr(self, '_convert_target_' + self.action.target_type)
except AttributeError:
+ if self._target_id is None:
+ return None
return Object(id=self._target_id)
else:
return converter(self._target_id)
@@ -771,7 +770,12 @@ class AuditLogEntry(Hashable):
def _convert_target_channel(self, target_id: int) -> Union[abc.GuildChannel, Object]:
return self.guild.get_channel(target_id) or Object(id=target_id)
- def _convert_target_user(self, target_id: int) -> Union[Member, User, Object]:
+ def _convert_target_user(self, target_id: Optional[int]) -> Optional[Union[Member, User, Object]]:
+ # For some reason the member_disconnect and member_move action types
+ # do not have a non-null target_id so safeguard against that
+ if target_id is None:
+ return None
+
return self._get_member(target_id) or Object(id=target_id, type=Member)
def _convert_target_role(self, target_id: int) -> Union[Role, Object]:
| [
{
"content": "\"\"\"\nThe MIT License (MIT)\n\nCopyright (c) 2015-present Rapptz\n\nPermission is hereby granted, free of charge, to any person obtaining a\ncopy of this software and associated documentation files (the \"Software\"),\nto deal in the Software without restriction, including without limitation\nthe rights to use, copy, modify, merge, publish, distribute, sublicense,\nand/or sell copies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in\nall copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\nOR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\nDEALINGS IN THE SOFTWARE.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import TYPE_CHECKING, Any, Callable, ClassVar, Mapping, Generator, List, Optional, Tuple, Type, TypeVar, Union\n\nfrom . import enums, flags, utils\nfrom .asset import Asset\nfrom .colour import Colour\nfrom .invite import Invite\nfrom .mixins import Hashable\nfrom .object import Object\nfrom .permissions import PermissionOverwrite, Permissions\nfrom .automod import AutoModTrigger, AutoModRuleAction, AutoModPresets, AutoModRule\nfrom .role import Role\nfrom .emoji import Emoji\nfrom .partial_emoji import PartialEmoji\nfrom .member import Member\nfrom .scheduled_event import ScheduledEvent\nfrom .stage_instance import StageInstance\nfrom .sticker import GuildSticker\nfrom .threads import Thread\nfrom .integrations import PartialIntegration\nfrom .channel import ForumChannel, StageChannel, ForumTag\n\n__all__ = (\n 'AuditLogDiff',\n 'AuditLogChanges',\n 'AuditLogEntry',\n)\n\n\nif TYPE_CHECKING:\n import datetime\n\n from . 
import abc\n from .guild import Guild\n from .state import ConnectionState\n from .types.audit_log import (\n AuditLogChange as AuditLogChangePayload,\n AuditLogEntry as AuditLogEntryPayload,\n )\n from .types.channel import (\n PermissionOverwrite as PermissionOverwritePayload,\n ForumTag as ForumTagPayload,\n DefaultReaction as DefaultReactionPayload,\n )\n from .types.invite import Invite as InvitePayload\n from .types.role import Role as RolePayload\n from .types.snowflake import Snowflake\n from .types.command import ApplicationCommandPermissions\n from .types.automod import AutoModerationTriggerMetadata, AutoModerationAction\n from .user import User\n from .app_commands import AppCommand\n from .webhook import Webhook\n\n TargetType = Union[\n Guild,\n abc.GuildChannel,\n Member,\n User,\n Role,\n Invite,\n Emoji,\n StageInstance,\n GuildSticker,\n Thread,\n Object,\n PartialIntegration,\n AutoModRule,\n ScheduledEvent,\n Webhook,\n AppCommand,\n None,\n ]\n\n\ndef _transform_timestamp(entry: AuditLogEntry, data: Optional[str]) -> Optional[datetime.datetime]:\n return utils.parse_time(data)\n\n\ndef _transform_color(entry: AuditLogEntry, data: int) -> Colour:\n return Colour(data)\n\n\ndef _transform_snowflake(entry: AuditLogEntry, data: Snowflake) -> int:\n return int(data)\n\n\ndef _transform_channel(entry: AuditLogEntry, data: Optional[Snowflake]) -> Optional[Union[abc.GuildChannel, Object]]:\n if data is None:\n return None\n return entry.guild.get_channel(int(data)) or Object(id=data)\n\n\ndef _transform_channels_or_threads(\n entry: AuditLogEntry, data: List[Snowflake]\n) -> List[Union[abc.GuildChannel, Thread, Object]]:\n return [entry.guild.get_channel_or_thread(int(data)) or Object(id=data) for data in data]\n\n\ndef _transform_member_id(entry: AuditLogEntry, data: Optional[Snowflake]) -> Union[Member, User, None]:\n if data is None:\n return None\n return entry._get_member(int(data))\n\n\ndef _transform_guild_id(entry: AuditLogEntry, data: Optional[Snowflake]) -> Optional[Guild]:\n if data is None:\n return None\n return entry._state._get_guild(int(data))\n\n\ndef _transform_roles(entry: AuditLogEntry, data: List[Snowflake]) -> List[Union[Role, Object]]:\n return [entry.guild.get_role(int(role_id)) or Object(role_id, type=Role) for role_id in data]\n\n\ndef _transform_applied_forum_tags(entry: AuditLogEntry, data: List[Snowflake]) -> List[Union[ForumTag, Object]]:\n thread = entry.target\n if isinstance(thread, Thread) and isinstance(thread.parent, ForumChannel):\n return [thread.parent.get_tag(tag_id) or Object(id=tag_id, type=ForumTag) for tag_id in map(int, data)]\n return [Object(id=tag_id, type=ForumTag) for tag_id in data]\n\n\ndef _transform_overloaded_flags(entry: AuditLogEntry, data: int) -> Union[int, flags.ChannelFlags]:\n # The `flags` key is definitely overloaded. Right now it's for channels and threads but\n # I am aware of `member.flags` and `user.flags` existing. 
However, this does not impact audit logs\n # at the moment but better safe than sorry.\n channel_audit_log_types = (\n enums.AuditLogAction.channel_create,\n enums.AuditLogAction.channel_update,\n enums.AuditLogAction.channel_delete,\n enums.AuditLogAction.thread_create,\n enums.AuditLogAction.thread_update,\n enums.AuditLogAction.thread_delete,\n )\n\n if entry.action in channel_audit_log_types:\n return flags.ChannelFlags._from_value(data)\n return data\n\n\ndef _transform_forum_tags(entry: AuditLogEntry, data: List[ForumTagPayload]) -> List[ForumTag]:\n return [ForumTag.from_data(state=entry._state, data=d) for d in data]\n\n\ndef _transform_default_reaction(entry: AuditLogEntry, data: DefaultReactionPayload) -> Optional[PartialEmoji]:\n if data is None:\n return None\n\n emoji_name = data.get('emoji_name') or ''\n emoji_id = utils._get_as_snowflake(data, 'emoji_id') or None # Coerce 0 -> None\n return PartialEmoji.with_state(state=entry._state, name=emoji_name, id=emoji_id)\n\n\ndef _transform_overwrites(\n entry: AuditLogEntry, data: List[PermissionOverwritePayload]\n) -> List[Tuple[Object, PermissionOverwrite]]:\n overwrites = []\n for elem in data:\n allow = Permissions(int(elem['allow']))\n deny = Permissions(int(elem['deny']))\n ow = PermissionOverwrite.from_pair(allow, deny)\n\n ow_type = elem['type']\n ow_id = int(elem['id'])\n target = None\n if ow_type == '0':\n target = entry.guild.get_role(ow_id)\n elif ow_type == '1':\n target = entry._get_member(ow_id)\n\n if target is None:\n target = Object(id=ow_id, type=Role if ow_type == '0' else Member)\n\n overwrites.append((target, ow))\n\n return overwrites\n\n\ndef _transform_icon(entry: AuditLogEntry, data: Optional[str]) -> Optional[Asset]:\n if data is None:\n return None\n if entry.action is enums.AuditLogAction.guild_update:\n return Asset._from_guild_icon(entry._state, entry.guild.id, data)\n else:\n return Asset._from_icon(entry._state, entry._target_id, data, path='role') # type: ignore # target_id won't be None in this case\n\n\ndef _transform_avatar(entry: AuditLogEntry, data: Optional[str]) -> Optional[Asset]:\n if data is None:\n return None\n return Asset._from_avatar(entry._state, entry._target_id, data) # type: ignore # target_id won't be None in this case\n\n\ndef _transform_cover_image(entry: AuditLogEntry, data: Optional[str]) -> Optional[Asset]:\n if data is None:\n return None\n return Asset._from_scheduled_event_cover_image(entry._state, entry._target_id, data) # type: ignore # target_id won't be None in this case\n\n\ndef _guild_hash_transformer(path: str) -> Callable[[AuditLogEntry, Optional[str]], Optional[Asset]]:\n def _transform(entry: AuditLogEntry, data: Optional[str]) -> Optional[Asset]:\n if data is None:\n return None\n return Asset._from_guild_image(entry._state, entry.guild.id, data, path=path)\n\n return _transform\n\n\ndef _transform_automod_trigger_metadata(\n entry: AuditLogEntry, data: AutoModerationTriggerMetadata\n) -> Optional[AutoModTrigger]:\n\n if isinstance(entry.target, AutoModRule):\n # Trigger type cannot be changed, so type should be the same before and after updates.\n # Avoids checking which keys are in data to guess trigger type\n # or returning None if data is empty.\n try:\n return AutoModTrigger.from_data(type=entry.target.trigger.type.value, data=data)\n except Exception:\n pass\n\n # Try to infer trigger type from available keys in data\n if 'presets' in data:\n return AutoModTrigger(\n type=enums.AutoModRuleTriggerType.keyword_preset,\n 
presets=AutoModPresets._from_value(data['presets']), # type: ignore\n allow_list=data.get('allow_list'),\n )\n elif 'keyword_filter' in data:\n return AutoModTrigger(\n type=enums.AutoModRuleTriggerType.keyword,\n keyword_filter=data['keyword_filter'], # type: ignore\n allow_list=data.get('allow_list'),\n regex_patterns=data.get('regex_patterns'),\n )\n elif 'mention_total_limit' in data:\n return AutoModTrigger(type=enums.AutoModRuleTriggerType.mention_spam, mention_limit=data['mention_total_limit']) # type: ignore\n else:\n return AutoModTrigger(type=enums.AutoModRuleTriggerType.spam)\n\n\ndef _transform_automod_actions(entry: AuditLogEntry, data: List[AutoModerationAction]) -> List[AutoModRuleAction]:\n return [AutoModRuleAction.from_data(action) for action in data]\n\n\nE = TypeVar('E', bound=enums.Enum)\n\n\ndef _enum_transformer(enum: Type[E]) -> Callable[[AuditLogEntry, int], E]:\n def _transform(entry: AuditLogEntry, data: int) -> E:\n return enums.try_enum(enum, data)\n\n return _transform\n\n\nF = TypeVar('F', bound=flags.BaseFlags)\n\n\ndef _flag_transformer(cls: Type[F]) -> Callable[[AuditLogEntry, Union[int, str]], F]:\n def _transform(entry: AuditLogEntry, data: Union[int, str]) -> F:\n return cls._from_value(int(data))\n\n return _transform\n\n\ndef _transform_type(\n entry: AuditLogEntry, data: Union[int, str]\n) -> Union[enums.ChannelType, enums.StickerType, enums.WebhookType, str]:\n if entry.action.name.startswith('sticker_'):\n return enums.try_enum(enums.StickerType, data)\n elif entry.action.name.startswith('integration_'):\n return data # type: ignore # integration type is str\n elif entry.action.name.startswith('webhook_'):\n return enums.try_enum(enums.WebhookType, data)\n else:\n return enums.try_enum(enums.ChannelType, data)\n\n\nclass AuditLogDiff:\n def __len__(self) -> int:\n return len(self.__dict__)\n\n def __iter__(self) -> Generator[Tuple[str, Any], None, None]:\n yield from self.__dict__.items()\n\n def __repr__(self) -> str:\n values = ' '.join('%s=%r' % item for item in self.__dict__.items())\n return f'<AuditLogDiff {values}>'\n\n if TYPE_CHECKING:\n\n def __getattr__(self, item: str) -> Any:\n ...\n\n def __setattr__(self, key: str, value: Any) -> Any:\n ...\n\n\nTransformer = Callable[[\"AuditLogEntry\", Any], Any]\n\n\nclass AuditLogChanges:\n # fmt: off\n TRANSFORMERS: ClassVar[Mapping[str, Tuple[Optional[str], Optional[Transformer]]]] = {\n 'verification_level': (None, _enum_transformer(enums.VerificationLevel)),\n 'explicit_content_filter': (None, _enum_transformer(enums.ContentFilter)),\n 'allow': (None, _flag_transformer(Permissions)),\n 'deny': (None, _flag_transformer(Permissions)),\n 'permissions': (None, _flag_transformer(Permissions)),\n 'id': (None, _transform_snowflake),\n 'color': ('colour', _transform_color),\n 'owner_id': ('owner', _transform_member_id),\n 'inviter_id': ('inviter', _transform_member_id),\n 'channel_id': ('channel', _transform_channel),\n 'afk_channel_id': ('afk_channel', _transform_channel),\n 'system_channel_id': ('system_channel', _transform_channel),\n 'system_channel_flags': (None, _flag_transformer(flags.SystemChannelFlags)),\n 'widget_channel_id': ('widget_channel', _transform_channel),\n 'rules_channel_id': ('rules_channel', _transform_channel),\n 'public_updates_channel_id': ('public_updates_channel', _transform_channel),\n 'permission_overwrites': ('overwrites', _transform_overwrites),\n 'splash_hash': ('splash', _guild_hash_transformer('splashes')),\n 'banner_hash': ('banner', 
_guild_hash_transformer('banners')),\n 'discovery_splash_hash': ('discovery_splash', _guild_hash_transformer('discovery-splashes')),\n 'icon_hash': ('icon', _transform_icon),\n 'avatar_hash': ('avatar', _transform_avatar),\n 'rate_limit_per_user': ('slowmode_delay', None),\n 'default_thread_rate_limit_per_user': ('default_thread_slowmode_delay', None),\n 'guild_id': ('guild', _transform_guild_id),\n 'tags': ('emoji', None),\n 'default_message_notifications': ('default_notifications', _enum_transformer(enums.NotificationLevel)),\n 'video_quality_mode': (None, _enum_transformer(enums.VideoQualityMode)),\n 'privacy_level': (None, _enum_transformer(enums.PrivacyLevel)),\n 'format_type': (None, _enum_transformer(enums.StickerFormatType)),\n 'type': (None, _transform_type),\n 'communication_disabled_until': ('timed_out_until', _transform_timestamp),\n 'expire_behavior': (None, _enum_transformer(enums.ExpireBehaviour)),\n 'mfa_level': (None, _enum_transformer(enums.MFALevel)),\n 'status': (None, _enum_transformer(enums.EventStatus)),\n 'entity_type': (None, _enum_transformer(enums.EntityType)),\n 'preferred_locale': (None, _enum_transformer(enums.Locale)),\n 'image_hash': ('cover_image', _transform_cover_image),\n 'trigger_type': (None, _enum_transformer(enums.AutoModRuleTriggerType)),\n 'event_type': (None, _enum_transformer(enums.AutoModRuleEventType)),\n 'trigger_metadata': ('trigger', _transform_automod_trigger_metadata),\n 'actions': (None, _transform_automod_actions),\n 'exempt_channels': (None, _transform_channels_or_threads),\n 'exempt_roles': (None, _transform_roles),\n 'applied_tags': (None, _transform_applied_forum_tags),\n 'available_tags': (None, _transform_forum_tags),\n 'flags': (None, _transform_overloaded_flags),\n 'default_reaction_emoji': (None, _transform_default_reaction),\n }\n # fmt: on\n\n def __init__(self, entry: AuditLogEntry, data: List[AuditLogChangePayload]):\n self.before: AuditLogDiff = AuditLogDiff()\n self.after: AuditLogDiff = AuditLogDiff()\n # special case entire process since each\n # element in data is a different target\n # key is the target id\n if entry.action is enums.AuditLogAction.app_command_permission_update:\n self.before.app_command_permissions = []\n self.after.app_command_permissions = []\n\n for elem in data:\n self._handle_app_command_permissions(\n self.before,\n entry,\n elem.get('old_value'), # type: ignore # value will be an ApplicationCommandPermissions if present\n )\n\n self._handle_app_command_permissions(\n self.after,\n entry,\n elem.get('new_value'), # type: ignore # value will be an ApplicationCommandPermissions if present\n )\n return\n\n for elem in data:\n attr = elem['key']\n\n # special cases for role add/remove\n if attr == '$add':\n self._handle_role(self.before, self.after, entry, elem['new_value']) # type: ignore # new_value is a list of roles in this case\n continue\n elif attr == '$remove':\n self._handle_role(self.after, self.before, entry, elem['new_value']) # type: ignore # new_value is a list of roles in this case\n continue\n\n try:\n key, transformer = self.TRANSFORMERS[attr]\n except (ValueError, KeyError):\n transformer = None\n else:\n if key:\n attr = key\n\n transformer: Optional[Transformer]\n\n try:\n before = elem['old_value']\n except KeyError:\n before = None\n else:\n if transformer:\n before = transformer(entry, before)\n\n setattr(self.before, attr, before)\n\n try:\n after = elem['new_value']\n except KeyError:\n after = None\n else:\n if transformer:\n after = transformer(entry, after)\n\n 
setattr(self.after, attr, after)\n\n # add an alias\n if hasattr(self.after, 'colour'):\n self.after.color = self.after.colour\n self.before.color = self.before.colour\n if hasattr(self.after, 'expire_behavior'):\n self.after.expire_behaviour = self.after.expire_behavior\n self.before.expire_behaviour = self.before.expire_behavior\n\n def __repr__(self) -> str:\n return f'<AuditLogChanges before={self.before!r} after={self.after!r}>'\n\n def _handle_role(self, first: AuditLogDiff, second: AuditLogDiff, entry: AuditLogEntry, elem: List[RolePayload]) -> None:\n if not hasattr(first, 'roles'):\n setattr(first, 'roles', [])\n\n data = []\n g: Guild = entry.guild\n\n for e in elem:\n role_id = int(e['id'])\n role = g.get_role(role_id)\n\n if role is None:\n role = Object(id=role_id, type=Role)\n role.name = e['name'] # type: ignore # Object doesn't usually have name\n\n data.append(role)\n\n setattr(second, 'roles', data)\n\n def _handle_app_command_permissions(\n self,\n diff: AuditLogDiff,\n entry: AuditLogEntry,\n data: Optional[ApplicationCommandPermissions],\n ):\n if data is None:\n return\n\n # avoid circular import\n from discord.app_commands import AppCommandPermissions\n\n state = entry._state\n guild = entry.guild\n diff.app_command_permissions.append(AppCommandPermissions(data=data, guild=guild, state=state))\n\n\nclass _AuditLogProxy:\n def __init__(self, **kwargs: Any) -> None:\n for k, v in kwargs.items():\n setattr(self, k, v)\n\n\nclass _AuditLogProxyMemberPrune(_AuditLogProxy):\n delete_member_days: int\n members_removed: int\n\n\nclass _AuditLogProxyMemberMoveOrMessageDelete(_AuditLogProxy):\n channel: Union[abc.GuildChannel, Thread]\n count: int\n\n\nclass _AuditLogProxyMemberDisconnect(_AuditLogProxy):\n count: int\n\n\nclass _AuditLogProxyPinAction(_AuditLogProxy):\n channel: Union[abc.GuildChannel, Thread]\n message_id: int\n\n\nclass _AuditLogProxyStageInstanceAction(_AuditLogProxy):\n channel: abc.GuildChannel\n\n\nclass _AuditLogProxyMessageBulkDelete(_AuditLogProxy):\n count: int\n\n\nclass _AuditLogProxyAutoModAction(_AuditLogProxy):\n automod_rule_name: str\n automod_rule_trigger_type: str\n channel: Optional[Union[abc.GuildChannel, Thread]]\n\n\nclass AuditLogEntry(Hashable):\n r\"\"\"Represents an Audit Log entry.\n\n You retrieve these via :meth:`Guild.audit_logs`.\n\n .. container:: operations\n\n .. describe:: x == y\n\n Checks if two entries are equal.\n\n .. describe:: x != y\n\n Checks if two entries are not equal.\n\n .. describe:: hash(x)\n\n Returns the entry's hash.\n\n .. versionchanged:: 1.7\n Audit log entries are now comparable and hashable.\n\n Attributes\n -----------\n action: :class:`AuditLogAction`\n The action that was done.\n user: Optional[:class:`abc.User`]\n The user who initiated this action. Usually a :class:`Member`\\, unless gone\n then it's a :class:`User`.\n user_id: Optional[:class:`int`]\n The user ID who initiated this action.\n\n .. versionadded:: 2.2\n id: :class:`int`\n The entry ID.\n guild: :class:`Guild`\n The guild that this entry belongs to.\n target: Any\n The target that got changed. The exact type of this depends on\n the action being done.\n reason: Optional[:class:`str`]\n The reason this action was done.\n extra: Any\n Extra information that this entry has that might be useful.\n For most actions, this is ``None``. However in some cases it\n contains extra information. 
See :class:`AuditLogAction` for\n which actions have this field filled out.\n \"\"\"\n\n def __init__(\n self,\n *,\n users: Mapping[int, User],\n integrations: Mapping[int, PartialIntegration],\n app_commands: Mapping[int, AppCommand],\n automod_rules: Mapping[int, AutoModRule],\n webhooks: Mapping[int, Webhook],\n data: AuditLogEntryPayload,\n guild: Guild,\n ):\n self._state: ConnectionState = guild._state\n self.guild: Guild = guild\n self._users: Mapping[int, User] = users\n self._integrations: Mapping[int, PartialIntegration] = integrations\n self._app_commands: Mapping[int, AppCommand] = app_commands\n self._automod_rules: Mapping[int, AutoModRule] = automod_rules\n self._webhooks: Mapping[int, Webhook] = webhooks\n self._from_data(data)\n\n def _from_data(self, data: AuditLogEntryPayload) -> None:\n self.action: enums.AuditLogAction = enums.try_enum(enums.AuditLogAction, data['action_type'])\n self.id: int = int(data['id'])\n\n # this key is technically not usually present\n self.reason: Optional[str] = data.get('reason')\n extra = data.get('options')\n\n # fmt: off\n self.extra: Union[\n _AuditLogProxyMemberPrune,\n _AuditLogProxyMemberMoveOrMessageDelete,\n _AuditLogProxyMemberDisconnect,\n _AuditLogProxyPinAction,\n _AuditLogProxyStageInstanceAction,\n _AuditLogProxyMessageBulkDelete,\n _AuditLogProxyAutoModAction,\n Member, User, None, PartialIntegration,\n Role, Object\n ] = None\n # fmt: on\n\n if isinstance(self.action, enums.AuditLogAction) and extra:\n if self.action is enums.AuditLogAction.member_prune:\n # member prune has two keys with useful information\n self.extra = _AuditLogProxyMemberPrune(\n delete_member_days=int(extra['delete_member_days']),\n members_removed=int(extra['members_removed']),\n )\n elif self.action is enums.AuditLogAction.member_move or self.action is enums.AuditLogAction.message_delete:\n channel_id = int(extra['channel_id'])\n self.extra = _AuditLogProxyMemberMoveOrMessageDelete(\n count=int(extra['count']),\n channel=self.guild.get_channel_or_thread(channel_id) or Object(id=channel_id),\n )\n elif self.action is enums.AuditLogAction.member_disconnect:\n # The member disconnect action has a dict with some information\n self.extra = _AuditLogProxyMemberDisconnect(count=int(extra['count']))\n elif self.action is enums.AuditLogAction.message_bulk_delete:\n # The bulk message delete action has the number of messages deleted\n self.extra = _AuditLogProxyMessageBulkDelete(count=int(extra['count']))\n elif self.action.name.endswith('pin'):\n # the pin actions have a dict with some information\n channel_id = int(extra['channel_id'])\n self.extra = _AuditLogProxyPinAction(\n channel=self.guild.get_channel_or_thread(channel_id) or Object(id=channel_id),\n message_id=int(extra['message_id']),\n )\n elif (\n self.action is enums.AuditLogAction.automod_block_message\n or self.action is enums.AuditLogAction.automod_flag_message\n or self.action is enums.AuditLogAction.automod_timeout_member\n ):\n channel_id = utils._get_as_snowflake(extra, 'channel_id')\n channel = None\n if channel_id is not None:\n channel = self.guild.get_channel_or_thread(channel_id) or Object(id=channel_id)\n\n self.extra = _AuditLogProxyAutoModAction(\n automod_rule_name=extra['auto_moderation_rule_name'],\n automod_rule_trigger_type=enums.try_enum(\n enums.AutoModRuleTriggerType, extra['auto_moderation_rule_trigger_type']\n ),\n channel=channel,\n )\n\n elif self.action.name.startswith('overwrite_'):\n # the overwrite_ actions have a dict with some information\n instance_id = 
int(extra['id'])\n the_type = extra.get('type')\n if the_type == '1':\n self.extra = self._get_member(instance_id)\n elif the_type == '0':\n role = self.guild.get_role(instance_id)\n if role is None:\n role = Object(id=instance_id, type=Role)\n role.name = extra.get('role_name') # type: ignore # Object doesn't usually have name\n self.extra = role\n elif self.action.name.startswith('stage_instance'):\n channel_id = int(extra['channel_id'])\n self.extra = _AuditLogProxyStageInstanceAction(\n channel=self.guild.get_channel(channel_id) or Object(id=channel_id, type=StageChannel)\n )\n elif self.action.name.startswith('app_command'):\n app_id = int(extra['application_id'])\n self.extra = self._get_integration_by_app_id(app_id) or Object(app_id, type=PartialIntegration)\n\n # this key is not present when the above is present, typically.\n # It's a list of { new_value: a, old_value: b, key: c }\n # where new_value and old_value are not guaranteed to be there depending\n # on the action type, so let's just fetch it for now and only turn it\n # into meaningful data when requested\n self._changes = data.get('changes', [])\n\n self.user_id: Optional[int] = utils._get_as_snowflake(data, 'user_id')\n self.user: Optional[Union[User, Member]] = self._get_member(self.user_id)\n self._target_id = utils._get_as_snowflake(data, 'target_id')\n\n def _get_member(self, user_id: Optional[int]) -> Union[Member, User, None]:\n if user_id is None:\n return None\n\n return self.guild.get_member(user_id) or self._users.get(user_id)\n\n def _get_integration(self, integration_id: Optional[int]) -> Optional[PartialIntegration]:\n if integration_id is None:\n return None\n\n return self._integrations.get(integration_id)\n\n def _get_integration_by_app_id(self, application_id: Optional[int]) -> Optional[PartialIntegration]:\n if application_id is None:\n return None\n\n # get PartialIntegration by application id\n return utils.get(self._integrations.values(), application_id=application_id)\n\n def _get_app_command(self, app_command_id: Optional[int]) -> Optional[AppCommand]:\n if app_command_id is None:\n return None\n\n return self._app_commands.get(app_command_id)\n\n def __repr__(self) -> str:\n return f'<AuditLogEntry id={self.id} action={self.action} user={self.user!r}>'\n\n @utils.cached_property\n def created_at(self) -> datetime.datetime:\n \"\"\":class:`datetime.datetime`: Returns the entry's creation time in UTC.\"\"\"\n return utils.snowflake_time(self.id)\n\n @utils.cached_property\n def target(self) -> TargetType:\n if self.action.target_type is None:\n return None\n\n if self._target_id is None:\n return None\n\n try:\n converter = getattr(self, '_convert_target_' + self.action.target_type)\n except AttributeError:\n return Object(id=self._target_id)\n else:\n return converter(self._target_id)\n\n @utils.cached_property\n def category(self) -> Optional[enums.AuditLogActionCategory]:\n \"\"\"Optional[:class:`AuditLogActionCategory`]: The category of the action, if applicable.\"\"\"\n return self.action.category\n\n @utils.cached_property\n def changes(self) -> AuditLogChanges:\n \"\"\":class:`AuditLogChanges`: The list of changes this entry has.\"\"\"\n obj = AuditLogChanges(self, self._changes)\n del self._changes\n return obj\n\n @utils.cached_property\n def before(self) -> AuditLogDiff:\n \"\"\":class:`AuditLogDiff`: The target's prior state.\"\"\"\n return self.changes.before\n\n @utils.cached_property\n def after(self) -> AuditLogDiff:\n \"\"\":class:`AuditLogDiff`: The target's subsequent 
state.\"\"\"\n return self.changes.after\n\n def _convert_target_guild(self, target_id: int) -> Guild:\n return self.guild\n\n def _convert_target_channel(self, target_id: int) -> Union[abc.GuildChannel, Object]:\n return self.guild.get_channel(target_id) or Object(id=target_id)\n\n def _convert_target_user(self, target_id: int) -> Union[Member, User, Object]:\n return self._get_member(target_id) or Object(id=target_id, type=Member)\n\n def _convert_target_role(self, target_id: int) -> Union[Role, Object]:\n return self.guild.get_role(target_id) or Object(id=target_id, type=Role)\n\n def _convert_target_invite(self, target_id: None) -> Invite:\n # invites have target_id set to null\n # so figure out which change has the full invite data\n changeset = self.before if self.action is enums.AuditLogAction.invite_delete else self.after\n\n fake_payload: InvitePayload = {\n 'max_age': changeset.max_age,\n 'max_uses': changeset.max_uses,\n 'code': changeset.code,\n 'temporary': changeset.temporary,\n 'uses': changeset.uses,\n 'channel': None, # type: ignore # the channel is passed to the Invite constructor directly\n }\n\n obj = Invite(state=self._state, data=fake_payload, guild=self.guild, channel=changeset.channel)\n try:\n obj.inviter = changeset.inviter\n except AttributeError:\n pass\n return obj\n\n def _convert_target_emoji(self, target_id: int) -> Union[Emoji, Object]:\n return self._state.get_emoji(target_id) or Object(id=target_id, type=Emoji)\n\n def _convert_target_message(self, target_id: int) -> Union[Member, User, Object]:\n return self._get_member(target_id) or Object(id=target_id, type=Member)\n\n def _convert_target_stage_instance(self, target_id: int) -> Union[StageInstance, Object]:\n return self.guild.get_stage_instance(target_id) or Object(id=target_id, type=StageInstance)\n\n def _convert_target_sticker(self, target_id: int) -> Union[GuildSticker, Object]:\n return self._state.get_sticker(target_id) or Object(id=target_id, type=GuildSticker)\n\n def _convert_target_thread(self, target_id: int) -> Union[Thread, Object]:\n return self.guild.get_thread(target_id) or Object(id=target_id, type=Thread)\n\n def _convert_target_guild_scheduled_event(self, target_id: int) -> Union[ScheduledEvent, Object]:\n return self.guild.get_scheduled_event(target_id) or Object(id=target_id, type=ScheduledEvent)\n\n def _convert_target_integration(self, target_id: int) -> Union[PartialIntegration, Object]:\n return self._get_integration(target_id) or Object(target_id, type=PartialIntegration)\n\n def _convert_target_app_command(self, target_id: int) -> Union[AppCommand, Object]:\n target = self._get_app_command(target_id)\n if not target:\n # circular import\n from .app_commands import AppCommand\n\n target = Object(target_id, type=AppCommand)\n\n return target\n\n def _convert_target_integration_or_app_command(self, target_id: int) -> Union[PartialIntegration, AppCommand, Object]:\n target = self._get_integration_by_app_id(target_id) or self._get_app_command(target_id)\n if not target:\n try:\n # circular import\n from .app_commands import AppCommand\n\n # get application id from extras\n # if it matches target id, type should be integration\n target_app = self.extra\n # extra should be an Object or PartialIntegration\n app_id = target_app.application_id if isinstance(target_app, PartialIntegration) else target_app.id # type: ignore\n type = PartialIntegration if target_id == app_id else AppCommand\n except AttributeError:\n return Object(target_id)\n else:\n return Object(target_id, 
type=type)\n\n return target\n\n def _convert_target_auto_moderation(self, target_id: int) -> Union[AutoModRule, Object]:\n return self._automod_rules.get(target_id) or Object(target_id, type=AutoModRule)\n\n def _convert_target_webhook(self, target_id: int) -> Union[Webhook, Object]:\n # circular import\n from .webhook import Webhook\n\n return self._webhooks.get(target_id) or Object(target_id, type=Webhook)\n",
"path": "discord/audit_logs.py"
}
] | 10_9 | python | import unittest
import sys
class TestAuditLogEntryTarget(unittest.TestCase):
def setUp(self):
from discord.audit_logs import AuditLogEntry
from discord import enums
from unittest.mock import MagicMock
# Mocking the required arguments for AuditLogEntry
mock_guild = MagicMock()
mock_users = MagicMock()
mock_integrations = MagicMock()
mock_app_commands = MagicMock()
mock_automod_rules = MagicMock()
mock_webhooks = MagicMock()
# Providing mock data for AuditLogEntry, including action_type
mock_data = {
'action_type': enums.AuditLogAction.guild_update, # Example action type
'target_id': None,
'changes': [],
'user_id': '123',
'id': 1,
'reason': None
}
self.audit_log_entry = AuditLogEntry(
data=mock_data,
guild=mock_guild,
users=mock_users,
integrations=mock_integrations,
app_commands=mock_app_commands,
automod_rules=mock_automod_rules,
webhooks=mock_webhooks
)
def test_target_with_none_target_id(self):
from unittest.mock import MagicMock
self.audit_log_entry.action = MagicMock()
self.audit_log_entry.action.target_type = 'user'
# Mock the _convert_target_user method to return a specific value
self.audit_log_entry._convert_target_user = MagicMock(return_value='MockUser')
target = self.audit_log_entry.target
self.assertEqual(target, 'MockUser')
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestAuditLogEntryTarget))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
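The prowler record below calls for a service-class constructor to consume its own `service` attribute instead of an outer `service` variable. A minimal sketch of that pattern follows; only the file path and the `__generate_client__` name come from the task text — the class name `GCPService`, the `api_version` parameter, and the method body are illustrative assumptions, not code from the prowler repository.

```python
# Hypothetical sketch of the encapsulation fix described in the next record.
# Everything beyond the __generate_client__ name is an illustrative assumption.
class GCPService:
    def __init__(self, service: str, api_version: str = "v1"):
        self.service = service
        self.api_version = api_version
        # Before the fix (assumed): self.client = self.__generate_client__(service)
        # After the fix: reference the instance's own attribute, so the call
        # no longer depends on the constructor parameter's local scope.
        self.client = self.__generate_client__(self.service)

    def __generate_client__(self, service: str):
        # Placeholder client factory; the real code would build an API client.
        return f"client-for-{service}-{self.api_version}"


if __name__ == "__main__":
    svc = GCPService("compute")
    print(svc.client)  # -> client-for-compute-v1
```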
|
https://github.com/teamqurrent/prowler | The goal is to correct a reference issue in the `__init__` method of a specific class within the `service.py` file, located in the `prowler/providers/gcp/lib/service` directory. The task involves ensuring that the `__generate_client__` method is called with the instance's own `service` attribute rather than the external `service` variable. This change is crucial for proper encapsulation and for avoiding scope-related bugs. | cd03fa6 | about-time==4.2.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece \
--hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341
adal==1.2.7 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d \
--hash=sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1
alive-progress==3.1.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5 \
--hash=sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98
attrs==23.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
--hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
awsipranges==0.3.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0 \
--hash=sha256:f3d7a54aeaf7fe310beb5d377a4034a63a51b72677ae6af3e0967bc4de7eedaf
azure-common==1.1.28 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3 \
--hash=sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad
azure-core==1.28.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9 \
--hash=sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd
azure-identity==1.15.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8 \
--hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912
azure-mgmt-authorization==4.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4 \
--hash=sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a
azure-mgmt-core==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d \
--hash=sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae
azure-mgmt-security==5.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38b03efe82c2344cea203fda95e6d00b7ac22782fa1c0b585cd0ea2c8ff3e702 \
--hash=sha256:73a74ce8f6ffb1b345ce101c8abdd42238f161f0988d168d23918feda0089654
azure-mgmt-sql==3.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:129042cc011225e27aee6ef2697d585fa5722e5d1aeb0038af6ad2451a285457 \
--hash=sha256:1d1dd940d4d41be4ee319aad626341251572a5bf4a2addec71779432d9a1381f
azure-mgmt-storage==21.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc \
--hash=sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162
azure-mgmt-subscription==3.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38d4574a8d47fa17e3587d756e296cb63b82ad8fb21cd8543bcee443a502bf48 \
--hash=sha256:4e255b4ce9b924357bb8c5009b3c88a2014d3203b2495e2256fa027bf84e800e
azure-storage-blob==12.19.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897 \
--hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b
boto3==1.26.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30 \
--hash=sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c
botocore==1.29.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df \
--hash=sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210
cachetools==5.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
certifi==2023.7.22 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
cffi==1.15.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
click==8.1.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
colorama==0.4.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
contextlib2==21.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \
--hash=sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869
cryptography==41.0.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \
--hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \
--hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \
--hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \
--hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \
--hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \
--hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \
--hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \
--hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \
--hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \
--hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \
--hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \
--hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \
--hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \
--hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \
--hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \
--hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \
--hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \
--hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \
--hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \
--hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \
--hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \
--hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae
detect-secrets==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd \
--hash=sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833
filelock==3.12.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81 \
--hash=sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec
google-api-core==2.11.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \
--hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e
google-api-python-client==2.111.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3a45a53c031478d1c82c7162dd25c9a965247bca6bd438af0838a9d9b8219405 \
--hash=sha256:b605adee2d09a843b97a59925757802904679e44e5599708cedb8939900dfbc7
google-auth-httplib2==0.2.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05 \
--hash=sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d
google-auth==2.17.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
--hash=sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f
googleapis-common-protos==1.59.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44 \
--hash=sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f
grapheme==0.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca
httplib2==0.22.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \
--hash=sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81
idna==3.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
isodate==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 \
--hash=sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9
jmespath==1.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
jsonschema-specifications==2023.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \
--hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28
jsonschema==4.20.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa \
--hash=sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3
msal-extensions==1.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee \
--hash=sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354
msal==1.24.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8 \
--hash=sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad
msgraph-core==0.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:147324246788abe8ed7e05534cd9e4e0ec98b33b30e011693b8d014cebf97f63 \
--hash=sha256:e297564b9a0ca228493d8851f95cb2de9522143d82efa40ce3a6ad286e21392e
msrest==0.7.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32 \
--hash=sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9
msrestazure==0.6.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9 \
--hash=sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189
oauthlib==3.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \
--hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918
portalocker==2.7.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51 \
--hash=sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983
protobuf==4.23.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \
--hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \
--hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \
--hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \
--hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \
--hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \
--hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \
--hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \
--hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \
--hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \
--hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \
--hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2
pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
--hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
pycparser==2.21 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.13 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \
--hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \
--hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \
--hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \
--hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \
--hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \
--hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \
--hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \
--hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \
--hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \
--hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \
--hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \
--hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \
--hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \
--hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \
--hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \
--hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \
--hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \
--hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \
--hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \
--hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \
--hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \
--hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \
--hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \
--hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \
--hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \
--hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \
--hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \
--hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \
--hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \
--hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \
--hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \
--hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \
--hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \
--hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0
pyjwt==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyjwt[crypto]==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
pywin32==306 ; python_version >= "3.9" and platform_system == "Windows" and python_version < "3.12" \
--hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \
--hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \
--hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \
--hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \
--hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \
--hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \
--hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \
--hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \
--hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \
--hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \
--hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \
--hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0
pyyaml==6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
referencing==0.29.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \
--hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f
requests-file==1.5.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \
--hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953
requests-oauthlib==1.3.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \
--hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a
requests==2.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
rpds-py==0.8.10 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \
--hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \
--hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \
--hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \
--hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \
--hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \
--hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \
--hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \
--hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \
--hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \
--hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \
--hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \
--hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \
--hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \
--hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \
--hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \
--hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \
--hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \
--hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \
--hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \
--hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \
--hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \
--hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \
--hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \
--hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \
--hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \
--hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \
--hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \
--hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \
--hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \
--hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \
--hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \
--hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \
--hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \
--hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \
--hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \
--hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \
--hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \
--hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \
--hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \
--hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \
--hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \
--hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \
--hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \
--hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \
--hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \
--hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \
--hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \
--hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \
--hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \
--hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \
--hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \
--hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \
--hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \
--hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \
--hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \
--hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \
--hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \
--hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \
--hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \
--hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \
--hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \
--hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \
--hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \
--hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \
--hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \
--hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \
--hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \
--hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \
--hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \
--hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \
--hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \
--hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \
--hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \
--hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \
--hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \
--hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \
--hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \
--hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \
--hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \
--hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84
rsa==4.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
s3transfer==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346 \
--hash=sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9
schema==0.7.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197 \
--hash=sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c
shodan==1.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:c73275386ea02390e196c35c660706a28dd4d537c5a21eb387ab6236fac251f6
six==1.16.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
slack-sdk==3.26.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d1600211eaa37c71a5f92daf4404074c3e6b3f5359a37c93c818b39d88ab4ca0 \
--hash=sha256:f80f0d15f0fce539b470447d2a07b03ecdad6b24f69c1edd05d464cf21253a06
tabulate==0.9.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \
--hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f
tldextract==3.4.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2 \
--hash=sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234
typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
urllib3==1.26.18 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
--hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
xlsxwriter==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122 \
--hash=sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de | python3.9 | 1737d7cf | diff --git a/prowler/providers/gcp/lib/service/service.py b/prowler/providers/gcp/lib/service/service.py
--- a/prowler/providers/gcp/lib/service/service.py
+++ b/prowler/providers/gcp/lib/service/service.py
@@ -27,7 +27,7 @@ class GCPService:
self.default_project_id = audit_info.default_project_id
self.region = region
self.client = self.__generate_client__(
- service, api_version, audit_info.credentials
+ self.service, api_version, audit_info.credentials
)
# Only project ids that have their API enabled will be scanned
self.project_ids = self.__is_api_active__(audit_info.project_ids)
| [
{
"content": "import threading\n\nimport google_auth_httplib2\nimport httplib2\nfrom colorama import Fore, Style\nfrom google.oauth2.credentials import Credentials\nfrom googleapiclient import discovery\nfrom googleapiclient.discovery import Resource\n\nfrom prowler.lib.logger import logger\nfrom prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info\n\n\nclass GCPService:\n def __init__(\n self,\n service: str,\n audit_info: GCP_Audit_Info,\n region=\"global\",\n api_version=\"v1\",\n ):\n # We receive the service using __class__.__name__ or the service name in lowercase\n # e.g.: APIKeys --> we need a lowercase string, so service.lower()\n self.service = service.lower() if not service.islower() else service\n self.credentials = audit_info.credentials\n self.api_version = api_version\n self.default_project_id = audit_info.default_project_id\n self.region = region\n self.client = self.__generate_client__(\n service, api_version, audit_info.credentials\n )\n # Only project ids that have their API enabled will be scanned\n self.project_ids = self.__is_api_active__(audit_info.project_ids)\n\n def __get_client__(self):\n return self.client\n\n def __threading_call__(self, call, iterator):\n threads = []\n for value in iterator:\n threads.append(threading.Thread(target=call, args=(value,)))\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n def __get_AuthorizedHttp_client__(self):\n return google_auth_httplib2.AuthorizedHttp(\n self.credentials, http=httplib2.Http()\n )\n\n def __is_api_active__(self, audited_project_ids):\n project_ids = []\n for project_id in audited_project_ids:\n try:\n client = discovery.build(\"serviceusage\", \"v1\")\n request = client.services().get(\n name=f\"projects/{project_id}/services/{self.service}.googleapis.com\"\n )\n response = request.execute()\n if response.get(\"state\") != \"DISABLED\":\n project_ids.append(project_id)\n else:\n print(\n f\"\\n{Fore.YELLOW}{self.service} API {Style.RESET_ALL}has not been used in project {project_id} before or it is disabled.\\nEnable it by visiting https://console.developers.google.com/apis/api/{self.service}.googleapis.com/overview?project={project_id} then retry.\"\n )\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n return project_ids\n\n def __generate_client__(\n self,\n service: str,\n api_version: str,\n credentials: Credentials,\n ) -> Resource:\n try:\n return discovery.build(service, api_version, credentials=credentials)\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n",
"path": "prowler/providers/gcp/lib/service/service.py"
}
] | 11_0 | python | import unittest
import os
import sys
import ast
class ArgExtractor(ast.NodeVisitor):
def __init__(self):
self.arguments = []
self.in_gcp_service_class = False
def visit_ClassDef(self, node):
if node.name == 'GCPService':
self.in_gcp_service_class = True
self.generic_visit(node) # Visit all the nodes inside the GCPService class
self.in_gcp_service_class = False # Reset after leaving the class
def visit_FunctionDef(self, node):
if self.in_gcp_service_class and node.name == '__init__':
self.generic_visit(node) # Visit all the nodes inside the __init__ method
def visit_Call(self, node):
if (self.in_gcp_service_class and
isinstance(node.func, ast.Attribute) and
isinstance(node.func.value, ast.Name) and
node.func.value.id == 'self' and
node.func.attr == '__generate_client__'):
# Extract the arguments
for arg in node.args:
self.arguments.append(ast.unparse(arg))
for kw in node.keywords:
self.arguments.append(f"{ast.unparse(kw.value)}")
self.generic_visit(node) # Continue traversing
def extract_generate_client_args(source_code):
tree = ast.parse(source_code)
extractor = ArgExtractor()
extractor.visit(tree)
return extractor.arguments
class TestServiceSourceCode(unittest.TestCase):
def test_self_service_in_init(self):
file_path = os.path.join(os.path.dirname(__file__), 'prowler/providers/gcp/lib/service/service.py')
with open(file_path, 'r') as file:
source_code = file.read()
args = extract_generate_client_args(source_code)
print(args)
self.assertTrue(["self.service", "api_version", "audit_info.credentials"] == args)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestServiceSourceCode))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/prowler | The goal is to enhance the `__get_insight_selectors__` method in the `cloudtrail_service.py` file of the `Cloudtrail` class. Specifically, add conditional checks within the exception handling block to identify `InsightNotEnabledException` and `UnsupportedOperationException` exceptions. For these exceptions, log a detailed warning message using `logger.warning`, including the region, error class name, traceback line number, and error message. Ensure that other exceptions continue to be handled as they currently are. This enhancement will improve the clarity and usefulness of the logs when these specific exceptions occur. | 4785056 | about-time==4.2.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece \
--hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341
adal==1.2.7 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d \
--hash=sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1
alive-progress==3.1.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5 \
--hash=sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98
attrs==23.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
--hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
awsipranges==0.3.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0 \
--hash=sha256:f3d7a54aeaf7fe310beb5d377a4034a63a51b72677ae6af3e0967bc4de7eedaf
azure-common==1.1.28 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3 \
--hash=sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad
azure-core==1.28.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9 \
--hash=sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd
azure-identity==1.15.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8 \
--hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912
azure-mgmt-authorization==4.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4 \
--hash=sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a
azure-mgmt-core==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d \
--hash=sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae
azure-mgmt-security==5.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38b03efe82c2344cea203fda95e6d00b7ac22782fa1c0b585cd0ea2c8ff3e702 \
--hash=sha256:73a74ce8f6ffb1b345ce101c8abdd42238f161f0988d168d23918feda0089654
azure-mgmt-sql==3.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:129042cc011225e27aee6ef2697d585fa5722e5d1aeb0038af6ad2451a285457 \
--hash=sha256:1d1dd940d4d41be4ee319aad626341251572a5bf4a2addec71779432d9a1381f
azure-mgmt-storage==21.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc \
--hash=sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162
azure-mgmt-subscription==3.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38d4574a8d47fa17e3587d756e296cb63b82ad8fb21cd8543bcee443a502bf48 \
--hash=sha256:4e255b4ce9b924357bb8c5009b3c88a2014d3203b2495e2256fa027bf84e800e
azure-storage-blob==12.19.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897 \
--hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b
boto3==1.26.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30 \
--hash=sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c
botocore==1.29.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df \
--hash=sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210
cachetools==5.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
certifi==2023.7.22 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
cffi==1.15.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
click==8.1.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
colorama==0.4.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
contextlib2==21.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \
--hash=sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869
cryptography==41.0.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \
--hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \
--hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \
--hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \
--hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \
--hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \
--hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \
--hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \
--hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \
--hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \
--hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \
--hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \
--hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \
--hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \
--hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \
--hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \
--hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \
--hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \
--hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \
--hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \
--hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \
--hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \
--hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae
detect-secrets==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd \
--hash=sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833
filelock==3.12.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81 \
--hash=sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec
google-api-core==2.11.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \
--hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e
google-api-python-client==2.111.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3a45a53c031478d1c82c7162dd25c9a965247bca6bd438af0838a9d9b8219405 \
--hash=sha256:b605adee2d09a843b97a59925757802904679e44e5599708cedb8939900dfbc7
google-auth-httplib2==0.2.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05 \
--hash=sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d
google-auth==2.17.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
--hash=sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f
googleapis-common-protos==1.59.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44 \
--hash=sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f
grapheme==0.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca
httplib2==0.22.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \
--hash=sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81
idna==3.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
isodate==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 \
--hash=sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9
jmespath==1.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
jsonschema-specifications==2023.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \
--hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28
jsonschema==4.20.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa \
--hash=sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3
msal-extensions==1.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee \
--hash=sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354
msal==1.24.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8 \
--hash=sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad
msgraph-core==0.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:147324246788abe8ed7e05534cd9e4e0ec98b33b30e011693b8d014cebf97f63 \
--hash=sha256:e297564b9a0ca228493d8851f95cb2de9522143d82efa40ce3a6ad286e21392e
msrest==0.7.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32 \
--hash=sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9
msrestazure==0.6.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9 \
--hash=sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189
oauthlib==3.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \
--hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918
portalocker==2.7.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51 \
--hash=sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983
protobuf==4.23.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \
--hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \
--hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \
--hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \
--hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \
--hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \
--hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \
--hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \
--hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \
--hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \
--hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \
--hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2
pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
--hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
pycparser==2.21 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.13 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \
--hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \
--hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \
--hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \
--hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \
--hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \
--hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \
--hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \
--hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \
--hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \
--hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \
--hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \
--hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \
--hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \
--hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \
--hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \
--hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \
--hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \
--hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \
--hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \
--hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \
--hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \
--hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \
--hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \
--hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \
--hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \
--hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \
--hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \
--hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \
--hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \
--hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \
--hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \
--hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \
--hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \
--hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0
pyjwt==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyjwt[crypto]==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
pywin32==306 ; python_version >= "3.9" and platform_system == "Windows" and python_version < "3.12" \
--hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \
--hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \
--hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \
--hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \
--hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \
--hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \
--hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \
--hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \
--hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \
--hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \
--hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \
--hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0
pyyaml==6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
referencing==0.29.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \
--hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f
requests-file==1.5.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \
--hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953
requests-oauthlib==1.3.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \
--hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a
requests==2.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
rpds-py==0.8.10 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \
--hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \
--hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \
--hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \
--hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \
--hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \
--hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \
--hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \
--hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \
--hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \
--hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \
--hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \
--hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \
--hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \
--hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \
--hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \
--hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \
--hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \
--hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \
--hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \
--hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \
--hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \
--hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \
--hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \
--hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \
--hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \
--hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \
--hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \
--hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \
--hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \
--hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \
--hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \
--hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \
--hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \
--hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \
--hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \
--hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \
--hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \
--hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \
--hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \
--hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \
--hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \
--hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \
--hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \
--hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \
--hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \
--hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \
--hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \
--hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \
--hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \
--hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \
--hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \
--hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \
--hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \
--hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \
--hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \
--hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \
--hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \
--hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \
--hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \
--hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \
--hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \
--hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \
--hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \
--hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \
--hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \
--hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \
--hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \
--hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \
--hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \
--hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \
--hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \
--hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \
--hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \
--hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \
--hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \
--hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \
--hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \
--hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \
--hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \
--hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84
rsa==4.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
s3transfer==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346 \
--hash=sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9
schema==0.7.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197 \
--hash=sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c
shodan==1.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:c73275386ea02390e196c35c660706a28dd4d537c5a21eb387ab6236fac251f6
six==1.16.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
slack-sdk==3.26.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d1600211eaa37c71a5f92daf4404074c3e6b3f5359a37c93c818b39d88ab4ca0 \
--hash=sha256:f80f0d15f0fce539b470447d2a07b03ecdad6b24f69c1edd05d464cf21253a06
tabulate==0.9.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \
--hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f
tldextract==3.4.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2 \
--hash=sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234
typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
urllib3==1.26.18 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
--hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
xlsxwriter==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122 \
--hash=sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de | python3.9 | 44a4c067 | diff --git a/prowler/providers/aws/services/cloudtrail/cloudtrail_service.py b/prowler/providers/aws/services/cloudtrail/cloudtrail_service.py
--- a/prowler/providers/aws/services/cloudtrail/cloudtrail_service.py
+++ b/prowler/providers/aws/services/cloudtrail/cloudtrail_service.py
@@ -140,7 +140,16 @@ class Cloudtrail(AWSService):
error.response["Error"]["Code"]
== "InsightNotEnabledException"
):
- continue
+ logger.warning(
+ f"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+ )
+ elif (
+ error.response["Error"]["Code"]
+ == "UnsupportedOperationException"
+ ):
+ logger.warning(
+ f"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+ )
else:
logger.error(
f"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
| [
{
"content": "from datetime import datetime\nfrom typing import Optional\n\nfrom botocore.client import ClientError\nfrom pydantic import BaseModel\n\nfrom prowler.lib.logger import logger\nfrom prowler.lib.scan_filters.scan_filters import is_resource_filtered\nfrom prowler.providers.aws.lib.service.service import AWSService\n\n\n################### CLOUDTRAIL\nclass Cloudtrail(AWSService):\n def __init__(self, audit_info):\n # Call AWSService's __init__\n super().__init__(__class__.__name__, audit_info)\n self.trails = []\n self.__threading_call__(self.__get_trails__)\n self.__get_trail_status__()\n self.__get_insight_selectors__()\n self.__get_event_selectors__()\n self.__list_tags_for_resource__()\n\n def __get_trails__(self, regional_client):\n logger.info(\"Cloudtrail - Getting trails...\")\n try:\n describe_trails = regional_client.describe_trails()[\"trailList\"]\n trails_count = 0\n for trail in describe_trails:\n if not self.audit_resources or (\n is_resource_filtered(trail[\"TrailARN\"], self.audit_resources)\n ):\n trails_count += 1\n kms_key_id = None\n log_group_arn = None\n if \"KmsKeyId\" in trail:\n kms_key_id = trail[\"KmsKeyId\"]\n if \"CloudWatchLogsLogGroupArn\" in trail:\n log_group_arn = trail[\"CloudWatchLogsLogGroupArn\"]\n self.trails.append(\n Trail(\n name=trail[\"Name\"],\n is_multiregion=trail[\"IsMultiRegionTrail\"],\n home_region=trail[\"HomeRegion\"],\n arn=trail[\"TrailARN\"],\n region=regional_client.region,\n is_logging=False,\n log_file_validation_enabled=trail[\n \"LogFileValidationEnabled\"\n ],\n latest_cloudwatch_delivery_time=None,\n s3_bucket=trail[\"S3BucketName\"],\n kms_key=kms_key_id,\n log_group_arn=log_group_arn,\n data_events=[],\n has_insight_selectors=trail.get(\"HasInsightSelectors\"),\n )\n )\n if trails_count == 0:\n self.trails.append(\n Trail(\n region=regional_client.region,\n )\n )\n\n except Exception as error:\n logger.error(\n f\"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n\n def __get_trail_status__(self):\n logger.info(\"Cloudtrail - Getting trail status\")\n try:\n for trail in self.trails:\n for region, client in self.regional_clients.items():\n if trail.region == region and trail.name:\n status = client.get_trail_status(Name=trail.arn)\n trail.is_logging = status[\"IsLogging\"]\n if \"LatestCloudWatchLogsDeliveryTime\" in status:\n trail.latest_cloudwatch_delivery_time = status[\n \"LatestCloudWatchLogsDeliveryTime\"\n ]\n\n except Exception as error:\n logger.error(\n f\"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n\n def __get_event_selectors__(self):\n logger.info(\"Cloudtrail - Getting event selector\")\n try:\n for trail in self.trails:\n for region, client in self.regional_clients.items():\n if trail.region == region and trail.name:\n data_events = client.get_event_selectors(TrailName=trail.arn)\n # EventSelectors\n if (\n \"EventSelectors\" in data_events\n and data_events[\"EventSelectors\"]\n ):\n for event in data_events[\"EventSelectors\"]:\n event_selector = Event_Selector(\n is_advanced=False, event_selector=event\n )\n trail.data_events.append(event_selector)\n # AdvancedEventSelectors\n elif (\n \"AdvancedEventSelectors\" in data_events\n and data_events[\"AdvancedEventSelectors\"]\n ):\n for event in data_events[\"AdvancedEventSelectors\"]:\n event_selector = Event_Selector(\n is_advanced=True, event_selector=event\n )\n trail.data_events.append(event_selector)\n\n except Exception as error:\n 
logger.error(\n f\"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n\n def __get_insight_selectors__(self):\n logger.info(\"Cloudtrail - Getting trail insight selectors...\")\n\n try:\n for trail in self.trails:\n for region, client in self.regional_clients.items():\n if trail.region == region and trail.name:\n insight_selectors = None\n trail.has_insight_selectors = None\n try:\n client_insight_selectors = client.get_insight_selectors(\n TrailName=trail.arn\n )\n insight_selectors = client_insight_selectors.get(\n \"InsightSelectors\"\n )\n except ClientError as error:\n if (\n error.response[\"Error\"][\"Code\"]\n == \"InsightNotEnabledException\"\n ):\n continue\n else:\n logger.error(\n f\"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n except Exception as error:\n logger.error(\n f\"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n continue\n if insight_selectors:\n trail.has_insight_selectors = insight_selectors[0].get(\n \"InsightType\"\n )\n\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n\n def __list_tags_for_resource__(self):\n logger.info(\"CloudTrail - List Tags...\")\n try:\n for trail in self.trails:\n # Check if trails are in this account and region\n if (\n trail.region == trail.home_region\n and self.audited_account in trail.arn\n ):\n regional_client = self.regional_clients[trail.region]\n response = regional_client.list_tags(ResourceIdList=[trail.arn])[\n \"ResourceTagList\"\n ][0]\n trail.tags = response.get(\"TagsList\")\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n\n\nclass Event_Selector(BaseModel):\n is_advanced: bool\n event_selector: dict\n\n\nclass Trail(BaseModel):\n name: str = None\n is_multiregion: bool = None\n home_region: str = None\n arn: str = None\n region: str\n is_logging: bool = None\n log_file_validation_enabled: bool = None\n latest_cloudwatch_delivery_time: datetime = None\n s3_bucket: str = None\n kms_key: str = None\n log_group_arn: str = None\n data_events: list[Event_Selector] = []\n tags: Optional[list] = []\n has_insight_selectors: str = None\n",
"path": "prowler/providers/aws/services/cloudtrail/cloudtrail_service.py"
}
] | 11_1 | python | import unittest
import sys
from unittest.mock import patch, Mock, ANY
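# Regression test for the Cloudtrail insight-selector fix above: when
# get_insight_selectors raises a ClientError with code InsightNotEnabledException
# or UnsupportedOperationException, the patched __get_insight_selectors__ should
# log a warning instead of silently continuing. The assertions pass mock.ANY
# because only the fact that logger.warning was called matters, not the message.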
class TestCloudtrail(unittest.TestCase):
def setUp(self):
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import Cloudtrail
self.audit_info_mock = Mock()
self.audit_info_mock.audited_partition = 'aws'
self.audit_info_mock.audited_regions = ['us-east-1']
self.audit_info_mock.audit_metadata = Mock()
self.audit_info_mock.audit_metadata.expected_checks = []
self.cloudtrail = Cloudtrail(self.audit_info_mock)
self.cloudtrail.trails = [Mock(arn='mock_arn', region='us-east-1', name='mock_trail')]
self.mock_client = Mock()
self.cloudtrail.regional_clients = {'us-east-1': self.mock_client}
@patch('prowler.providers.aws.services.cloudtrail.cloudtrail_service.logger')
def test_insight_selectors_logging(self, mock_logger):
from botocore.exceptions import ClientError
# Simulating ClientError with InsightNotEnabledException
error_response = {'Error': {'Code': 'InsightNotEnabledException', 'Message': 'Test Exception'}}
self.mock_client.get_insight_selectors.side_effect = ClientError(error_response, 'get_insight_selectors')
self.cloudtrail.__get_insight_selectors__()
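        # After the patch, this path logs a warning rather than executing the
        # old 'continue', so logger.warning must have been called.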
mock_logger.warning.assert_called_with(
ANY
)
# Simulating ClientError with UnsupportedOperationException
error_response['Error']['Code'] = 'UnsupportedOperationException'
self.mock_client.get_insight_selectors.side_effect = ClientError(error_response, 'get_insight_selectors')
self.cloudtrail.__get_insight_selectors__()
mock_logger.warning.assert_called_with(
ANY
)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCloudtrail))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/prowler | You need to move the `generate_client` function from `gcp_provider.py` to the `GCPService` class in `service.py`, renaming it to `__generate_client__`. This involves adapting the function to work as a method of the `GCPService` class and updating all references in both files to use the new method. Ensure that `__generate_client__` is properly integrated into the `GCPService` class and that it keeps its functionality of creating GCP service clients. This refactoring centralizes and streamlines client generation within `GCPService`, improving the code's organization and maintainability. | bf0e62a | about-time==4.2.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece \
--hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341
adal==1.2.7 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d \
--hash=sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1
alive-progress==3.1.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5 \
--hash=sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98
attrs==23.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
--hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
awsipranges==0.3.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0 \
--hash=sha256:f3d7a54aeaf7fe310beb5d377a4034a63a51b72677ae6af3e0967bc4de7eedaf
azure-common==1.1.28 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3 \
--hash=sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad
azure-core==1.28.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9 \
--hash=sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd
azure-identity==1.15.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8 \
--hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912
azure-mgmt-authorization==4.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4 \
--hash=sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a
azure-mgmt-core==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d \
--hash=sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae
azure-mgmt-security==5.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38b03efe82c2344cea203fda95e6d00b7ac22782fa1c0b585cd0ea2c8ff3e702 \
--hash=sha256:73a74ce8f6ffb1b345ce101c8abdd42238f161f0988d168d23918feda0089654
azure-mgmt-sql==3.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:129042cc011225e27aee6ef2697d585fa5722e5d1aeb0038af6ad2451a285457 \
--hash=sha256:1d1dd940d4d41be4ee319aad626341251572a5bf4a2addec71779432d9a1381f
azure-mgmt-storage==21.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc \
--hash=sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162
azure-mgmt-subscription==3.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38d4574a8d47fa17e3587d756e296cb63b82ad8fb21cd8543bcee443a502bf48 \
--hash=sha256:4e255b4ce9b924357bb8c5009b3c88a2014d3203b2495e2256fa027bf84e800e
azure-storage-blob==12.19.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897 \
--hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b
boto3==1.26.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30 \
--hash=sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c
botocore==1.29.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df \
--hash=sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210
cachetools==5.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
certifi==2023.7.22 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
cffi==1.15.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
click==8.1.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
colorama==0.4.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
contextlib2==21.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \
--hash=sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869
cryptography==41.0.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \
--hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \
--hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \
--hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \
--hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \
--hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \
--hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \
--hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \
--hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \
--hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \
--hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \
--hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \
--hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \
--hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \
--hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \
--hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \
--hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \
--hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \
--hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \
--hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \
--hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \
--hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \
--hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae
detect-secrets==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd \
--hash=sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833
filelock==3.12.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81 \
--hash=sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec
google-api-core==2.11.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \
--hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e
google-api-python-client==2.111.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3a45a53c031478d1c82c7162dd25c9a965247bca6bd438af0838a9d9b8219405 \
--hash=sha256:b605adee2d09a843b97a59925757802904679e44e5599708cedb8939900dfbc7
google-auth-httplib2==0.2.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05 \
--hash=sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d
google-auth==2.17.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
--hash=sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f
googleapis-common-protos==1.59.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44 \
--hash=sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f
grapheme==0.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca
httplib2==0.22.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \
--hash=sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81
idna==3.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
isodate==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 \
--hash=sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9
jmespath==1.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
jsonschema-specifications==2023.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \
--hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28
jsonschema==4.20.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa \
--hash=sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3
msal-extensions==1.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee \
--hash=sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354
msal==1.24.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8 \
--hash=sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad
msgraph-core==0.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:147324246788abe8ed7e05534cd9e4e0ec98b33b30e011693b8d014cebf97f63 \
--hash=sha256:e297564b9a0ca228493d8851f95cb2de9522143d82efa40ce3a6ad286e21392e
msrest==0.7.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32 \
--hash=sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9
msrestazure==0.6.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9 \
--hash=sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189
oauthlib==3.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \
--hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918
portalocker==2.7.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51 \
--hash=sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983
protobuf==4.23.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \
--hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \
--hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \
--hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \
--hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \
--hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \
--hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \
--hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \
--hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \
--hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \
--hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \
--hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2
pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
--hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
pycparser==2.21 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.13 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \
--hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \
--hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \
--hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \
--hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \
--hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \
--hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \
--hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \
--hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \
--hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \
--hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \
--hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \
--hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \
--hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \
--hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \
--hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \
--hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \
--hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \
--hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \
--hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \
--hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \
--hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \
--hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \
--hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \
--hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \
--hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \
--hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \
--hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \
--hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \
--hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \
--hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \
--hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \
--hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \
--hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \
--hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0
pyjwt==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyjwt[crypto]==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
pywin32==306 ; python_version >= "3.9" and platform_system == "Windows" and python_version < "3.12" \
--hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \
--hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \
--hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \
--hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \
--hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \
--hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \
--hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \
--hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \
--hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \
--hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \
--hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \
--hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0
pyyaml==6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
referencing==0.29.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \
--hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f
requests-file==1.5.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \
--hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953
requests-oauthlib==1.3.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \
--hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a
requests==2.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
rpds-py==0.8.10 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \
--hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \
--hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \
--hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \
--hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \
--hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \
--hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \
--hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \
--hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \
--hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \
--hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \
--hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \
--hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \
--hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \
--hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \
--hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \
--hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \
--hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \
--hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \
--hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \
--hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \
--hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \
--hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \
--hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \
--hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \
--hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \
--hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \
--hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \
--hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \
--hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \
--hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \
--hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \
--hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \
--hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \
--hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \
--hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \
--hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \
--hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \
--hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \
--hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \
--hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \
--hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \
--hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \
--hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \
--hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \
--hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \
--hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \
--hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \
--hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \
--hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \
--hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \
--hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \
--hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \
--hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \
--hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \
--hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \
--hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \
--hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \
--hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \
--hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \
--hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \
--hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \
--hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \
--hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \
--hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \
--hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \
--hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \
--hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \
--hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \
--hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \
--hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \
--hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \
--hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \
--hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \
--hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \
--hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \
--hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \
--hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \
--hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \
--hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \
--hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84
rsa==4.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
s3transfer==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346 \
--hash=sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9
schema==0.7.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197 \
--hash=sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c
shodan==1.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:c73275386ea02390e196c35c660706a28dd4d537c5a21eb387ab6236fac251f6
six==1.16.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
slack-sdk==3.26.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d1600211eaa37c71a5f92daf4404074c3e6b3f5359a37c93c818b39d88ab4ca0 \
--hash=sha256:f80f0d15f0fce539b470447d2a07b03ecdad6b24f69c1edd05d464cf21253a06
tabulate==0.9.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \
--hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f
tldextract==3.4.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2 \
--hash=sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234
typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
urllib3==1.26.18 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
--hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
xlsxwriter==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122 \
--hash=sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de | python3.9 | ed33fac3 | diff --git a/prowler/providers/gcp/gcp_provider.py b/prowler/providers/gcp/gcp_provider.py
--- a/prowler/providers/gcp/gcp_provider.py
+++ b/prowler/providers/gcp/gcp_provider.py
@@ -3,10 +3,8 @@ import sys
from google import auth
from googleapiclient import discovery
-from googleapiclient.discovery import Resource
from prowler.lib.logger import logger
-from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
class GCP_Provider:
@@ -92,16 +90,3 @@ class GCP_Provider:
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return []
-
-
-def generate_client(
- service: str,
- api_version: str,
- audit_info: GCP_Audit_Info,
-) -> Resource:
- try:
- return discovery.build(service, api_version, credentials=audit_info.credentials)
- except Exception as error:
- logger.error(
- f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
- )
diff --git a/prowler/providers/gcp/lib/service/service.py b/prowler/providers/gcp/lib/service/service.py
--- a/prowler/providers/gcp/lib/service/service.py
+++ b/prowler/providers/gcp/lib/service/service.py
@@ -3,10 +3,11 @@ import threading
 import google_auth_httplib2
 import httplib2
 from colorama import Fore, Style
+from google.oauth2.credentials import Credentials
 from googleapiclient import discovery
+from googleapiclient.discovery import Resource
 
 from prowler.lib.logger import logger
-from prowler.providers.gcp.gcp_provider import generate_client
 from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
 
 
@@ -25,7 +26,9 @@ class GCPService:
         self.api_version = api_version
         self.default_project_id = audit_info.default_project_id
         self.region = region
-        self.client = generate_client(service, api_version, audit_info)
+        self.client = self.__generate_client__(
+            service, api_version, audit_info.credentials
+        )
         # Only project ids that have their API enabled will be scanned
         self.project_ids = self.__is_api_active__(audit_info.project_ids)
 
@@ -66,3 +69,16 @@ class GCPService:
                     f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                 )
         return project_ids
+
+    def __generate_client__(
+        self,
+        service: str,
+        api_version: str,
+        credentials: Credentials,
+    ) -> Resource:
+        try:
+            return discovery.build(service, api_version, credentials=credentials)
+        except Exception as error:
+            logger.error(
+                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
+            )
| [
{
"content": "import os\nimport sys\n\nfrom google import auth\nfrom googleapiclient import discovery\nfrom googleapiclient.discovery import Resource\n\nfrom prowler.lib.logger import logger\nfrom prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info\n\n\nclass GCP_Provider:\n def __init__(\n self,\n credentials_file: str,\n input_project_ids: list,\n ):\n logger.info(\"Instantiating GCP Provider ...\")\n self.credentials, self.default_project_id = self.__set_credentials__(\n credentials_file\n )\n if not self.default_project_id:\n logger.critical(\"No Project ID associated to Google Credentials.\")\n sys.exit(1)\n\n self.project_ids = []\n accessible_projects = self.get_project_ids()\n if not accessible_projects:\n logger.critical(\"No Project IDs can be accessed via Google Credentials.\")\n sys.exit(1)\n\n if input_project_ids:\n for input_project in input_project_ids:\n if input_project in accessible_projects:\n self.project_ids.append(input_project)\n else:\n logger.critical(\n f\"Project {input_project} cannot be accessed via Google Credentials.\"\n )\n sys.exit(1)\n else:\n # If not projects were input, all accessible projects are scanned by default\n self.project_ids = accessible_projects\n\n def __set_credentials__(self, credentials_file):\n try:\n if credentials_file:\n self.__set_gcp_creds_env_var__(credentials_file)\n\n return auth.default(\n scopes=[\"https://www.googleapis.com/auth/cloud-platform\"]\n )\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n sys.exit(1)\n\n def __set_gcp_creds_env_var__(self, credentials_file):\n logger.info(\n \"GCP provider: Setting GOOGLE_APPLICATION_CREDENTIALS environment variable...\"\n )\n client_secrets_path = os.path.abspath(credentials_file)\n os.environ[\"GOOGLE_APPLICATION_CREDENTIALS\"] = client_secrets_path\n\n def get_credentials(self):\n return self.credentials, self.default_project_id, self.project_ids\n\n def get_project_ids(self):\n try:\n project_ids = []\n\n service = discovery.build(\n \"cloudresourcemanager\", \"v1\", credentials=self.credentials\n )\n\n request = service.projects().list()\n\n while request is not None:\n response = request.execute()\n\n for project in response.get(\"projects\", []):\n project_ids.append(project[\"projectId\"])\n\n request = service.projects().list_next(\n previous_request=request, previous_response=response\n )\n\n return project_ids\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n return []\n\n\ndef generate_client(\n service: str,\n api_version: str,\n audit_info: GCP_Audit_Info,\n) -> Resource:\n try:\n return discovery.build(service, api_version, credentials=audit_info.credentials)\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n",
"path": "prowler/providers/gcp/gcp_provider.py"
},
{
"content": "import threading\n\nimport google_auth_httplib2\nimport httplib2\nfrom colorama import Fore, Style\nfrom googleapiclient import discovery\n\nfrom prowler.lib.logger import logger\nfrom prowler.providers.gcp.gcp_provider import generate_client\nfrom prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info\n\n\nclass GCPService:\n def __init__(\n self,\n service: str,\n audit_info: GCP_Audit_Info,\n region=\"global\",\n api_version=\"v1\",\n ):\n # We receive the service using __class__.__name__ or the service name in lowercase\n # e.g.: APIKeys --> we need a lowercase string, so service.lower()\n self.service = service.lower() if not service.islower() else service\n self.credentials = audit_info.credentials\n self.api_version = api_version\n self.default_project_id = audit_info.default_project_id\n self.region = region\n self.client = generate_client(service, api_version, audit_info)\n # Only project ids that have their API enabled will be scanned\n self.project_ids = self.__is_api_active__(audit_info.project_ids)\n\n def __get_client__(self):\n return self.client\n\n def __threading_call__(self, call, iterator):\n threads = []\n for value in iterator:\n threads.append(threading.Thread(target=call, args=(value,)))\n for t in threads:\n t.start()\n for t in threads:\n t.join()\n\n def __get_AuthorizedHttp_client__(self):\n return google_auth_httplib2.AuthorizedHttp(\n self.credentials, http=httplib2.Http()\n )\n\n def __is_api_active__(self, audited_project_ids):\n project_ids = []\n for project_id in audited_project_ids:\n try:\n client = discovery.build(\"serviceusage\", \"v1\")\n request = client.services().get(\n name=f\"projects/{project_id}/services/{self.service}.googleapis.com\"\n )\n response = request.execute()\n if response.get(\"state\") != \"DISABLED\":\n project_ids.append(project_id)\n else:\n print(\n f\"\\n{Fore.YELLOW}{self.service} API {Style.RESET_ALL}has not been used in project {project_id} before or it is disabled.\\nEnable it by visiting https://console.developers.google.com/apis/api/dataproc.googleapis.com/overview?project={project_id} then retry.\"\n )\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n return project_ids\n",
"path": "prowler/providers/gcp/lib/service/service.py"
}
] | 11_2 | python | import unittest
import sys
from unittest.mock import patch, Mock


class TestGCPServiceClientGeneration(unittest.TestCase):
    def setUp(self):
        from prowler.providers.gcp.lib.service.service import GCPService
        from google.oauth2.credentials import Credentials

        self.audit_info_mock = Mock()
        self.audit_info_mock.credentials = Credentials(token='fake-token')
        self.audit_info_mock.project_ids = []
        self.gcp_service = GCPService('fake-service', self.audit_info_mock)

    @patch('prowler.providers.gcp.lib.service.service.discovery.build')
    def test_client_generation_method_exists(self, mock_build):
        from prowler.providers.gcp.lib.service.service import GCPService
        from googleapiclient.discovery import Resource
        from google.oauth2.credentials import Credentials

        # Check if __generate_client__ method exists
        self.assertTrue(hasattr(self.gcp_service, '__generate_client__'),
                        "__generate_client__ method does not exist in GCPService class.")

    @patch('prowler.providers.gcp.lib.service.service.discovery.build')
    def test_generate_client_creates_client(self, mock_build):
        from googleapiclient.discovery import Resource

        # Setup mock for discovery.build
        mock_build.return_value = Mock(spec=Resource)

        # Test if __generate_client__ creates a client successfully
        client = self.gcp_service.__generate_client__('fake-service', 'v1', self.gcp_service.credentials)
        self.assertIsInstance(client, Resource, "The generated client is not an instance of Resource.")


def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestGCPServiceClientGeneration))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == '__main__':
    main()
|
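The row that ends here boils down to one pattern: the service class builds its own Google API client from injected credentials, instead of importing a provider-level helper. A minimal standalone sketch of that pattern, assuming google-api-python-client is installed (the class name below is a hypothetical stand-in, not the dataset's code):

from googleapiclient import discovery
from googleapiclient.discovery import Resource
from google.oauth2.credentials import Credentials


class ClientOwningService:
    # Sketch: the service owns client construction, so tests can patch
    # discovery.build at this module's import path, as the test script above does.
    def __init__(self, service: str, api_version: str, credentials: Credentials):
        self.credentials = credentials
        self.client = self.__generate_client__(service, api_version, credentials)

    def __generate_client__(
        self, service: str, api_version: str, credentials: Credentials
    ) -> Resource:
        # discovery.build is the standard google-api-python-client entry point.
        return discovery.build(service, api_version, credentials=credentials)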
https://github.com/teamqurrent/prowler | Your objective is to refine the handling of FMS policies in `fms_policy_compliant.py` and `fms_service.py`. In `fms_policy_compliant.py`, enhance the `execute` method to correctly identify and report the scenario with no FMS policies, marking it as a failure. In `fms_service.py`, update the `__list_compliance_status__` method to process the `PolicyComplianceStatusList` safely, using fail-safe access to this list (e.g., `.get()` with an empty-list default). These modifications improve the robustness and accuracy of FMS policy processing in the Prowler application. | d1bd097 | about-time==4.2.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece \
--hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341
adal==1.2.7 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d \
--hash=sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1
alive-progress==3.1.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5 \
--hash=sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98
attrs==23.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
--hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
awsipranges==0.3.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0 \
--hash=sha256:f3d7a54aeaf7fe310beb5d377a4034a63a51b72677ae6af3e0967bc4de7eedaf
azure-common==1.1.28 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3 \
--hash=sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad
azure-core==1.28.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9 \
--hash=sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd
azure-identity==1.15.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8 \
--hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912
azure-mgmt-authorization==4.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4 \
--hash=sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a
azure-mgmt-core==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d \
--hash=sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae
azure-mgmt-security==5.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38b03efe82c2344cea203fda95e6d00b7ac22782fa1c0b585cd0ea2c8ff3e702 \
--hash=sha256:73a74ce8f6ffb1b345ce101c8abdd42238f161f0988d168d23918feda0089654
azure-mgmt-sql==3.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:129042cc011225e27aee6ef2697d585fa5722e5d1aeb0038af6ad2451a285457 \
--hash=sha256:1d1dd940d4d41be4ee319aad626341251572a5bf4a2addec71779432d9a1381f
azure-mgmt-storage==21.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc \
--hash=sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162
azure-mgmt-subscription==3.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38d4574a8d47fa17e3587d756e296cb63b82ad8fb21cd8543bcee443a502bf48 \
--hash=sha256:4e255b4ce9b924357bb8c5009b3c88a2014d3203b2495e2256fa027bf84e800e
azure-storage-blob==12.19.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897 \
--hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b
boto3==1.26.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30 \
--hash=sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c
botocore==1.29.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df \
--hash=sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210
cachetools==5.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
certifi==2023.7.22 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
cffi==1.15.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
click==8.1.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
colorama==0.4.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
contextlib2==21.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \
--hash=sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869
cryptography==41.0.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \
--hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \
--hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \
--hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \
--hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \
--hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \
--hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \
--hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \
--hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \
--hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \
--hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \
--hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \
--hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \
--hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \
--hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \
--hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \
--hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \
--hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \
--hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \
--hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \
--hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \
--hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \
--hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae
detect-secrets==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd \
--hash=sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833
filelock==3.12.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81 \
--hash=sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec
google-api-core==2.11.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \
--hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e
google-api-python-client==2.111.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3a45a53c031478d1c82c7162dd25c9a965247bca6bd438af0838a9d9b8219405 \
--hash=sha256:b605adee2d09a843b97a59925757802904679e44e5599708cedb8939900dfbc7
google-auth-httplib2==0.2.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05 \
--hash=sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d
google-auth==2.17.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
--hash=sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f
googleapis-common-protos==1.59.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44 \
--hash=sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f
grapheme==0.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca
httplib2==0.22.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \
--hash=sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81
idna==3.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
isodate==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 \
--hash=sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9
jmespath==1.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
jsonschema-specifications==2023.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \
--hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28
jsonschema==4.20.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa \
--hash=sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3
msal-extensions==1.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee \
--hash=sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354
msal==1.24.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8 \
--hash=sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad
msgraph-core==0.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:147324246788abe8ed7e05534cd9e4e0ec98b33b30e011693b8d014cebf97f63 \
--hash=sha256:e297564b9a0ca228493d8851f95cb2de9522143d82efa40ce3a6ad286e21392e
msrest==0.7.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32 \
--hash=sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9
msrestazure==0.6.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9 \
--hash=sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189
oauthlib==3.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \
--hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918
portalocker==2.7.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51 \
--hash=sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983
protobuf==4.23.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \
--hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \
--hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \
--hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \
--hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \
--hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \
--hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \
--hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \
--hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \
--hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \
--hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \
--hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2
pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
--hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
pycparser==2.21 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.13 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \
--hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \
--hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \
--hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \
--hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \
--hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \
--hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \
--hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \
--hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \
--hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \
--hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \
--hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \
--hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \
--hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \
--hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \
--hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \
--hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \
--hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \
--hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \
--hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \
--hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \
--hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \
--hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \
--hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \
--hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \
--hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \
--hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \
--hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \
--hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \
--hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \
--hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \
--hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \
--hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \
--hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \
--hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0
pyjwt==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyjwt[crypto]==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
pywin32==306 ; python_version >= "3.9" and platform_system == "Windows" and python_version < "3.12" \
--hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \
--hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \
--hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \
--hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \
--hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \
--hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \
--hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \
--hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \
--hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \
--hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \
--hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \
--hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0
pyyaml==6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
referencing==0.29.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \
--hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f
requests-file==1.5.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \
--hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953
requests-oauthlib==1.3.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \
--hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a
requests==2.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
rpds-py==0.8.10 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \
--hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \
--hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \
--hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \
--hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \
--hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \
--hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \
--hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \
--hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \
--hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \
--hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \
--hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \
--hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \
--hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \
--hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \
--hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \
--hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \
--hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \
--hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \
--hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \
--hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \
--hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \
--hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \
--hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \
--hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \
--hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \
--hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \
--hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \
--hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \
--hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \
--hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \
--hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \
--hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \
--hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \
--hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \
--hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \
--hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \
--hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \
--hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \
--hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \
--hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \
--hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \
--hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \
--hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \
--hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \
--hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \
--hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \
--hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \
--hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \
--hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \
--hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \
--hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \
--hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \
--hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \
--hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \
--hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \
--hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \
--hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \
--hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \
--hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \
--hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \
--hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \
--hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \
--hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \
--hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \
--hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \
--hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \
--hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \
--hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \
--hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \
--hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \
--hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \
--hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \
--hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \
--hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \
--hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \
--hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \
--hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \
--hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \
--hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \
--hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84
rsa==4.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
s3transfer==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346 \
--hash=sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9
schema==0.7.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197 \
--hash=sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c
shodan==1.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:c73275386ea02390e196c35c660706a28dd4d537c5a21eb387ab6236fac251f6
six==1.16.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
slack-sdk==3.26.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d1600211eaa37c71a5f92daf4404074c3e6b3f5359a37c93c818b39d88ab4ca0 \
--hash=sha256:f80f0d15f0fce539b470447d2a07b03ecdad6b24f69c1edd05d464cf21253a06
tabulate==0.9.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \
--hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f
tldextract==3.4.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2 \
--hash=sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234
typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
urllib3==1.26.18 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
--hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
xlsxwriter==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122 \
--hash=sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de | python3.9 | 423f96b9 | diff --git a/prowler/providers/aws/services/fms/fms_policy_compliant/fms_policy_compliant.py b/prowler/providers/aws/services/fms/fms_policy_compliant/fms_policy_compliant.py
--- a/prowler/providers/aws/services/fms/fms_policy_compliant/fms_policy_compliant.py
+++ b/prowler/providers/aws/services/fms/fms_policy_compliant/fms_policy_compliant.py
@@ -13,17 +13,21 @@ class fms_policy_compliant(Check):
             report.status = "PASS"
             report.status_extended = "FMS enabled with all compliant accounts."
             non_compliant_policy = False
-            for policy in fms_client.fms_policies:
-                for policy_to_account in policy.compliance_status:
-                    if policy_to_account.status == "NON_COMPLIANT":
-                        report.status = "FAIL"
-                        report.status_extended = f"FMS with non-compliant policy {policy.name} for account {policy_to_account.account_id}."
-                        report.resource_id = policy.id
-                        report.resource_arn = policy.arn
-                        non_compliant_policy = True
+            if fms_client.fms_policies:
+                for policy in fms_client.fms_policies:
+                    for policy_to_account in policy.compliance_status:
+                        if policy_to_account.status == "NON_COMPLIANT":
+                            report.status = "FAIL"
+                            report.status_extended = f"FMS with non-compliant policy {policy.name} for account {policy_to_account.account_id}."
+                            report.resource_id = policy.id
+                            report.resource_arn = policy.arn
+                            non_compliant_policy = True
+                            break
+                    if non_compliant_policy:
                         break
-                if non_compliant_policy:
-                    break
+            else:
+                report.status = "FAIL"
+                report.status_extended = f"FMS without any compliant policy for account {fms_client.audited_account}."
 
             findings.append(report)
         return findings
diff --git a/prowler/providers/aws/services/fms/fms_service.py b/prowler/providers/aws/services/fms/fms_service.py
--- a/prowler/providers/aws/services/fms/fms_service.py
+++ b/prowler/providers/aws/services/fms/fms_service.py
@@ -66,7 +66,9 @@ class FMS(AWSService):
             for page in list_compliance_status_paginator.paginate(
                 PolicyId=fms_policy.id
             ):
-                for fms_compliance_status in page["PolicyComplianceStatusList"]:
+                for fms_compliance_status in page.get(
+                    "PolicyComplianceStatusList", []
+                ):
                     fms_policy.compliance_status.append(
                         PolicyAccountComplianceStatus(
                             account_id=fms_compliance_status.get("MemberAccount"),
diff --git a/tests/providers/aws/services/fms/fms_policy_compliant/fms_policy_compliant_test.py b/tests/providers/aws/services/fms/fms_policy_compliant/fms_policy_compliant_test.py
--- a/tests/providers/aws/services/fms/fms_policy_compliant/fms_policy_compliant_test.py
+++ b/tests/providers/aws/services/fms/fms_policy_compliant/fms_policy_compliant_test.py
@@ -170,3 +170,32 @@ class Test_fms_policy_compliant:
         assert result[0].resource_id == "12345678901"
         assert result[0].resource_arn == "arn:aws:fms:us-east-1:12345678901"
         assert result[0].region == AWS_REGION_US_EAST_1
+
+    def test_fms_admin_without_policies(self):
+        fms_client = mock.MagicMock
+        fms_client.audited_account = AWS_ACCOUNT_NUMBER
+        fms_client.audited_account_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
+        fms_client.region = AWS_REGION_US_EAST_1
+        fms_client.fms_admin_account = True
+        fms_client.fms_policies = []
+        with mock.patch(
+            "prowler.providers.aws.services.fms.fms_service.FMS",
+            new=fms_client,
+        ):
+            # Test Check
+            from prowler.providers.aws.services.fms.fms_policy_compliant.fms_policy_compliant import (
+                fms_policy_compliant,
+            )
+
+            check = fms_policy_compliant()
+            result = check.execute()
+
+            assert len(result) == 1
+            assert result[0].status == "FAIL"
+            assert (
+                result[0].status_extended
+                == f"FMS without any compliant policy for account {AWS_ACCOUNT_NUMBER}."
+            )
+            assert result[0].resource_id == AWS_ACCOUNT_NUMBER
+            assert result[0].resource_arn == fms_client.audited_account_arn
+            assert result[0].region == AWS_REGION_US_EAST_1
| [
{
"content": "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom prowler.providers.aws.services.fms.fms_client import fms_client\n\n\nclass fms_policy_compliant(Check):\n def execute(self):\n findings = []\n if fms_client.fms_admin_account:\n report = Check_Report_AWS(self.metadata())\n report.resource_arn = fms_client.audited_account_arn\n report.resource_id = fms_client.audited_account\n report.region = fms_client.region\n report.status = \"PASS\"\n report.status_extended = \"FMS enabled with all compliant accounts.\"\n non_compliant_policy = False\n for policy in fms_client.fms_policies:\n for policy_to_account in policy.compliance_status:\n if policy_to_account.status == \"NON_COMPLIANT\":\n report.status = \"FAIL\"\n report.status_extended = f\"FMS with non-compliant policy {policy.name} for account {policy_to_account.account_id}.\"\n report.resource_id = policy.id\n report.resource_arn = policy.arn\n non_compliant_policy = True\n break\n if non_compliant_policy:\n break\n\n findings.append(report)\n return findings\n",
"path": "prowler/providers/aws/services/fms/fms_policy_compliant/fms_policy_compliant.py"
},
{
"content": "from botocore.client import ClientError\nfrom pydantic import BaseModel\n\nfrom prowler.lib.logger import logger\nfrom prowler.lib.scan_filters.scan_filters import is_resource_filtered\nfrom prowler.providers.aws.lib.service.service import AWSService\n\n\n################## FMS\nclass FMS(AWSService):\n def __init__(self, audit_info):\n # # Call AWSService's __init__\n super().__init__(__class__.__name__, audit_info, global_service=True)\n self.fms_admin_account = True\n self.fms_policies = []\n self.__list_policies__()\n self.__list_compliance_status__()\n\n def __list_policies__(self):\n logger.info(\"FMS - Listing Policies...\")\n try:\n list_policies_paginator = self.client.get_paginator(\"list_policies\")\n try:\n for page in list_policies_paginator.paginate():\n for fms_policy in page[\"PolicyList\"]:\n if not self.audit_resources or (\n is_resource_filtered(\n fms_policy[\"PolicyArn\"], self.audit_resources\n )\n ):\n self.fms_policies.append(\n Policy(\n arn=fms_policy.get(\"PolicyArn\"),\n id=fms_policy.get(\"PolicyId\"),\n name=fms_policy.get(\"PolicyName\"),\n resource_type=fms_policy.get(\"ResourceType\"),\n service_type=fms_policy.get(\"SecurityServiceType\"),\n remediation_enabled=fms_policy.get(\n \"RemediationEnabled\"\n ),\n delete_unused_managed_resources=fms_policy.get(\n \"DeleteUnusedFMManagedResources\"\n ),\n )\n )\n except ClientError as error:\n if error.response[\"Error\"][\"Code\"] == \"AccessDeniedException\":\n if (\n \"No default admin could be found for account\"\n in error.response[\"Error\"][\"Message\"]\n ):\n # FMS is not enabled in this account\n self.fms_admin_account = False\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}\"\n )\n\n def __list_compliance_status__(self):\n logger.info(\"FMS - Listing Policies...\")\n try:\n for fms_policy in self.fms_policies:\n list_compliance_status_paginator = self.client.get_paginator(\n \"list_compliance_status\"\n )\n for page in list_compliance_status_paginator.paginate(\n PolicyId=fms_policy.id\n ):\n for fms_compliance_status in page[\"PolicyComplianceStatusList\"]:\n fms_policy.compliance_status.append(\n PolicyAccountComplianceStatus(\n account_id=fms_compliance_status.get(\"MemberAccount\"),\n policy_id=fms_compliance_status.get(\"PolicyId\"),\n status=fms_compliance_status.get(\"EvaluationResults\")[\n 0\n ].get(\"ComplianceStatus\"),\n )\n )\n\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}\"\n )\n\n\nclass PolicyAccountComplianceStatus(BaseModel):\n account_id: str\n policy_id: str\n status: str\n\n\nclass Policy(BaseModel):\n arn: str\n id: str\n name: str\n resource_type: str\n service_type: str\n remediation_enabled: bool\n delete_unused_managed_resources: bool\n compliance_status: list[PolicyAccountComplianceStatus] = []\n",
"path": "prowler/providers/aws/services/fms/fms_service.py"
},
{
"content": "from unittest import mock\n\nfrom prowler.providers.aws.services.fms.fms_service import (\n Policy,\n PolicyAccountComplianceStatus,\n)\nfrom tests.providers.aws.audit_info_utils import (\n AWS_ACCOUNT_NUMBER,\n AWS_REGION_US_EAST_1,\n)\n\n\nclass Test_fms_policy_compliant:\n def test_fms_not_admin(self):\n fms_client = mock.MagicMock\n fms_client.region = AWS_REGION_US_EAST_1\n fms_client.fms_admin_account = False\n with mock.patch(\n \"prowler.providers.aws.services.fms.fms_service.FMS\",\n new=fms_client,\n ):\n # Test Check\n from prowler.providers.aws.services.fms.fms_policy_compliant.fms_policy_compliant import (\n fms_policy_compliant,\n )\n\n check = fms_policy_compliant()\n result = check.execute()\n\n assert len(result) == 0\n\n def test_fms_admin_with_non_compliant_policies(self):\n fms_client = mock.MagicMock\n fms_client.audited_account = AWS_ACCOUNT_NUMBER\n fms_client.audited_account_arn = f\"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root\"\n fms_client.region = AWS_REGION_US_EAST_1\n fms_client.fms_admin_account = True\n fms_client.fms_policies = [\n Policy(\n arn=\"arn:aws:fms:us-east-1:12345678901\",\n id=\"12345678901\",\n name=\"test\",\n resource_type=\"AWS::EC2::Instance\",\n service_type=\"WAF\",\n remediation_enabled=True,\n delete_unused_managed_resources=True,\n compliance_status=[\n PolicyAccountComplianceStatus(\n account_id=\"12345678901\",\n policy_id=\"12345678901\",\n status=\"NON_COMPLIANT\",\n )\n ],\n )\n ]\n with mock.patch(\n \"prowler.providers.aws.services.fms.fms_service.FMS\",\n new=fms_client,\n ):\n # Test Check\n from prowler.providers.aws.services.fms.fms_policy_compliant.fms_policy_compliant import (\n fms_policy_compliant,\n )\n\n check = fms_policy_compliant()\n result = check.execute()\n\n assert len(result) == 1\n assert result[0].status == \"FAIL\"\n assert (\n result[0].status_extended\n == f\"FMS with non-compliant policy {fms_client.fms_policies[0].name} for account {fms_client.fms_policies[0].compliance_status[0].account_id}.\"\n )\n assert result[0].resource_id == \"12345678901\"\n assert result[0].resource_arn == \"arn:aws:fms:us-east-1:12345678901\"\n assert result[0].region == AWS_REGION_US_EAST_1\n\n def test_fms_admin_with_compliant_policies(self):\n fms_client = mock.MagicMock\n fms_client.audited_account = AWS_ACCOUNT_NUMBER\n fms_client.audited_account_arn = f\"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root\"\n fms_client.region = AWS_REGION_US_EAST_1\n fms_client.fms_admin_account = True\n fms_client.fms_policies = [\n Policy(\n arn=\"arn:aws:fms:us-east-1:12345678901\",\n id=\"12345678901\",\n name=\"test\",\n resource_type=\"AWS::EC2::Instance\",\n service_type=\"WAF\",\n remediation_enabled=True,\n delete_unused_managed_resources=True,\n compliance_status=[\n PolicyAccountComplianceStatus(\n account_id=\"12345678901\",\n policy_id=\"12345678901\",\n status=\"COMPLIANT\",\n )\n ],\n )\n ]\n with mock.patch(\n \"prowler.providers.aws.services.fms.fms_service.FMS\",\n new=fms_client,\n ):\n # Test Check\n from prowler.providers.aws.services.fms.fms_policy_compliant.fms_policy_compliant import (\n fms_policy_compliant,\n )\n\n check = fms_policy_compliant()\n result = check.execute()\n\n assert len(result) == 1\n assert result[0].status == \"PASS\"\n assert (\n result[0].status_extended == \"FMS enabled with all compliant accounts.\"\n )\n assert result[0].resource_id == AWS_ACCOUNT_NUMBER\n assert result[0].resource_arn == f\"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root\"\n assert result[0].region == AWS_REGION_US_EAST_1\n\n def 
test_fms_admin_with_non_and_compliant_policies(self):\n fms_client = mock.MagicMock\n fms_client.audited_account = AWS_ACCOUNT_NUMBER\n fms_client.audited_account_arn = f\"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root\"\n fms_client.region = AWS_REGION_US_EAST_1\n fms_client.fms_admin_account = True\n fms_client.fms_policies = [\n Policy(\n arn=\"arn:aws:fms:us-east-1:12345678901\",\n id=\"12345678901\",\n name=\"test\",\n resource_type=\"AWS::EC2::Instance\",\n service_type=\"WAF\",\n remediation_enabled=True,\n delete_unused_managed_resources=True,\n compliance_status=[\n PolicyAccountComplianceStatus(\n account_id=\"12345678901\",\n policy_id=\"12345678901\",\n status=\"COMPLIANT\",\n ),\n PolicyAccountComplianceStatus(\n account_id=\"12345678901\",\n policy_id=\"12345678901\",\n status=\"NON_COMPLIANT\",\n ),\n ],\n )\n ]\n with mock.patch(\n \"prowler.providers.aws.services.fms.fms_service.FMS\",\n new=fms_client,\n ):\n # Test Check\n from prowler.providers.aws.services.fms.fms_policy_compliant.fms_policy_compliant import (\n fms_policy_compliant,\n )\n\n check = fms_policy_compliant()\n result = check.execute()\n\n assert len(result) == 1\n assert result[0].status == \"FAIL\"\n assert (\n result[0].status_extended\n == f\"FMS with non-compliant policy {fms_client.fms_policies[0].name} for account {fms_client.fms_policies[0].compliance_status[0].account_id}.\"\n )\n assert result[0].resource_id == \"12345678901\"\n assert result[0].resource_arn == \"arn:aws:fms:us-east-1:12345678901\"\n assert result[0].region == AWS_REGION_US_EAST_1\n",
"path": "tests/providers/aws/services/fms/fms_policy_compliant/fms_policy_compliant_test.py"
}
] | 11_3 | python | from unittest import mock
import sys
import unittest
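# Verifies that an FMS admin account with an empty policy list yields a single
# FAIL finding scoped to the audited account (the behavior added by the patch).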
class Test_fms_policy_compliant(unittest.TestCase):
def test_fms_admin_without_policies(self):
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_US_EAST_1,
)
fms_client = mock.MagicMock
fms_client.audited_account = AWS_ACCOUNT_NUMBER
fms_client.audited_account_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
fms_client.region = AWS_REGION_US_EAST_1
fms_client.fms_admin_account = True
fms_client.fms_policies = []
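        # An empty policy list drives the check into its no-policies FAIL branch.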
with mock.patch(
"prowler.providers.aws.services.fms.fms_service.FMS",
new=fms_client,
):
# Test Check
from prowler.providers.aws.services.fms.fms_policy_compliant.fms_policy_compliant import (
fms_policy_compliant,
)
check = fms_policy_compliant()
result = check.execute()
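            # Because the check module is imported inside the patch context,
            # its fms_client binds to the mocked FMS service defined above.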
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"FMS without any compliant policy for account {AWS_ACCOUNT_NUMBER}."
)
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
assert result[0].resource_arn == fms_client.audited_account_arn
assert result[0].region == AWS_REGION_US_EAST_1
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(Test_fms_policy_compliant))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
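For reference, a minimal stand-alone sketch of the control flow the patch above introduces: a guard on the (possibly empty) policy list with an explicit no-policies failure branch. The `evaluate` helper and its `policies`/`report` arguments are hypothetical stand-ins for illustration, not the real Prowler objects.

# Hypothetical sketch of the fixed check's control flow; the function name and
# argument shapes are assumptions, not Prowler's API.
def evaluate(policies, report, audited_account):
    report.status = "PASS"
    report.status_extended = "FMS enabled with all compliant accounts."
    if policies:
        for policy in policies:
            for account_status in policy.compliance_status:
                if account_status.status == "NON_COMPLIANT":
                    report.status = "FAIL"
                    report.status_extended = (
                        f"FMS with non-compliant policy {policy.name} "
                        f"for account {account_status.account_id}."
                    )
                    return report  # the first non-compliant account decides
    else:
        # New branch: an admin account with no policies at all is a failure.
        report.status = "FAIL"
        report.status_extended = (
            f"FMS without any compliant policy for account {audited_account}."
        )
    return report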
https://github.com/teamqurrent/prowler | To improve readability, focus on converting string concatenations to f-string format in the following specific functions and files: `get_azure_html_assessment_summary` in `html.py`, both `send_slack_message` and `create_message_identity` in `slack.py`, `execute` in `apigateway_restapi_authorizers_enabled.py`, `print_azure_credentials` in `audit_info.py`, and `test_azure_get_assessment_summary` in `common_outputs_test.py`. Begin by locating each instance of string concatenation in these functions, then replace it with an f-string. Verify your changes by running the unit tests to ensure all instances are correctly converted. Pay close attention to maintaining consistency across all the affected code. | 0fff056 | about-time==4.2.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece \
--hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341
adal==1.2.7 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d \
--hash=sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1
alive-progress==3.1.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5 \
--hash=sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98
attrs==23.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
--hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
awsipranges==0.3.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0 \
--hash=sha256:f3d7a54aeaf7fe310beb5d377a4034a63a51b72677ae6af3e0967bc4de7eedaf
azure-common==1.1.28 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3 \
--hash=sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad
azure-core==1.28.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9 \
--hash=sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd
azure-identity==1.15.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8 \
--hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912
azure-mgmt-authorization==4.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4 \
--hash=sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a
azure-mgmt-core==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d \
--hash=sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae
azure-mgmt-security==5.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38b03efe82c2344cea203fda95e6d00b7ac22782fa1c0b585cd0ea2c8ff3e702 \
--hash=sha256:73a74ce8f6ffb1b345ce101c8abdd42238f161f0988d168d23918feda0089654
azure-mgmt-sql==3.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:129042cc011225e27aee6ef2697d585fa5722e5d1aeb0038af6ad2451a285457 \
--hash=sha256:1d1dd940d4d41be4ee319aad626341251572a5bf4a2addec71779432d9a1381f
azure-mgmt-storage==21.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc \
--hash=sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162
azure-mgmt-subscription==3.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38d4574a8d47fa17e3587d756e296cb63b82ad8fb21cd8543bcee443a502bf48 \
--hash=sha256:4e255b4ce9b924357bb8c5009b3c88a2014d3203b2495e2256fa027bf84e800e
azure-storage-blob==12.19.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897 \
--hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b
boto3==1.26.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30 \
--hash=sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c
botocore==1.29.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df \
--hash=sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210
cachetools==5.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
certifi==2023.7.22 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
cffi==1.15.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
click==8.1.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
colorama==0.4.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
contextlib2==21.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \
--hash=sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869
cryptography==41.0.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \
--hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \
--hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \
--hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \
--hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \
--hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \
--hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \
--hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \
--hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \
--hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \
--hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \
--hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \
--hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \
--hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \
--hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \
--hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \
--hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \
--hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \
--hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \
--hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \
--hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \
--hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \
--hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f
detect-secrets==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd \
--hash=sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833
filelock==3.12.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81 \
--hash=sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec
google-api-core==2.11.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \
--hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e
google-api-python-client==2.108.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6396efca83185fb205c0abdbc1c2ee57b40475578c6af37f6d0e30a639aade99 \
--hash=sha256:9d1327213e388943ebcd7db5ce6e7f47987a7e6874e3e1f6116010eea4a0e75d
google-auth-httplib2==0.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c \
--hash=sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29
google-auth==2.17.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
--hash=sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f
googleapis-common-protos==1.59.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44 \
--hash=sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f
grapheme==0.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca
httplib2==0.22.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \
--hash=sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81
idna==3.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
isodate==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 \
--hash=sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9
jmespath==1.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
jsonschema-specifications==2023.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \
--hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28
jsonschema==4.18.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8caf5b57a990a98e9b39832ef3cb35c176fe331414252b6e1b26fd5866f891a4 \
--hash=sha256:b508dd6142bd03f4c3670534c80af68cd7bbff9ea830b9cf2625d4a3c49ddf60
msal-extensions==1.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee \
--hash=sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354
msal==1.24.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8 \
--hash=sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad
msgraph-core==0.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:147324246788abe8ed7e05534cd9e4e0ec98b33b30e011693b8d014cebf97f63 \
--hash=sha256:e297564b9a0ca228493d8851f95cb2de9522143d82efa40ce3a6ad286e21392e
msrest==0.7.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32 \
--hash=sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9
msrestazure==0.6.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9 \
--hash=sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189
oauthlib==3.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \
--hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918
portalocker==2.7.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51 \
--hash=sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983
protobuf==4.23.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \
--hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \
--hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \
--hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \
--hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \
--hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \
--hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \
--hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \
--hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \
--hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \
--hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \
--hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2
pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
--hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
pycparser==2.21 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.13 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \
--hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \
--hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \
--hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \
--hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \
--hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \
--hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \
--hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \
--hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \
--hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \
--hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \
--hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \
--hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \
--hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \
--hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \
--hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \
--hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \
--hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \
--hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \
--hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \
--hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \
--hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \
--hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \
--hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \
--hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \
--hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \
--hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \
--hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \
--hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \
--hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \
--hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \
--hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \
--hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \
--hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \
--hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0
pyjwt==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyjwt[crypto]==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
pywin32==306 ; python_version >= "3.9" and platform_system == "Windows" and python_version < "3.12" \
--hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \
--hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \
--hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \
--hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \
--hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \
--hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \
--hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \
--hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \
--hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \
--hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \
--hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \
--hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0
pyyaml==6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
referencing==0.29.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \
--hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f
requests-file==1.5.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \
--hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953
requests-oauthlib==1.3.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \
--hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a
requests==2.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
rpds-py==0.8.10 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \
--hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \
--hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \
--hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \
--hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \
--hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \
--hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \
--hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \
--hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \
--hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \
--hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \
--hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \
--hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \
--hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \
--hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \
--hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \
--hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \
--hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \
--hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \
--hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \
--hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \
--hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \
--hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \
--hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \
--hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \
--hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \
--hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \
--hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \
--hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \
--hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \
--hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \
--hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \
--hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \
--hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \
--hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \
--hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \
--hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \
--hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \
--hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \
--hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \
--hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \
--hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \
--hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \
--hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \
--hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \
--hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \
--hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \
--hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \
--hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \
--hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \
--hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \
--hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \
--hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \
--hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \
--hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \
--hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \
--hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \
--hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \
--hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \
--hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \
--hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \
--hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \
--hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \
--hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \
--hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \
--hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \
--hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \
--hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \
--hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \
--hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \
--hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \
--hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \
--hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \
--hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \
--hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \
--hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \
--hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \
--hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \
--hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \
--hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \
--hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84
rsa==4.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
s3transfer==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346 \
--hash=sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9
schema==0.7.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197 \
--hash=sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c
shodan==1.30.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:bedb6e8c2b4459592c1bc17b4d4b57dab0cb58a455ad589ee26a6304242cd505
six==1.16.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
slack-sdk==3.24.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:741ea5381e65f4407d24ed81203912cbd6bfe807a6704b1d3c5ad346c86000b6 \
--hash=sha256:cae64f0177a53d34cca59cc691d4535edd18929843a936b97cea421db9e4fbfe
tabulate==0.9.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \
--hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f
tldextract==3.4.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2 \
--hash=sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234
typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
urllib3==1.26.18 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
--hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
xlsxwriter==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122 \
--hash=sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de | python3.9 | ceabe8ec | diff --git a/prowler/lib/outputs/html.py b/prowler/lib/outputs/html.py
--- a/prowler/lib/outputs/html.py
+++ b/prowler/lib/outputs/html.py
@@ -407,7 +407,7 @@ def get_azure_html_assessment_summary(audit_info):
if isinstance(audit_info, Azure_Audit_Info):
printed_subscriptions = []
for key, value in audit_info.identity.subscriptions.items():
- intermediate = key + " : " + value
+ intermediate = f"{key} : {value}"
printed_subscriptions.append(intermediate)
# check if identity is str(coming from SP) or dict(coming from browser or)
diff --git a/prowler/lib/outputs/slack.py b/prowler/lib/outputs/slack.py
--- a/prowler/lib/outputs/slack.py
+++ b/prowler/lib/outputs/slack.py
@@ -13,7 +13,7 @@ def send_slack_message(token, channel, stats, provider, audit_info):
response = client.chat_postMessage(
username="Prowler",
icon_url=square_logo_img,
- channel="#" + channel,
+ channel=f"#{channel}",
blocks=create_message_blocks(identity, logo, stats),
)
return response
@@ -35,7 +35,7 @@ def create_message_identity(provider, audit_info):
elif provider == "azure":
printed_subscriptions = []
for key, value in audit_info.identity.subscriptions.items():
- intermediate = "- *" + key + ": " + value + "*\n"
+ intermediate = f"- *{key}: {value}*\n"
printed_subscriptions.append(intermediate)
identity = f"Azure Subscriptions:\n{''.join(printed_subscriptions)}"
logo = azure_logo
diff --git a/prowler/providers/aws/services/apigateway/apigateway_restapi_authorizers_enabled/apigateway_restapi_authorizers_enabled.py b/prowler/providers/aws/services/apigateway/apigateway_restapi_authorizers_enabled/apigateway_restapi_authorizers_enabled.py
--- a/prowler/providers/aws/services/apigateway/apigateway_restapi_authorizers_enabled/apigateway_restapi_authorizers_enabled.py
+++ b/prowler/providers/aws/services/apigateway/apigateway_restapi_authorizers_enabled/apigateway_restapi_authorizers_enabled.py
@@ -35,7 +35,7 @@ class apigateway_restapi_authorizers_enabled(Check):
if authorization_method == "NONE":
all_methods_authorized = False
unauthorized_method = (
- resource.path + " -> " + http_method
+ f"{resource.path} -> {http_method}"
)
resource_paths_with_unathorized_methods.append(
unauthorized_method
diff --git a/prowler/providers/common/audit_info.py b/prowler/providers/common/audit_info.py
--- a/prowler/providers/common/audit_info.py
+++ b/prowler/providers/common/audit_info.py
@@ -63,7 +63,7 @@ GCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project IDs: {Fore.Y
def print_azure_credentials(self, audit_info: Azure_Audit_Info):
printed_subscriptions = []
for key, value in audit_info.identity.subscriptions.items():
- intermediate = key + " : " + value
+ intermediate = f"{key} : {value}"
printed_subscriptions.append(intermediate)
report = f"""
This report is being generated using the identity below:
diff --git a/tests/providers/common/common_outputs_test.py b/tests/providers/common/common_outputs_test.py
--- a/tests/providers/common/common_outputs_test.py
+++ b/tests/providers/common/common_outputs_test.py
@@ -281,7 +281,7 @@ class Test_Common_Output_Options:
}
printed_subscriptions = []
for key, value in audit_info.identity.subscriptions.items():
- intermediate = key + " : " + value
+ intermediate = f"{key} : {value}"
printed_subscriptions.append(intermediate)
assert (
get_assessment_summary(audit_info)
| [
{
"content": "import importlib\nimport sys\nfrom os import path\n\nfrom prowler.config.config import (\n html_file_suffix,\n html_logo_img,\n html_logo_url,\n prowler_version,\n timestamp,\n)\nfrom prowler.lib.check.models import Check_Report_AWS, Check_Report_GCP\nfrom prowler.lib.logger import logger\nfrom prowler.lib.outputs.models import (\n get_check_compliance,\n parse_html_string,\n unroll_dict,\n unroll_tags,\n)\nfrom prowler.lib.utils.utils import open_file\nfrom prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info\nfrom prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info\nfrom prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info\n\n\ndef add_html_header(file_descriptor, audit_info):\n try:\n file_descriptor.write(\n \"\"\"\n <!DOCTYPE html>\n <html lang=\"en\">\n <head>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">\n <!-- Required meta tags -->\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1, shrink-to-fit=no\">\n <style>\n .read-more {\n color: #00f;\n }\n\n .bg-success-custom {\n background-color: #98dea7 !important;\n }\n\n .bg-danger {\n background-color: #f28484 !important;\n }\n </style>\n <!-- Bootstrap CSS -->\n <link rel=\"stylesheet\" href=\"https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/css/bootstrap.min.css\"\n integrity=\"sha384-9aIt2nRpC12Uk9gS9baDl411NQApFmC26EwAOH8WgZl5MYYxFfc+NcPb1dKGj7Sk\" crossorigin=\"anonymous\">\n <!-- https://datatables.net/download/index with jQuery, DataTables, Buttons, SearchPanes, and Select //-->\n <link rel=\"stylesheet\" type=\"text/css\"\n href=\"https://cdn.datatables.net/v/dt/jqc-1.12.4/dt-1.10.25/b-1.7.1/sp-1.4.0/sl-1.3.3/datatables.min.css\" />\n <link rel=\"stylesheet\" href=\"https://pro.fontawesome.com/releases/v5.10.0/css/all.css\"\n integrity=\"sha384-AYmEC3Yw5cVb3ZcuHtOA93w35dYTsvhLPVnYs9eStHfGJvOvKxVfELGroGkvsg+p\" crossorigin=\"anonymous\" />\n <style>\n .show-read-more .more-text {\n display: none;\n }\n\n .dataTable {\n font-size: 14px;\n }\n\n .container-fluid {\n font-size: 14px;\n }\n\n .float-left {\n float: left !important;\n max-width: 100%;\n }\n </style>\n <title>Prowler - The Handy Cloud Security Tool</title>\n </head>\n <body>\n <div class=\"container-fluid\">\n <div class=\"row mt-3\">\n <div class=\"col-md-4\">\n <a href=\"\"\"\n + html_logo_url\n + \"\"\"><img class=\"float-left card-img-left mt-4 mr-4 ml-4\"\n src=\"\"\"\n + html_logo_img\n + \"\"\"\n alt=\"prowler-logo\"></a>\n <div class=\"card\">\n <div class=\"card-header\">\n Report Information\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <div class=\"row\">\n <div class=\"col-md-auto\">\n <b>Version:</b> \"\"\"\n + prowler_version\n + \"\"\"\n </div>\n </div>\n </li>\n <li class=\"list-group-item\">\n <b>Parameters used:</b> \"\"\"\n + \" \".join(sys.argv[1:])\n + \"\"\"\n </li>\n <li class=\"list-group-item\">\n <b>Date:</b> \"\"\"\n + timestamp.isoformat()\n + \"\"\"\n </li>\n </ul>\n </div>\n </div> \"\"\"\n + get_assessment_summary(audit_info)\n + \"\"\"\n <div class=\"col-md-2\">\n <div class=\"card\">\n <div class=\"card-header\">\n Assessment Overview\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>Total Findings:</b> TOTAL_FINDINGS\n </li>\n <li class=\"list-group-item\">\n <b>Passed:</b> TOTAL_PASS\n </li>\n <li class=\"list-group-item\">\n <b>Failed:</b> TOTAL_FAIL\n </li>\n <li class=\"list-group-item\">\n <b>Total Resources:</b> TOTAL_RESOURCES\n </li>\n 
</ul>\n </div>\n </div>\n </div>\n </div>\n <div class=\"row-mt-3\">\n <div class=\"col-md-12\">\n <table class=\"table compact stripe row-border ordering\" id=\"findingsTable\" data-order='[[ 5, \"asc\" ]]' data-page-length='100'>\n <thead class=\"thead-light\">\n <tr>\n <th scope=\"col\">Status</th>\n <th scope=\"col\">Severity</th>\n <th scope=\"col\">Service Name</th>\n <th scope=\"col\">Region</th>\n <th style=\"width:20%\" scope=\"col\">Check ID</th>\n <th style=\"width:20%\" scope=\"col\">Check Title</th>\n <th scope=\"col\">Resource ID</th>\n <th scope=\"col\">Resource Tags</th>\n <th scope=\"col\">Status Extended</th>\n <th scope=\"col\">Risk</th>\n <th scope=\"col\">Recomendation</th>\n <th scope=\"col\">Compliance</th>\n </tr>\n </thead>\n <tbody>\n \"\"\"\n )\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}\"\n )\n sys.exit(1)\n\n\ndef fill_html(file_descriptor, finding, output_options):\n try:\n row_class = \"p-3 mb-2 bg-success-custom\"\n if finding.status == \"INFO\":\n row_class = \"table-info\"\n elif finding.status == \"FAIL\":\n row_class = \"table-danger\"\n elif finding.status == \"WARNING\":\n row_class = \"table-warning\"\n file_descriptor.write(\n f\"\"\"\n <tr class=\"{row_class}\">\n <td>{finding.status}</td>\n <td>{finding.check_metadata.Severity}</td>\n <td>{finding.check_metadata.ServiceName}</td>\n <td>{finding.location.lower() if isinstance(finding, Check_Report_GCP) else finding.region if isinstance(finding, Check_Report_AWS) else \"\"}</td>\n <td>{finding.check_metadata.CheckID.replace(\"_\", \"<wbr>_\")}</td>\n <td>{finding.check_metadata.CheckTitle}</td>\n <td>{finding.resource_id.replace(\"<\", \"<\").replace(\">\", \">\").replace(\"_\", \"<wbr>_\")}</td>\n <td>{parse_html_string(unroll_tags(finding.resource_tags))}</td>\n <td>{finding.status_extended.replace(\"<\", \"<\").replace(\">\", \">\").replace(\"_\", \"<wbr>_\")}</td>\n <td><p class=\"show-read-more\">{finding.check_metadata.Risk}</p></td>\n <td><p class=\"show-read-more\">{finding.check_metadata.Remediation.Recommendation.Text}</p> <a class=\"read-more\" href=\"{finding.check_metadata.Remediation.Recommendation.Url}\"><i class=\"fas fa-external-link-alt\"></i></a></td>\n <td><p class=\"show-read-more\">{parse_html_string(unroll_dict(get_check_compliance(finding, finding.check_metadata.Provider, output_options)))}</p></td>\n </tr>\n \"\"\"\n )\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}\"\n )\n sys.exit(1)\n\n\ndef fill_html_overview_statistics(stats, output_filename, output_directory):\n try:\n filename = f\"{output_directory}/{output_filename}{html_file_suffix}\"\n # Read file\n if path.isfile(filename):\n with open(filename, \"r\") as file:\n filedata = file.read()\n\n # Replace statistics\n # TOTAL_FINDINGS\n filedata = filedata.replace(\n \"TOTAL_FINDINGS\", str(stats.get(\"findings_count\"))\n )\n # TOTAL_RESOURCES\n filedata = filedata.replace(\n \"TOTAL_RESOURCES\", str(stats.get(\"resources_count\"))\n )\n # TOTAL_PASS\n filedata = filedata.replace(\"TOTAL_PASS\", str(stats.get(\"total_pass\")))\n # TOTAL_FAIL\n filedata = filedata.replace(\"TOTAL_FAIL\", str(stats.get(\"total_fail\")))\n # Write file\n with open(filename, \"w\") as file:\n file.write(filedata)\n\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}\"\n )\n sys.exit(1)\n\n\ndef 
add_html_footer(output_filename, output_directory):\n try:\n filename = f\"{output_directory}/{output_filename}{html_file_suffix}\"\n # Close HTML file if exists\n if path.isfile(filename):\n file_descriptor = open_file(\n filename,\n \"a\",\n )\n file_descriptor.write(\n \"\"\"\n </tbody>\n </table>\n </div>\n </div>\n </div>\n </div>\n <!-- Table search and paginator -->\n <!-- Optional JavaScript -->\n <!-- jQuery first, then Popper.js, then Bootstrap JS -->\n <script src=\"https://code.jquery.com/jquery-3.5.1.min.js\"\n integrity=\"sha256-9/aliU8dGd2tb6OSsuzixeV4y/faTqgFtohetphbbj0=\" crossorigin=\"anonymous\"></script>\n <script src=\"https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/js/bootstrap.bundle.min.js\"\n integrity=\"sha384-1CmrxMRARb6aLqgBO7yyAxTOQE2AKb9GfXnEo760AUcUmFx3ibVJJAzGytlQcNXd\"\n crossorigin=\"anonymous\"></script>\n <!-- https://datatables.net/download/index with jQuery, DataTables, Buttons, SearchPanes, and Select //-->\n <script type=\"text/javascript\"\n src=\"https://cdn.datatables.net/v/dt/jqc-1.12.4/dt-1.10.25/b-1.7.1/sp-1.4.0/sl-1.3.3/datatables.min.js\"></script>\n <script>\n $(document).ready(function () {\n // Initialise the table with 50 rows, and some search/filtering panes\n $('#findingsTable').DataTable({\n responsive: true,\n // Show 25, 50, 100 and All records\n lengthChange: true,\n lengthMenu: [[25, 50, 100, -1], [25, 50, 100, \"All\"]],\n searchPanes: {\n cascadePanes: true,\n viewTotal: true,\n },\n dom: 'Blfrtip',\n language: {\n // To enable a filter button instead of the filter row\n searchPanes: {\n clearMessage: 'Clear Filters',\n collapse: { 0: 'Filters', _: 'Filters (%d)' },\n initCollapsed: true\n\n }\n },\n buttons: [\n {\n extend: 'searchPanes',\n config: {\n cascadePanes: true,\n viewTotal: true,\n orderable: false\n }\n }\n ],\n columnDefs: [\n {\n searchPanes: {\n show: true,\n pagingType: 'numbers',\n searching: true\n },\n // Show all filters\n targets: [0, 1, 2, 3, 5, 7]\n }\n ]\n });\n var maxLength = 30;\n // ReadMore ReadLess\n $(\".show-read-more\").each(function () {\n var myStr = $(this).text();\n if ($.trim(myStr).length > maxLength) {\n var newStr = myStr.substring(0, maxLength);\n var removedStr = myStr.substring(maxLength, $.trim(myStr).length);\n $(this).empty().html(newStr);\n $(this).append(' <a href=\"javascript:void(0);\" class=\"read-more\">read more...</a>');\n $(this).append('<span class=\"more-text\">' + removedStr + '</span>');\n }\n });\n $(\".read-more\").click(function () {\n $(this).siblings(\".more-text\").contents().unwrap();\n $(this).remove();\n });\n });\n </script>\n</body>\n\n</html>\n\"\"\"\n )\n file_descriptor.close()\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}\"\n )\n sys.exit(1)\n\n\ndef get_aws_html_assessment_summary(audit_info):\n try:\n if isinstance(audit_info, AWS_Audit_Info):\n profile = (\n audit_info.profile if audit_info.profile is not None else \"default\"\n )\n if isinstance(audit_info.audited_regions, list):\n audited_regions = \" \".join(audit_info.audited_regions)\n elif not audit_info.audited_regions:\n audited_regions = \"All Regions\"\n else:\n audited_regions = \", \".join(audit_info.audited_regions)\n return (\n \"\"\"\n <div class=\"col-md-2\">\n <div class=\"card\">\n <div class=\"card-header\">\n AWS Assessment Summary\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>AWS Account:</b> \"\"\"\n + audit_info.audited_account\n + \"\"\"\n </li>\n <li 
class=\"list-group-item\">\n <b>AWS-CLI Profile:</b> \"\"\"\n + profile\n + \"\"\"\n </li>\n <li class=\"list-group-item\">\n <b>Audited Regions:</b> \"\"\"\n + audited_regions\n + \"\"\"\n </li>\n </ul>\n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"card\">\n <div class=\"card-header\">\n AWS Credentials\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>User Id:</b> \"\"\"\n + audit_info.audited_user_id\n + \"\"\"\n </li>\n <li class=\"list-group-item\">\n <b>Caller Identity ARN:</b> \"\"\"\n + audit_info.audited_identity_arn\n + \"\"\"\n </li>\n </ul>\n </div>\n </div>\n \"\"\"\n )\n\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}\"\n )\n sys.exit(1)\n\n\ndef get_azure_html_assessment_summary(audit_info):\n try:\n if isinstance(audit_info, Azure_Audit_Info):\n printed_subscriptions = []\n for key, value in audit_info.identity.subscriptions.items():\n intermediate = key + \" : \" + value\n printed_subscriptions.append(intermediate)\n\n # check if identity is str(coming from SP) or dict(coming from browser or)\n if isinstance(audit_info.identity.identity_id, dict):\n html_identity = audit_info.identity.identity_id.get(\n \"userPrincipalName\", \"Identity not found\"\n )\n else:\n html_identity = audit_info.identity.identity_id\n return (\n \"\"\"\n <div class=\"col-md-2\">\n <div class=\"card\">\n <div class=\"card-header\">\n Azure Assessment Summary\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>Azure Tenant IDs:</b> \"\"\"\n + \" \".join(audit_info.identity.tenant_ids)\n + \"\"\"\n </li>\n <li class=\"list-group-item\">\n <b>Azure Tenant Domain:</b> \"\"\"\n + audit_info.identity.domain\n + \"\"\"\n </li>\n <li class=\"list-group-item\">\n <b>Azure Subscriptions:</b> \"\"\"\n + \" \".join(printed_subscriptions)\n + \"\"\"\n </li>\n </ul>\n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"card\">\n <div class=\"card-header\">\n Azure Credentials\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>Azure Identity Type:</b> \"\"\"\n + audit_info.identity.identity_type\n + \"\"\"\n </li>\n <li class=\"list-group-item\">\n <b>Azure Identity ID:</b> \"\"\"\n + html_identity\n + \"\"\"\n </li>\n </ul>\n </div>\n </div>\n \"\"\"\n )\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}\"\n )\n sys.exit(1)\n\n\ndef get_gcp_html_assessment_summary(audit_info):\n try:\n if isinstance(audit_info, GCP_Audit_Info):\n try:\n getattr(audit_info.credentials, \"_service_account_email\")\n profile = (\n audit_info.credentials._service_account_email\n if audit_info.credentials._service_account_email is not None\n else \"default\"\n )\n except AttributeError:\n profile = \"default\"\n return (\n \"\"\"\n <div class=\"col-md-2\">\n <div class=\"card\">\n <div class=\"card-header\">\n GCP Assessment Summary\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>GCP Project IDs:</b> \"\"\"\n + \", \".join(audit_info.project_ids)\n + \"\"\"\n </li>\n </ul>\n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"card\">\n <div class=\"card-header\">\n GCP Credentials\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>GCP Account:</b> \"\"\"\n + profile\n + \"\"\"\n </li>\n </ul>\n </div>\n </div>\n \"\"\"\n )\n except Exception as error:\n 
logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}\"\n )\n sys.exit(1)\n\n\ndef get_assessment_summary(audit_info):\n \"\"\"\n get_assessment_summary gets the HTML assessment summary for the provider\n \"\"\"\n try:\n # This is based in the Provider_Audit_Info class\n # It is not pretty but useful\n # AWS_Audit_Info --> aws\n # GCP_Audit_Info --> gcp\n # Azure_Audit_Info --> azure\n provider = audit_info.__class__.__name__.split(\"_\")[0].lower()\n\n # Dynamically get the Provider quick inventory handler\n provider_html_assessment_summary_function = (\n f\"get_{provider}_html_assessment_summary\"\n )\n return getattr(\n importlib.import_module(__name__), provider_html_assessment_summary_function\n )(audit_info)\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n sys.exit(1)\n",
"path": "prowler/lib/outputs/html.py"
},
{
"content": "import sys\n\nfrom slack_sdk import WebClient\n\nfrom prowler.config.config import aws_logo, azure_logo, gcp_logo, square_logo_img\nfrom prowler.lib.logger import logger\n\n\ndef send_slack_message(token, channel, stats, provider, audit_info):\n try:\n client = WebClient(token=token)\n identity, logo = create_message_identity(provider, audit_info)\n response = client.chat_postMessage(\n username=\"Prowler\",\n icon_url=square_logo_img,\n channel=\"#\" + channel,\n blocks=create_message_blocks(identity, logo, stats),\n )\n return response\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n\n\ndef create_message_identity(provider, audit_info):\n try:\n identity = \"\"\n logo = aws_logo\n if provider == \"aws\":\n identity = f\"AWS Account *{audit_info.audited_account}*\"\n elif provider == \"gcp\":\n identity = f\"GCP Projects *{', '.join(audit_info.project_ids)}*\"\n logo = gcp_logo\n elif provider == \"azure\":\n printed_subscriptions = []\n for key, value in audit_info.identity.subscriptions.items():\n intermediate = \"- *\" + key + \": \" + value + \"*\\n\"\n printed_subscriptions.append(intermediate)\n identity = f\"Azure Subscriptions:\\n{''.join(printed_subscriptions)}\"\n logo = azure_logo\n return identity, logo\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n\n\ndef create_message_blocks(identity, logo, stats):\n try:\n blocks = [\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"mrkdwn\",\n \"text\": f\"Hey there 👋 \\n I'm *Prowler*, _the handy cloud security tool_ :cloud::key:\\n\\n I have just finished the security assessment on your {identity} with a total of *{stats['findings_count']}* findings.\",\n },\n \"accessory\": {\n \"type\": \"image\",\n \"image_url\": logo,\n \"alt_text\": \"Provider Logo\",\n },\n },\n {\"type\": \"divider\"},\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"mrkdwn\",\n \"text\": f\"\\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass']/stats['findings_count']*100,2)}%)\\n\",\n },\n },\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"mrkdwn\",\n \"text\": f\"\\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail']/stats['findings_count']*100,2)}%)\\n \",\n },\n },\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"mrkdwn\",\n \"text\": f\"\\n:bar_chart: *{stats['resources_count']} Scanned Resources*\\n\",\n },\n },\n {\"type\": \"divider\"},\n {\n \"type\": \"context\",\n \"elements\": [\n {\n \"type\": \"mrkdwn\",\n \"text\": f\"Used parameters: `prowler {' '.join(sys.argv[1:])} `\",\n }\n ],\n },\n {\"type\": \"divider\"},\n {\n \"type\": \"section\",\n \"text\": {\"type\": \"mrkdwn\", \"text\": \"Join our Slack Community!\"},\n \"accessory\": {\n \"type\": \"button\",\n \"text\": {\"type\": \"plain_text\", \"text\": \"Prowler :slack:\"},\n \"url\": \"https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog\",\n },\n },\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"mrkdwn\",\n \"text\": \"Feel free to contact us in our repo\",\n },\n \"accessory\": {\n \"type\": \"button\",\n \"text\": {\"type\": \"plain_text\", \"text\": \"Prowler :github:\"},\n \"url\": \"https://github.com/prowler-cloud/prowler\",\n },\n },\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"mrkdwn\",\n \"text\": \"See all the things you can do with ProwlerPro\",\n },\n \"accessory\": 
{\n \"type\": \"button\",\n \"text\": {\"type\": \"plain_text\", \"text\": \"Prowler Pro\"},\n \"url\": \"https://prowler.pro\",\n },\n },\n ]\n return blocks\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n",
"path": "prowler/lib/outputs/slack.py"
},
{
"content": "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom prowler.providers.aws.services.apigateway.apigateway_client import (\n apigateway_client,\n)\n\n\nclass apigateway_restapi_authorizers_enabled(Check):\n def execute(self):\n findings = []\n for rest_api in apigateway_client.rest_apis:\n report = Check_Report_AWS(self.metadata())\n report.region = rest_api.region\n report.resource_id = rest_api.name\n report.resource_arn = rest_api.arn\n report.resource_tags = rest_api.tags\n # it there are not authorizers at api level and resources without methods (default case) ->\n report.status = \"FAIL\"\n report.status_extended = f\"API Gateway {rest_api.name} ID {rest_api.id} does not have an authorizer configured at api level.\"\n if rest_api.authorizer:\n report.status = \"PASS\"\n report.status_extended = f\"API Gateway {rest_api.name} ID {rest_api.id} has an authorizer configured at api level\"\n else:\n # we want to know if api has not authorizers and all the resources don't have methods configured\n resources_have_methods = False\n all_methods_authorized = True\n resource_paths_with_unathorized_methods = []\n for resource in rest_api.resources:\n # if the resource has methods test if they have all configured authorizer\n if resource.resource_methods:\n resources_have_methods = True\n for (\n http_method,\n authorization_method,\n ) in resource.resource_methods.items():\n if authorization_method == \"NONE\":\n all_methods_authorized = False\n unauthorized_method = (\n resource.path + \" -> \" + http_method\n )\n resource_paths_with_unathorized_methods.append(\n unauthorized_method\n )\n # if there are methods in at least one resource and are all authorized\n if all_methods_authorized and resources_have_methods:\n report.status = \"PASS\"\n report.status_extended = f\"API Gateway {rest_api.name} ID {rest_api.id} has all methods authorized\"\n # if there are methods in at least one result but some of then are not authorized-> list it\n elif not all_methods_authorized:\n report.status_extended = f\"API Gateway {rest_api.name} ID {rest_api.id} does not have authorizers at api level and the following paths and methods are unauthorized: {'; '.join(resource_paths_with_unathorized_methods)}.\"\n\n findings.append(report)\n\n return findings\n",
"path": "prowler/providers/aws/services/apigateway/apigateway_restapi_authorizers_enabled/apigateway_restapi_authorizers_enabled.py"
},
{
"content": "import sys\n\nfrom botocore.config import Config\nfrom colorama import Fore, Style\n\nfrom prowler.config.config import load_and_validate_config_file\nfrom prowler.lib.logger import logger\nfrom prowler.providers.aws.aws_provider import (\n AWS_Provider,\n assume_role,\n get_aws_enabled_regions,\n get_checks_from_input_arn,\n get_regions_from_audit_resources,\n)\nfrom prowler.providers.aws.lib.arn.arn import parse_iam_credentials_arn\nfrom prowler.providers.aws.lib.audit_info.audit_info import current_audit_info\nfrom prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info, AWS_Credentials\nfrom prowler.providers.aws.lib.credentials.credentials import (\n print_aws_credentials,\n validate_aws_credentials,\n)\nfrom prowler.providers.aws.lib.organizations.organizations import (\n get_organizations_metadata,\n)\nfrom prowler.providers.aws.lib.resource_api_tagging.resource_api_tagging import (\n get_tagged_resources,\n)\nfrom prowler.providers.azure.azure_provider import Azure_Provider\nfrom prowler.providers.azure.lib.audit_info.audit_info import azure_audit_info\nfrom prowler.providers.azure.lib.audit_info.models import (\n Azure_Audit_Info,\n Azure_Region_Config,\n)\nfrom prowler.providers.azure.lib.exception.exception import AzureException\nfrom prowler.providers.gcp.gcp_provider import GCP_Provider\nfrom prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info\nfrom prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info\n\n\nclass Audit_Info:\n def __init__(self):\n logger.info(\"Setting Audit Info ...\")\n\n def print_gcp_credentials(self, audit_info: GCP_Audit_Info):\n # Beautify audited profile, set \"default\" if there is no profile set\n try:\n getattr(audit_info.credentials, \"_service_account_email\")\n profile = (\n audit_info.credentials._service_account_email\n if audit_info.credentials._service_account_email is not None\n else \"default\"\n )\n except AttributeError:\n profile = \"default\"\n\n report = f\"\"\"\nThis report is being generated using credentials below:\n\nGCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project IDs: {Fore.YELLOW}[{\", \".join(audit_info.project_ids)}]{Style.RESET_ALL}\n\"\"\"\n print(report)\n\n def print_azure_credentials(self, audit_info: Azure_Audit_Info):\n printed_subscriptions = []\n for key, value in audit_info.identity.subscriptions.items():\n intermediate = key + \" : \" + value\n printed_subscriptions.append(intermediate)\n report = f\"\"\"\nThis report is being generated using the identity below:\n\nAzure Tenant IDs: {Fore.YELLOW}[{\" \".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL} Azure Region: {Fore.YELLOW}[{audit_info.azure_region_config.name}]{Style.RESET_ALL}\nAzure Subscriptions: {Fore.YELLOW}{printed_subscriptions}{Style.RESET_ALL}\nAzure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RESET_ALL} Azure Identity ID: {Fore.YELLOW}[{audit_info.identity.identity_id}]{Style.RESET_ALL}\n\"\"\"\n print(report)\n\n def set_aws_audit_info(self, arguments) -> AWS_Audit_Info:\n \"\"\"\n set_aws_audit_info returns the AWS_Audit_Info\n \"\"\"\n logger.info(\"Setting AWS session ...\")\n\n # Assume Role Options\n input_role = arguments.get(\"role\")\n current_audit_info.assumed_role_info.role_arn = input_role\n input_session_duration = arguments.get(\"session_duration\")\n input_external_id = arguments.get(\"external_id\")\n\n # STS Endpoint Region\n sts_endpoint_region = 
arguments.get(\"sts_endpoint_region\")\n\n # MFA Configuration (false by default)\n input_mfa = arguments.get(\"mfa\")\n current_audit_info.mfa_enabled = input_mfa\n\n input_profile = arguments.get(\"profile\")\n input_regions = arguments.get(\"region\")\n organizations_role_arn = arguments.get(\"organizations_role\")\n\n # Assumed AWS session\n assumed_session = None\n\n # Set the maximum retries for the standard retrier config\n aws_retries_max_attempts = arguments.get(\"aws_retries_max_attempts\")\n if aws_retries_max_attempts:\n # Create the new config\n config = Config(\n retries={\n \"max_attempts\": aws_retries_max_attempts,\n \"mode\": \"standard\",\n },\n )\n # Merge the new configuration\n new_boto3_config = current_audit_info.session_config.merge(config)\n current_audit_info.session_config = new_boto3_config\n\n # Set ignore unused services argument\n current_audit_info.ignore_unused_services = arguments.get(\n \"ignore_unused_services\"\n )\n\n # Setting session\n current_audit_info.profile = input_profile\n current_audit_info.audited_regions = input_regions\n\n logger.info(\"Generating original session ...\")\n # Create an global original session using only profile/basic credentials info\n aws_provider = AWS_Provider(current_audit_info)\n current_audit_info.original_session = aws_provider.aws_session\n logger.info(\"Validating credentials ...\")\n # Verificate if we have valid credentials\n caller_identity = validate_aws_credentials(\n current_audit_info.original_session, input_regions, sts_endpoint_region\n )\n\n logger.info(\"Credentials validated\")\n logger.info(f\"Original caller identity UserId: {caller_identity['UserId']}\")\n logger.info(f\"Original caller identity ARN: {caller_identity['Arn']}\")\n\n current_audit_info.audited_account = caller_identity[\"Account\"]\n current_audit_info.audited_identity_arn = caller_identity[\"Arn\"]\n current_audit_info.audited_user_id = caller_identity[\"UserId\"]\n current_audit_info.audited_partition = parse_iam_credentials_arn(\n caller_identity[\"Arn\"]\n ).partition\n current_audit_info.audited_account_arn = f\"arn:{current_audit_info.audited_partition}:iam::{current_audit_info.audited_account}:root\"\n\n logger.info(\"Checking if role assumption is needed ...\")\n if input_role:\n current_audit_info.assumed_role_info.role_arn = input_role\n current_audit_info.assumed_role_info.session_duration = (\n input_session_duration\n )\n current_audit_info.assumed_role_info.external_id = input_external_id\n current_audit_info.assumed_role_info.mfa_enabled = input_mfa\n\n # Check if role arn is valid\n try:\n # this returns the arn already parsed into a dict to be used when it is needed to access its fields\n role_arn_parsed = parse_iam_credentials_arn(\n current_audit_info.assumed_role_info.role_arn\n )\n\n except Exception as error:\n logger.critical(f\"{error.__class__.__name__} -- {error}\")\n sys.exit(1)\n\n else:\n logger.info(\n f\"Assuming role {current_audit_info.assumed_role_info.role_arn}\"\n )\n # Assume the role\n assumed_role_response = assume_role(\n aws_provider.aws_session,\n aws_provider.role_info,\n sts_endpoint_region,\n )\n logger.info(\"Role assumed\")\n # Set the info needed to create a session with an assumed role\n current_audit_info.credentials = AWS_Credentials(\n aws_access_key_id=assumed_role_response[\"Credentials\"][\n \"AccessKeyId\"\n ],\n aws_session_token=assumed_role_response[\"Credentials\"][\n \"SessionToken\"\n ],\n aws_secret_access_key=assumed_role_response[\"Credentials\"][\n \"SecretAccessKey\"\n 
],\n expiration=assumed_role_response[\"Credentials\"][\"Expiration\"],\n )\n # new session is needed\n assumed_session = aws_provider.set_session(current_audit_info)\n\n if assumed_session:\n logger.info(\"Audit session is the new session created assuming role\")\n current_audit_info.audit_session = assumed_session\n current_audit_info.audited_account = role_arn_parsed.account_id\n current_audit_info.audited_partition = role_arn_parsed.partition\n current_audit_info.audited_account_arn = f\"arn:{current_audit_info.audited_partition}:iam::{current_audit_info.audited_account}:root\"\n else:\n logger.info(\"Audit session is the original one\")\n current_audit_info.audit_session = current_audit_info.original_session\n\n logger.info(\"Checking if organizations role assumption is needed ...\")\n if organizations_role_arn:\n current_audit_info.assumed_role_info.role_arn = organizations_role_arn\n current_audit_info.assumed_role_info.session_duration = (\n input_session_duration\n )\n current_audit_info.assumed_role_info.external_id = input_external_id\n current_audit_info.assumed_role_info.mfa_enabled = input_mfa\n\n # Check if role arn is valid\n try:\n # this returns the arn already parsed into a dict to be used when it is needed to access its fields\n role_arn_parsed = parse_iam_credentials_arn(\n current_audit_info.assumed_role_info.role_arn\n )\n\n except Exception as error:\n logger.critical(f\"{error.__class__.__name__} -- {error}\")\n sys.exit(1)\n\n else:\n logger.info(\n f\"Getting organizations metadata for account {organizations_role_arn}\"\n )\n assumed_credentials = assume_role(\n aws_provider.aws_session,\n aws_provider.role_info,\n sts_endpoint_region,\n )\n current_audit_info.organizations_metadata = get_organizations_metadata(\n current_audit_info.audited_account, assumed_credentials\n )\n logger.info(\"Organizations metadata retrieved\")\n\n # Setting default region of session\n if current_audit_info.audit_session.region_name:\n current_audit_info.profile_region = (\n current_audit_info.audit_session.region_name\n )\n else:\n current_audit_info.profile_region = \"us-east-1\"\n\n if not arguments.get(\"only_logs\"):\n print_aws_credentials(current_audit_info)\n\n # Parse Scan Tags\n if arguments.get(\"resource_tags\"):\n input_resource_tags = arguments.get(\"resource_tags\")\n current_audit_info.audit_resources = get_tagged_resources(\n input_resource_tags, current_audit_info\n )\n\n # Parse Input Resource ARNs\n if arguments.get(\"resource_arn\"):\n current_audit_info.audit_resources = arguments.get(\"resource_arn\")\n\n # Get Enabled Regions\n current_audit_info.enabled_regions = get_aws_enabled_regions(current_audit_info)\n\n return current_audit_info\n\n def set_aws_execution_parameters(self, provider, audit_info) -> list[str]:\n # Once the audit_info is set and we have the eventual checks from arn, it is time to exclude the others\n try:\n if audit_info.audit_resources:\n audit_info.audited_regions = get_regions_from_audit_resources(\n audit_info.audit_resources\n )\n return get_checks_from_input_arn(audit_info.audit_resources, provider)\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n sys.exit(1)\n\n def set_azure_audit_info(self, arguments) -> Azure_Audit_Info:\n \"\"\"\n set_azure_audit_info returns the Azure_Audit_Info\n \"\"\"\n logger.info(\"Setting Azure session ...\")\n subscription_ids = arguments.get(\"subscription_ids\")\n\n logger.info(\"Checking if any credentials mode is set 
...\")\n az_cli_auth = arguments.get(\"az_cli_auth\")\n sp_env_auth = arguments.get(\"sp_env_auth\")\n browser_auth = arguments.get(\"browser_auth\")\n managed_entity_auth = arguments.get(\"managed_entity_auth\")\n tenant_id = arguments.get(\"tenant_id\")\n\n logger.info(\"Checking if region is different than default one\")\n region = arguments.get(\"azure_region\")\n\n if (\n not az_cli_auth\n and not sp_env_auth\n and not browser_auth\n and not managed_entity_auth\n ):\n raise AzureException(\n \"Azure provider requires at least one authentication method set: [--az-cli-auth | --sp-env-auth | --browser-auth | --managed-identity-auth]\"\n )\n if (not browser_auth and tenant_id) or (browser_auth and not tenant_id):\n raise AzureException(\n \"Azure Tenant ID (--tenant-id) is required only for browser authentication mode\"\n )\n\n azure_provider = Azure_Provider(\n az_cli_auth,\n sp_env_auth,\n browser_auth,\n managed_entity_auth,\n subscription_ids,\n tenant_id,\n region,\n )\n azure_audit_info.credentials = azure_provider.get_credentials()\n azure_audit_info.identity = azure_provider.get_identity()\n region_config = azure_provider.get_region_config()\n azure_audit_info.azure_region_config = Azure_Region_Config(\n name=region,\n authority=region_config[\"authority\"],\n base_url=region_config[\"base_url\"],\n credential_scopes=region_config[\"credential_scopes\"],\n )\n\n if not arguments.get(\"only_logs\"):\n self.print_azure_credentials(azure_audit_info)\n\n return azure_audit_info\n\n def set_gcp_audit_info(self, arguments) -> GCP_Audit_Info:\n \"\"\"\n set_gcp_audit_info returns the GCP_Audit_Info\n \"\"\"\n logger.info(\"Setting GCP session ...\")\n project_ids = arguments.get(\"project_ids\")\n\n logger.info(\"Checking if any credentials mode is set ...\")\n credentials_file = arguments.get(\"credentials_file\")\n\n gcp_provider = GCP_Provider(\n credentials_file,\n project_ids,\n )\n\n (\n gcp_audit_info.credentials,\n gcp_audit_info.default_project_id,\n gcp_audit_info.project_ids,\n ) = gcp_provider.get_credentials()\n\n if not arguments.get(\"only_logs\"):\n self.print_gcp_credentials(gcp_audit_info)\n\n return gcp_audit_info\n\n\ndef set_provider_audit_info(provider: str, arguments: dict):\n \"\"\"\n set_provider_audit_info configures automatically the audit session based on the selected provider and returns the audit_info object.\n \"\"\"\n try:\n provider_set_audit_info = f\"set_{provider}_audit_info\"\n provider_audit_info = getattr(Audit_Info(), provider_set_audit_info)(arguments)\n\n # Set the audit configuration from the config file\n provider_audit_info.audit_config = load_and_validate_config_file(\n provider, arguments[\"config_file\"]\n )\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n sys.exit(1)\n else:\n return provider_audit_info\n\n\ndef set_provider_execution_parameters(provider: str, audit_info):\n \"\"\"\n set_provider_audit_info configures automatically the audit execution based on the selected provider and returns the checks that are going to be executed.\n \"\"\"\n try:\n set_provider_execution_parameters_function = (\n f\"set_{provider}_execution_parameters\"\n )\n checks_to_execute = getattr(\n Audit_Info(), set_provider_execution_parameters_function\n )(provider, audit_info)\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n sys.exit(1)\n else:\n return checks_to_execute\n",
"path": "prowler/providers/common/audit_info.py"
},
{
"content": "from argparse import Namespace\nfrom os import rmdir\n\nfrom boto3 import session\nfrom mock import patch\n\nfrom prowler.lib.outputs.html import get_assessment_summary\nfrom prowler.providers.aws.lib.audit_info.audit_info import AWS_Audit_Info\nfrom prowler.providers.azure.lib.audit_info.audit_info import (\n Azure_Audit_Info,\n Azure_Identity_Info,\n Azure_Region_Config,\n)\nfrom prowler.providers.common.models import Audit_Metadata\nfrom prowler.providers.common.outputs import (\n Aws_Output_Options,\n Azure_Output_Options,\n Gcp_Output_Options,\n get_provider_output_model,\n set_provider_output_options,\n)\nfrom prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info\n\nAWS_ACCOUNT_NUMBER = \"012345678912\"\nDATETIME = \"20230101120000\"\n\n\n@patch(\"prowler.providers.common.outputs.output_file_timestamp\", new=DATETIME)\nclass Test_Common_Output_Options:\n # Mocked Azure Audit Info\n def set_mocked_azure_audit_info(self):\n audit_info = Azure_Audit_Info(\n credentials=None,\n identity=Azure_Identity_Info(),\n audit_metadata=None,\n audit_resources=None,\n audit_config=None,\n azure_region_config=Azure_Region_Config(),\n )\n return audit_info\n\n # Mocked GCP Audit Info\n def set_mocked_gcp_audit_info(self):\n audit_info = GCP_Audit_Info(\n credentials=None,\n default_project_id=\"test-project1\",\n project_ids=[\"test-project1\", \"test-project2\"],\n audit_resources=None,\n audit_metadata=None,\n audit_config=None,\n )\n return audit_info\n\n # Mocked AWS Audit Info\n def set_mocked_aws_audit_info(self):\n audit_info = AWS_Audit_Info(\n session_config=None,\n original_session=None,\n audit_session=session.Session(\n profile_name=None,\n botocore_session=None,\n ),\n audited_account=AWS_ACCOUNT_NUMBER,\n audited_account_arn=f\"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root\",\n audited_user_id=\"test-user\",\n audited_partition=\"aws\",\n audited_identity_arn=\"test-user-arn\",\n profile=None,\n profile_region=None,\n credentials=None,\n assumed_role_info=None,\n audited_regions=None,\n organizations_metadata=None,\n audit_resources=None,\n mfa_enabled=False,\n audit_metadata=Audit_Metadata(\n services_scanned=0,\n expected_checks=[],\n completed_checks=0,\n audit_progress=0,\n ),\n )\n return audit_info\n\n def test_set_provider_output_options_aws(self):\n # Set the cloud provider\n provider = \"aws\"\n # Set the arguments passed\n arguments = Namespace()\n arguments.quiet = True\n arguments.output_modes = [\"html\", \"csv\", \"json\"]\n arguments.output_directory = \"output_test_directory\"\n arguments.verbose = True\n arguments.output_filename = \"output_test_filename\"\n arguments.security_hub = True\n arguments.shodan = \"test-api-key\"\n arguments.only_logs = False\n arguments.unix_timestamp = False\n arguments.send_sh_only_fails = True\n\n audit_info = self.set_mocked_aws_audit_info()\n allowlist_file = \"\"\n bulk_checks_metadata = {}\n output_options = set_provider_output_options(\n provider, arguments, audit_info, allowlist_file, bulk_checks_metadata\n )\n assert isinstance(output_options, Aws_Output_Options)\n assert output_options.security_hub_enabled\n assert output_options.send_sh_only_fails\n assert output_options.is_quiet\n assert output_options.output_modes == [\"html\", \"csv\", \"json\", \"json-asff\"]\n assert output_options.output_directory == arguments.output_directory\n assert output_options.allowlist_file == \"\"\n assert output_options.bulk_checks_metadata == {}\n assert output_options.verbose\n assert output_options.output_filename == 
arguments.output_filename\n\n # Delete testing directory\n rmdir(arguments.output_directory)\n\n def test_set_provider_output_options_gcp(self):\n # Set the cloud provider\n provider = \"gcp\"\n # Set the arguments passed\n arguments = Namespace()\n arguments.quiet = True\n arguments.output_modes = [\"html\", \"csv\", \"json\"]\n arguments.output_directory = \"output_test_directory\"\n arguments.verbose = True\n arguments.output_filename = \"output_test_filename\"\n arguments.only_logs = False\n arguments.unix_timestamp = False\n\n audit_info = self.set_mocked_gcp_audit_info()\n allowlist_file = \"\"\n bulk_checks_metadata = {}\n output_options = set_provider_output_options(\n provider, arguments, audit_info, allowlist_file, bulk_checks_metadata\n )\n assert isinstance(output_options, Gcp_Output_Options)\n assert output_options.is_quiet\n assert output_options.output_modes == [\"html\", \"csv\", \"json\"]\n assert output_options.output_directory == arguments.output_directory\n assert output_options.allowlist_file == \"\"\n assert output_options.bulk_checks_metadata == {}\n assert output_options.verbose\n assert output_options.output_filename == arguments.output_filename\n\n # Delete testing directory\n rmdir(arguments.output_directory)\n\n def test_set_provider_output_options_aws_no_output_filename(self):\n # Set the cloud provider\n provider = \"aws\"\n # Set the arguments passed\n arguments = Namespace()\n arguments.quiet = True\n arguments.output_modes = [\"html\", \"csv\", \"json\"]\n arguments.output_directory = \"output_test_directory\"\n arguments.verbose = True\n arguments.security_hub = True\n arguments.shodan = \"test-api-key\"\n arguments.only_logs = False\n arguments.unix_timestamp = False\n arguments.send_sh_only_fails = True\n\n # Mock AWS Audit Info\n audit_info = self.set_mocked_aws_audit_info()\n\n allowlist_file = \"\"\n bulk_checks_metadata = {}\n output_options = set_provider_output_options(\n provider, arguments, audit_info, allowlist_file, bulk_checks_metadata\n )\n assert isinstance(output_options, Aws_Output_Options)\n assert output_options.security_hub_enabled\n assert output_options.send_sh_only_fails\n assert output_options.is_quiet\n assert output_options.output_modes == [\"html\", \"csv\", \"json\", \"json-asff\"]\n assert output_options.output_directory == arguments.output_directory\n assert output_options.allowlist_file == \"\"\n assert output_options.bulk_checks_metadata == {}\n assert output_options.verbose\n assert (\n output_options.output_filename\n == f\"prowler-output-{AWS_ACCOUNT_NUMBER}-{DATETIME}\"\n )\n\n # Delete testing directory\n rmdir(arguments.output_directory)\n\n def test_set_provider_output_options_azure_domain(self):\n # Set the cloud provider\n provider = \"azure\"\n # Set the arguments passed\n arguments = Namespace()\n arguments.quiet = True\n arguments.output_modes = [\"html\", \"csv\", \"json\"]\n arguments.output_directory = \"output_test_directory\"\n arguments.verbose = True\n arguments.only_logs = False\n arguments.unix_timestamp = False\n\n # Mock Azure Audit Info\n audit_info = self.set_mocked_azure_audit_info()\n audit_info.identity.domain = \"test-domain\"\n\n allowlist_file = \"\"\n bulk_checks_metadata = {}\n output_options = set_provider_output_options(\n provider, arguments, audit_info, allowlist_file, bulk_checks_metadata\n )\n assert isinstance(output_options, Azure_Output_Options)\n assert output_options.is_quiet\n assert output_options.output_modes == [\n \"html\",\n \"csv\",\n \"json\",\n ]\n assert 
output_options.output_directory == arguments.output_directory\n assert output_options.allowlist_file == \"\"\n assert output_options.bulk_checks_metadata == {}\n assert output_options.verbose\n assert (\n output_options.output_filename\n == f\"prowler-output-{audit_info.identity.domain}-{DATETIME}\"\n )\n\n # Delete testing directory\n rmdir(arguments.output_directory)\n\n def test_set_provider_output_options_azure_tenant_ids(self):\n # Set the cloud provider\n provider = \"azure\"\n # Set the arguments passed\n arguments = Namespace()\n arguments.quiet = True\n arguments.output_modes = [\"html\", \"csv\", \"json\"]\n arguments.output_directory = \"output_test_directory\"\n arguments.verbose = True\n arguments.only_logs = False\n arguments.unix_timestamp = False\n\n # Mock Azure Audit Info\n audit_info = self.set_mocked_azure_audit_info()\n tenants = [\"tenant-1\", \"tenant-2\"]\n audit_info.identity.tenant_ids = tenants\n\n allowlist_file = \"\"\n bulk_checks_metadata = {}\n output_options = set_provider_output_options(\n provider, arguments, audit_info, allowlist_file, bulk_checks_metadata\n )\n assert isinstance(output_options, Azure_Output_Options)\n assert output_options.is_quiet\n assert output_options.output_modes == [\n \"html\",\n \"csv\",\n \"json\",\n ]\n assert output_options.output_directory == arguments.output_directory\n assert output_options.allowlist_file == \"\"\n assert output_options.bulk_checks_metadata == {}\n assert output_options.verbose\n assert (\n output_options.output_filename\n == f\"prowler-output-{'-'.join(tenants)}-{DATETIME}\"\n )\n\n # Delete testing directory\n rmdir(arguments.output_directory)\n\n def test_azure_get_assessment_summary(self):\n # Mock Azure Audit Info\n audit_info = self.set_mocked_azure_audit_info()\n tenants = [\"tenant-1\", \"tenant-2\"]\n audit_info.identity.tenant_ids = tenants\n audit_info.identity.subscriptions = {\n \"Azure subscription 1\": \"12345-qwerty\",\n \"Subscription2\": \"12345-qwerty\",\n }\n printed_subscriptions = []\n for key, value in audit_info.identity.subscriptions.items():\n intermediate = key + \" : \" + value\n printed_subscriptions.append(intermediate)\n assert (\n get_assessment_summary(audit_info)\n == f\"\"\"\n <div class=\"col-md-2\">\n <div class=\"card\">\n <div class=\"card-header\">\n Azure Assessment Summary\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>Azure Tenant IDs:</b> {\" \".join(audit_info.identity.tenant_ids)}\n </li>\n <li class=\"list-group-item\">\n <b>Azure Tenant Domain:</b> {audit_info.identity.domain}\n </li>\n <li class=\"list-group-item\">\n <b>Azure Subscriptions:</b> {\" \".join(printed_subscriptions)}\n </li>\n </ul>\n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"card\">\n <div class=\"card-header\">\n Azure Credentials\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>Azure Identity Type:</b> {audit_info.identity.identity_type}\n </li>\n <li class=\"list-group-item\">\n <b>Azure Identity ID:</b> {audit_info.identity.identity_id}\n </li>\n </ul>\n </div>\n </div>\n \"\"\"\n )\n\n def test_aws_get_assessment_summary(self):\n # Mock AWS Audit Info\n audit_info = self.set_mocked_aws_audit_info()\n\n assert (\n get_assessment_summary(audit_info)\n == f\"\"\"\n <div class=\"col-md-2\">\n <div class=\"card\">\n <div class=\"card-header\">\n AWS Assessment Summary\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>AWS Account:</b> 
{audit_info.audited_account}\n </li>\n <li class=\"list-group-item\">\n <b>AWS-CLI Profile:</b> default\n </li>\n <li class=\"list-group-item\">\n <b>Audited Regions:</b> All Regions\n </li>\n </ul>\n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"card\">\n <div class=\"card-header\">\n AWS Credentials\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>User Id:</b> {audit_info.audited_user_id}\n </li>\n <li class=\"list-group-item\">\n <b>Caller Identity ARN:</b> {audit_info.audited_identity_arn}\n </li>\n </ul>\n </div>\n </div>\n \"\"\"\n )\n\n def test_gcp_get_assessment_summary(self):\n # Mock Azure Audit Info\n audit_info = self.set_mocked_gcp_audit_info()\n profile = \"default\"\n assert (\n get_assessment_summary(audit_info)\n == f\"\"\"\n <div class=\"col-md-2\">\n <div class=\"card\">\n <div class=\"card-header\">\n GCP Assessment Summary\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>GCP Project IDs:</b> {', '.join(audit_info.project_ids)}\n </li>\n </ul>\n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"card\">\n <div class=\"card-header\">\n GCP Credentials\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>GCP Account:</b> {profile}\n </li>\n </ul>\n </div>\n </div>\n \"\"\"\n )\n\n def test_get_provider_output_model(self):\n audit_info_class_names = [\n \"AWS_Audit_Info\",\n \"GCP_Audit_Info\",\n \"Azure_Audit_Info\",\n ]\n for class_name in audit_info_class_names:\n provider_prefix = class_name.split(\"_\", 1)[0].lower().capitalize()\n assert (\n get_provider_output_model(class_name).__name__\n == f\"{provider_prefix}_Check_Output_CSV\"\n )\n",
"path": "tests/providers/common/common_outputs_test.py"
}
] | 11_4 | python | import unittest
import sys


class TestFStringFormatting(unittest.TestCase):
    def test_html_py(self):
        self.assert_f_string_format("prowler/lib/outputs/html.py", [410])

    def test_slack_py(self):
        self.assert_f_string_format("prowler/lib/outputs/slack.py", [16, 38])

    def test_apigateway_restapi_authorizers_enabled_py(self):
        self.assert_f_string_format("prowler/providers/aws/services/apigateway/apigateway_restapi_authorizers_enabled/apigateway_restapi_authorizers_enabled.py", [38])

    def test_audit_info_py(self):
        self.assert_f_string_format("prowler/providers/common/audit_info.py", [66])

    def test_common_outputs_test_py(self):
        self.assert_f_string_format("tests/providers/common/common_outputs_test.py", [284])

    def assert_f_string_format(self, file_path, line_numbers):
        with open(file_path, 'r') as file:
            lines = file.readlines()
            for line_number in line_numbers:
                line = lines[line_number - 1].strip()
                self.assertIn("f\"", line, f"F-string format not found in {file_path} at line {line_number}")


def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestFStringFormatting))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == '__main__':
    main()
|
https://github.com/teamqurrent/prowler | You will need to streamline the allowlist checking logic in `allowlist.py` and update the regional constants in `audit_info_utils.py`. In `allowlist.py`, modify the `is_allowlisted` function to iterate over all accounts in the allowlist and simplify the logic that checks whether a finding is allowlisted. Also refine the `is_excepted` function to use more efficient conditional checks by adding `or not excepted_accounts` to each check. In `audit_info_utils.py`, add the new regional constants `AWS_REGION_EU_SOUTH_3` and `AWS_REGION_EU_CENTRAL_1` with the values `eu-south-3` and `eu-central-1`, respectively. These modifications will make the allowlist checking process more efficient and clearer, and will expand the regional coverage in the tests. | 10e8222 | about-time==4.2.1 ; python_version >= "3.9" and python_version < "3.12" \
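To make the refactor described in this row's instruction concrete, here is a minimal Python sketch. The allowlist/exception dictionary shapes and the function signatures below are illustrative assumptions, not prowler's actual data model; only the behaviours the instruction names (iterating over every allowlist account, the `or not excepted_accounts` pattern, and the two new region constants) are modelled.

# Illustrative sketch only -- dictionary shapes and signatures are assumed,
# not taken from the prowler source tree.

# New regional constants described for audit_info_utils.py.
AWS_REGION_EU_SOUTH_3 = "eu-south-3"
AWS_REGION_EU_CENTRAL_1 = "eu-central-1"


def is_allowlisted(allowlist: dict, audited_account: str, check: str, region: str) -> bool:
    # Iterate over every account block in the allowlist (including a "*"
    # wildcard) instead of looking up only the audited account.
    for account, account_rules in allowlist.get("Accounts", {}).items():
        if account not in (audited_account, "*"):
            continue
        for rule_check, rule in account_rules.get("Checks", {}).items():
            regions = rule.get("Regions", ["*"])
            if rule_check in (check, "*") and (region in regions or "*" in regions):
                return True
    return False


def is_excepted(exceptions: dict, audited_account: str, region: str, resource: str) -> bool:
    # Each membership test also passes when its exception list is empty,
    # i.e. the "or not excepted_accounts" pattern from the instruction.
    excepted_accounts = exceptions.get("Accounts", [])
    excepted_regions = exceptions.get("Regions", [])
    excepted_resources = exceptions.get("Resources", [])
    is_account_excepted = audited_account in excepted_accounts or not excepted_accounts
    is_region_excepted = region in excepted_regions or not excepted_regions
    is_resource_excepted = resource in excepted_resources or not excepted_resources
    return is_account_excepted and is_region_excepted and is_resource_excepted

One consequence of the `or not` pattern worth noting: with an entirely empty exceptions dict, every finding would be treated as excepted, so code following this sketch would normally guard on an exceptions block existing before calling `is_excepted`.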
--hash=sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece \
--hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341
adal==1.2.7 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d \
--hash=sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1
alive-progress==3.1.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5 \
--hash=sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98
attrs==23.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
--hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
awsipranges==0.3.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0 \
--hash=sha256:f3d7a54aeaf7fe310beb5d377a4034a63a51b72677ae6af3e0967bc4de7eedaf
azure-common==1.1.28 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3 \
--hash=sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad
azure-core==1.28.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9 \
--hash=sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd
azure-identity==1.15.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8 \
--hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912
azure-mgmt-authorization==4.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4 \
--hash=sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a
azure-mgmt-core==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d \
--hash=sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae
azure-mgmt-security==5.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38b03efe82c2344cea203fda95e6d00b7ac22782fa1c0b585cd0ea2c8ff3e702 \
--hash=sha256:73a74ce8f6ffb1b345ce101c8abdd42238f161f0988d168d23918feda0089654
azure-mgmt-sql==3.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:129042cc011225e27aee6ef2697d585fa5722e5d1aeb0038af6ad2451a285457 \
--hash=sha256:1d1dd940d4d41be4ee319aad626341251572a5bf4a2addec71779432d9a1381f
azure-mgmt-storage==21.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc \
--hash=sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162
azure-mgmt-subscription==3.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38d4574a8d47fa17e3587d756e296cb63b82ad8fb21cd8543bcee443a502bf48 \
--hash=sha256:4e255b4ce9b924357bb8c5009b3c88a2014d3203b2495e2256fa027bf84e800e
azure-storage-blob==12.19.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897 \
--hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b
boto3==1.26.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30 \
--hash=sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c
botocore==1.29.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df \
--hash=sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210
cachetools==5.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
certifi==2023.7.22 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
cffi==1.15.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
click==8.1.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
colorama==0.4.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
contextlib2==21.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \
--hash=sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869
cryptography==41.0.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \
--hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \
--hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \
--hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \
--hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \
--hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \
--hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \
--hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \
--hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \
--hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \
--hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \
--hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \
--hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \
--hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \
--hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \
--hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \
--hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \
--hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \
--hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \
--hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \
--hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \
--hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \
--hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f
detect-secrets==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd \
--hash=sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833
filelock==3.12.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81 \
--hash=sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec
google-api-core==2.11.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \
--hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e
google-api-python-client==2.108.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6396efca83185fb205c0abdbc1c2ee57b40475578c6af37f6d0e30a639aade99 \
--hash=sha256:9d1327213e388943ebcd7db5ce6e7f47987a7e6874e3e1f6116010eea4a0e75d
google-auth-httplib2==0.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c \
--hash=sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29
google-auth==2.17.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
--hash=sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f
googleapis-common-protos==1.59.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44 \
--hash=sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f
grapheme==0.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca
httplib2==0.22.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \
--hash=sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81
idna==3.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
isodate==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 \
--hash=sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9
jmespath==1.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
jsonschema-specifications==2023.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \
--hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28
jsonschema==4.18.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8caf5b57a990a98e9b39832ef3cb35c176fe331414252b6e1b26fd5866f891a4 \
--hash=sha256:b508dd6142bd03f4c3670534c80af68cd7bbff9ea830b9cf2625d4a3c49ddf60
msal-extensions==1.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee \
--hash=sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354
msal==1.24.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8 \
--hash=sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad
msgraph-core==0.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:147324246788abe8ed7e05534cd9e4e0ec98b33b30e011693b8d014cebf97f63 \
--hash=sha256:e297564b9a0ca228493d8851f95cb2de9522143d82efa40ce3a6ad286e21392e
msrest==0.7.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32 \
--hash=sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9
msrestazure==0.6.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9 \
--hash=sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189
oauthlib==3.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \
--hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918
portalocker==2.7.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51 \
--hash=sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983
protobuf==4.23.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \
--hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \
--hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \
--hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \
--hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \
--hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \
--hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \
--hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \
--hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \
--hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \
--hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \
--hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2
pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
--hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
pycparser==2.21 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.13 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \
--hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \
--hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \
--hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \
--hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \
--hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \
--hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \
--hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \
--hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \
--hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \
--hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \
--hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \
--hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \
--hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \
--hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \
--hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \
--hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \
--hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \
--hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \
--hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \
--hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \
--hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \
--hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \
--hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \
--hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \
--hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \
--hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \
--hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \
--hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \
--hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \
--hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \
--hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \
--hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \
--hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \
--hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0
pyjwt==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyjwt[crypto]==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
pywin32==306 ; python_version >= "3.9" and platform_system == "Windows" and python_version < "3.12" \
--hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \
--hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \
--hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \
--hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \
--hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \
--hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \
--hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \
--hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \
--hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \
--hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \
--hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \
--hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0
pyyaml==6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
referencing==0.29.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \
--hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f
requests-file==1.5.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \
--hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953
requests-oauthlib==1.3.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \
--hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a
requests==2.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
rpds-py==0.8.10 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \
--hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \
--hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \
--hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \
--hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \
--hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \
--hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \
--hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \
--hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \
--hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \
--hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \
--hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \
--hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \
--hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \
--hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \
--hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \
--hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \
--hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \
--hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \
--hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \
--hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \
--hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \
--hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \
--hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \
--hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \
--hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \
--hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \
--hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \
--hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \
--hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \
--hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \
--hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \
--hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \
--hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \
--hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \
--hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \
--hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \
--hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \
--hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \
--hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \
--hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \
--hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \
--hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \
--hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \
--hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \
--hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \
--hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \
--hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \
--hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \
--hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \
--hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \
--hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \
--hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \
--hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \
--hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \
--hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \
--hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \
--hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \
--hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \
--hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \
--hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \
--hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \
--hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \
--hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \
--hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \
--hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \
--hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \
--hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \
--hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \
--hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \
--hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \
--hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \
--hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \
--hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \
--hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \
--hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \
--hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \
--hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \
--hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \
--hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \
--hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84
rsa==4.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
s3transfer==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346 \
--hash=sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9
schema==0.7.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197 \
--hash=sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c
shodan==1.30.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:bedb6e8c2b4459592c1bc17b4d4b57dab0cb58a455ad589ee26a6304242cd505
six==1.16.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
slack-sdk==3.24.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:741ea5381e65f4407d24ed81203912cbd6bfe807a6704b1d3c5ad346c86000b6 \
--hash=sha256:cae64f0177a53d34cca59cc691d4535edd18929843a936b97cea421db9e4fbfe
tabulate==0.9.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \
--hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f
tldextract==3.4.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2 \
--hash=sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234
typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
urllib3==1.26.18 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
--hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
xlsxwriter==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122 \
--hash=sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de | python3.9 | 0fff0568 | diff --git a/prowler/providers/aws/lib/allowlist/allowlist.py b/prowler/providers/aws/lib/allowlist/allowlist.py
--- a/prowler/providers/aws/lib/allowlist/allowlist.py
+++ b/prowler/providers/aws/lib/allowlist/allowlist.py
@@ -143,29 +143,23 @@ def is_allowlisted(
finding_tags,
):
try:
- allowlisted_checks = {}
# By default is not allowlisted
is_finding_allowlisted = False
- # First set account key from allowlist dict
- if audited_account in allowlist["Accounts"]:
- allowlisted_checks = allowlist["Accounts"][audited_account]["Checks"]
- # If there is a *, it affects to all accounts
- # This cannot be elif since in the case of * and single accounts we
- # want to merge allowlisted checks from * to the other accounts check list
- if "*" in allowlist["Accounts"]:
- checks_multi_account = allowlist["Accounts"]["*"]["Checks"]
- allowlisted_checks.update(checks_multi_account)
-
- # Test if it is allowlisted
- if is_allowlisted_in_check(
- allowlisted_checks,
- audited_account,
- check,
- finding_region,
- finding_resource,
- finding_tags,
- ):
- is_finding_allowlisted = True
+
+ # We always check all the accounts present in the allowlist
+ # if one allowlists the finding we set the finding as allowlisted
+ for account in allowlist["Accounts"]:
+ if account == audited_account or account == "*":
+ if is_allowlisted_in_check(
+ allowlist["Accounts"][account]["Checks"],
+ audited_account,
+ check,
+ finding_region,
+ finding_resource,
+ finding_tags,
+ ):
+ is_finding_allowlisted = True
+ break
 
         return is_finding_allowlisted
except Exception as error:
@@ -310,10 +304,10 @@ def is_excepted(
is_tag_excepted = __is_item_matched__(excepted_tags, finding_tags)
 
         if (
- is_account_excepted
- and is_region_excepted
- and is_resource_excepted
- and is_tag_excepted
+ (is_account_excepted or not excepted_accounts)
+ and (is_region_excepted or not excepted_regions)
+ and (is_resource_excepted or not excepted_resources)
+ and (is_tag_excepted or not excepted_tags)
):
excepted = True
return excepted
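As an aside to the two hunks above: they replace the old merge-the-wildcard-checks approach with a single pass over every account entry, and relax the exception matching. A minimal, runnable sketch of the resulting `is_allowlisted` control flow follows, assuming a deliberately simplified stand-in for `is_allowlisted_in_check` (the real helper also matches regions, resources, and tags and honours `Exceptions`); the early `return True` is equivalent to the patch's flag-plus-`break` form:

def is_allowlisted_in_check(checks, account, check, region, resource, tags):
    # Hypothetical stand-in: the real helper does much more; here we only
    # match on the check name (or a "*" wildcard check entry).
    return check in checks or "*" in checks

def is_allowlisted(allowlist, audited_account, check, region, resource, tags):
    # Walk every account entry; both the audited account and the "*" wildcard
    # entry get a chance to allowlist the finding, with no dict merging needed.
    for account in allowlist["Accounts"]:
        if account == audited_account or account == "*":
            if is_allowlisted_in_check(
                allowlist["Accounts"][account]["Checks"],
                audited_account, check, region, resource, tags,
            ):
                return True  # first matching account wins
    return False

# Example: a wildcard entry allowlists the finding for any audited account.
assert is_allowlisted(
    {"Accounts": {"*": {"Checks": {"check_test_1": {}}}}},
    "123456789012", "check_test_1", "eu-west-1", "resource_1", "",
)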
diff --git a/tests/providers/aws/audit_info_utils.py b/tests/providers/aws/audit_info_utils.py
--- a/tests/providers/aws/audit_info_utils.py
+++ b/tests/providers/aws/audit_info_utils.py
@@ -18,8 +18,11 @@ AWS_REGION_EU_WEST_2 = "eu-west-2"
AWS_REGION_CN_NORTHWEST_1 = "cn-northwest-1"
AWS_REGION_CN_NORTH_1 = "cn-north-1"
AWS_REGION_EU_SOUTH_2 = "eu-south-2"
+AWS_REGION_EU_SOUTH_3 = "eu-south-3"
AWS_REGION_US_WEST_2 = "us-west-2"
AWS_REGION_US_EAST_2 = "us-east-2"
+AWS_REGION_EU_CENTRAL_1 = "eu-central-1"
+
 
 # China Regions
AWS_REGION_CHINA_NORHT_1 = "cn-north-1"
diff --git a/tests/providers/aws/lib/allowlist/allowlist_test.py b/tests/providers/aws/lib/allowlist/allowlist_test.py
--- a/tests/providers/aws/lib/allowlist/allowlist_test.py
+++ b/tests/providers/aws/lib/allowlist/allowlist_test.py
@@ -15,6 +15,8 @@ from prowler.providers.aws.lib.allowlist.allowlist import (
)
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
+ AWS_REGION_EU_CENTRAL_1,
+ AWS_REGION_EU_SOUTH_3,
AWS_REGION_EU_WEST_1,
AWS_REGION_US_EAST_1,
set_mocked_aws_audit_info,
@@ -132,8 +134,7 @@ class Test_Allowlist:
)
 
     # Allowlist tests
-
- def test_allowlist_findings(self):
+ def test_allowlist_findings_only_wildcard(self):
# Allowlist example
allowlist = {
"Accounts": {
@@ -205,12 +206,6 @@ class Test_Allowlist:
"Tags": ["*"],
"Regions": ["*"],
"Resources": ["*"],
- "Exceptions": {
- "Tags": [],
- "Regions": [],
- "Accounts": [],
- "Resources": [],
- },
}
}
}
@@ -444,6 +439,155 @@ class Test_Allowlist:
)
)
 
+    def test_is_allowlisted_all_and_single_account_with_different_resources(self):
+ # Allowlist example
+ allowlist = {
+ "Accounts": {
+ "*": {
+ "Checks": {
+ "check_test_1": {
+ "Regions": ["*"],
+ "Resources": ["resource_1", "resource_2"],
+ },
+ }
+ },
+ AWS_ACCOUNT_NUMBER: {
+ "Checks": {
+ "check_test_1": {
+ "Regions": ["*"],
+ "Resources": ["resource_3"],
+ }
+ }
+ },
+ }
+ }
+
+ assert is_allowlisted(
+ allowlist,
+ "111122223333",
+ "check_test_1",
+ AWS_REGION_US_EAST_1,
+ "resource_1",
+ "",
+ )
+
+ assert is_allowlisted(
+ allowlist,
+ "111122223333",
+ "check_test_1",
+ AWS_REGION_US_EAST_1,
+ "resource_2",
+ "",
+ )
+
+ assert not is_allowlisted(
+ allowlist,
+ "111122223333",
+ "check_test_1",
+ AWS_REGION_US_EAST_1,
+ "resource_3",
+ "",
+ )
+
+ assert is_allowlisted(
+ allowlist,
+ AWS_ACCOUNT_NUMBER,
+ "check_test_1",
+ AWS_REGION_US_EAST_1,
+ "resource_3",
+ "",
+ )
+
+ assert is_allowlisted(
+ allowlist,
+ AWS_ACCOUNT_NUMBER,
+ "check_test_1",
+ AWS_REGION_US_EAST_1,
+ "resource_2",
+ "",
+ )
+
+ def test_is_allowlisted_all_and_single_account_with_different_resources_and_exceptions(
+ self,
+ ):
+ # Allowlist example
+ allowlist = {
+ "Accounts": {
+ "*": {
+ "Checks": {
+ "check_test_1": {
+ "Regions": ["*"],
+ "Resources": ["resource_1", "resource_2"],
+ "Exceptions": {"Regions": [AWS_REGION_US_EAST_1]},
+ },
+ }
+ },
+ AWS_ACCOUNT_NUMBER: {
+ "Checks": {
+ "check_test_1": {
+ "Regions": ["*"],
+ "Resources": ["resource_3"],
+ "Exceptions": {"Regions": [AWS_REGION_EU_WEST_1]},
+ }
+ }
+ },
+ }
+ }
+
+ assert not is_allowlisted(
+ allowlist,
+ AWS_ACCOUNT_NUMBER,
+ "check_test_1",
+ AWS_REGION_US_EAST_1,
+ "resource_2",
+ "",
+ )
+
+ assert not is_allowlisted(
+ allowlist,
+ "111122223333",
+ "check_test_1",
+ AWS_REGION_US_EAST_1,
+ "resource_1",
+ "",
+ )
+
+ assert is_allowlisted(
+ allowlist,
+ "111122223333",
+ "check_test_1",
+ AWS_REGION_EU_WEST_1,
+ "resource_2",
+ "",
+ )
+
+ assert not is_allowlisted(
+ allowlist,
+ "111122223333",
+ "check_test_1",
+ AWS_REGION_US_EAST_1,
+ "resource_3",
+ "",
+ )
+
+ assert is_allowlisted(
+ allowlist,
+ AWS_ACCOUNT_NUMBER,
+ "check_test_1",
+ AWS_REGION_US_EAST_1,
+ "resource_3",
+ "",
+ )
+
+ assert not is_allowlisted(
+ allowlist,
+ AWS_ACCOUNT_NUMBER,
+ "check_test_1",
+ AWS_REGION_EU_WEST_1,
+ "resource_3",
+ "",
+ )
+
def test_is_allowlisted_single_account(self):
allowlist = {
"Accounts": {
@@ -717,6 +861,111 @@ class Test_Allowlist:
)
)
 
+    def test_is_allowlisted_specific_account_with_other_account_excepted(self):
+ # Allowlist example
+ allowlist = {
+ "Accounts": {
+ AWS_ACCOUNT_NUMBER: {
+ "Checks": {
+ "check_test": {
+ "Regions": [AWS_REGION_EU_WEST_1],
+ "Resources": ["*"],
+ "Tags": [],
+ "Exceptions": {"Accounts": ["111122223333"]},
+ }
+ }
+ }
+ }
+ }
+
+ assert is_allowlisted(
+ allowlist,
+ AWS_ACCOUNT_NUMBER,
+ "check_test",
+ AWS_REGION_EU_WEST_1,
+ "prowler",
+ "environment=dev",
+ )
+
+ assert not is_allowlisted(
+ allowlist,
+ "111122223333",
+ "check_test",
+ AWS_REGION_EU_WEST_1,
+ "prowler",
+ "environment=dev",
+ )
+
+ def test_is_allowlisted_complex_allowlist(self):
+ # Allowlist example
+ allowlist = {
+ "Accounts": {
+ "*": {
+ "Checks": {
+ "s3_bucket_object_versioning": {
+ "Regions": [AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1],
+ "Resources": ["ci-logs", "logs", ".+-logs"],
+ },
+ "ecs_task_definitions_no_environment_secrets": {
+ "Regions": ["*"],
+ "Resources": ["*"],
+ "Exceptions": {
+ "Accounts": [AWS_ACCOUNT_NUMBER],
+ "Regions": [
+ AWS_REGION_EU_WEST_1,
+ AWS_REGION_EU_SOUTH_3,
+ ],
+ },
+ },
+ "*": {
+ "Regions": ["*"],
+ "Resources": ["*"],
+ "Tags": ["environment=dev"],
+ },
+ }
+ },
+ AWS_ACCOUNT_NUMBER: {
+ "Checks": {
+ "*": {
+ "Regions": ["*"],
+ "Resources": ["*"],
+ "Exceptions": {
+ "Resources": ["test"],
+ "Tags": ["environment=prod"],
+ },
+ }
+ }
+ },
+ }
+ }
+
+ assert is_allowlisted(
+ allowlist,
+ AWS_ACCOUNT_NUMBER,
+ "test_check",
+ AWS_REGION_EU_WEST_1,
+ "prowler-logs",
+ "environment=dev",
+ )
+
+ assert is_allowlisted(
+ allowlist,
+ AWS_ACCOUNT_NUMBER,
+ "ecs_task_definitions_no_environment_secrets",
+ AWS_REGION_EU_WEST_1,
+ "prowler",
+ "environment=dev",
+ )
+
+ assert is_allowlisted(
+ allowlist,
+ AWS_ACCOUNT_NUMBER,
+ "s3_bucket_object_versioning",
+ AWS_REGION_EU_WEST_1,
+ "prowler-logs",
+ "environment=dev",
+ )
+
def test_is_allowlisted_in_tags(self):
         allowlist_tags = ["environment=dev", "project=prowler"]
 
@@ -791,6 +1040,107 @@ class Test_Allowlist:
"environment=test",
)
 
+    def test_is_excepted_only_in_account(self):
+ # Allowlist example
+ exceptions = {
+ "Accounts": [AWS_ACCOUNT_NUMBER],
+ "Regions": [],
+ "Resources": [],
+ "Tags": [],
+ }
+
+ assert is_excepted(
+ exceptions,
+ AWS_ACCOUNT_NUMBER,
+ "eu-central-1",
+ "test",
+ "environment=test",
+ )
+
+ def test_is_excepted_only_in_region(self):
+ # Allowlist example
+ exceptions = {
+ "Accounts": [],
+ "Regions": [AWS_REGION_EU_CENTRAL_1, AWS_REGION_EU_SOUTH_3],
+ "Resources": [],
+ "Tags": [],
+ }
+
+ assert is_excepted(
+ exceptions,
+ AWS_ACCOUNT_NUMBER,
+ AWS_REGION_EU_CENTRAL_1,
+ "test",
+ "environment=test",
+ )
+
+ def test_is_excepted_only_in_resources(self):
+ # Allowlist example
+ exceptions = {
+ "Accounts": [],
+ "Regions": [],
+ "Resources": ["resource_1"],
+ "Tags": [],
+ }
+
+ assert is_excepted(
+ exceptions,
+ AWS_ACCOUNT_NUMBER,
+ AWS_REGION_EU_CENTRAL_1,
+ "resource_1",
+ "environment=test",
+ )
+
+ def test_is_excepted_only_in_tags(self):
+ # Allowlist example
+ exceptions = {
+ "Accounts": [],
+ "Regions": [],
+ "Resources": [],
+ "Tags": ["environment=test"],
+ }
+
+ assert is_excepted(
+ exceptions,
+ AWS_ACCOUNT_NUMBER,
+ AWS_REGION_EU_CENTRAL_1,
+ "resource_1",
+ "environment=test",
+ )
+
+ def test_is_excepted_in_account_and_tags(self):
+ # Allowlist example
+ exceptions = {
+ "Accounts": [AWS_ACCOUNT_NUMBER],
+ "Regions": [],
+ "Resources": [],
+ "Tags": ["environment=test"],
+ }
+
+ assert is_excepted(
+ exceptions,
+ AWS_ACCOUNT_NUMBER,
+ AWS_REGION_EU_CENTRAL_1,
+ "resource_1",
+ "environment=test",
+ )
+
+ assert not is_excepted(
+ exceptions,
+ "111122223333",
+ AWS_REGION_EU_CENTRAL_1,
+ "resource_1",
+ "environment=test",
+ )
+
+ assert not is_excepted(
+ exceptions,
+ "111122223333",
+ AWS_REGION_EU_CENTRAL_1,
+ "resource_1",
+ "environment=dev",
+ )
+
def test_is_excepted_all_wildcard(self):
exceptions = {
"Accounts": ["*"],
| [
{
"content": "import re\nimport sys\nfrom typing import Any\n\nimport yaml\nfrom boto3.dynamodb.conditions import Attr\nfrom schema import Optional, Schema\n\nfrom prowler.lib.logger import logger\nfrom prowler.lib.outputs.models import unroll_tags\n\nallowlist_schema = Schema(\n {\n \"Accounts\": {\n str: {\n \"Checks\": {\n str: {\n \"Regions\": list,\n \"Resources\": list,\n Optional(\"Tags\"): list,\n Optional(\"Exceptions\"): {\n Optional(\"Accounts\"): list,\n Optional(\"Regions\"): list,\n Optional(\"Resources\"): list,\n Optional(\"Tags\"): list,\n },\n }\n }\n }\n }\n }\n)\n\n\ndef parse_allowlist_file(audit_info, allowlist_file):\n try:\n # Check if file is a S3 URI\n if re.search(\"^s3://([^/]+)/(.*?([^/]+))$\", allowlist_file):\n bucket = allowlist_file.split(\"/\")[2]\n key = (\"/\").join(allowlist_file.split(\"/\")[3:])\n s3_client = audit_info.audit_session.client(\"s3\")\n allowlist = yaml.safe_load(\n s3_client.get_object(Bucket=bucket, Key=key)[\"Body\"]\n )[\"Allowlist\"]\n # Check if file is a Lambda Function ARN\n elif re.search(r\"^arn:(\\w+):lambda:\", allowlist_file):\n lambda_region = allowlist_file.split(\":\")[3]\n lambda_client = audit_info.audit_session.client(\n \"lambda\", region_name=lambda_region\n )\n lambda_response = lambda_client.invoke(\n FunctionName=allowlist_file, InvocationType=\"RequestResponse\"\n )\n lambda_payload = lambda_response[\"Payload\"].read()\n allowlist = yaml.safe_load(lambda_payload)[\"Allowlist\"]\n # Check if file is a DynamoDB ARN\n elif re.search(\n r\"^arn:aws(-cn|-us-gov)?:dynamodb:[a-z]{2}-[a-z-]+-[1-9]{1}:[0-9]{12}:table\\/[a-zA-Z0-9._-]+$\",\n allowlist_file,\n ):\n allowlist = {\"Accounts\": {}}\n table_region = allowlist_file.split(\":\")[3]\n dynamodb_resource = audit_info.audit_session.resource(\n \"dynamodb\", region_name=table_region\n )\n dynamo_table = dynamodb_resource.Table(allowlist_file.split(\"/\")[1])\n response = dynamo_table.scan(\n FilterExpression=Attr(\"Accounts\").is_in(\n [audit_info.audited_account, \"*\"]\n )\n )\n dynamodb_items = response[\"Items\"]\n # Paginate through all results\n while \"LastEvaluatedKey\" in dynamodb_items:\n response = dynamo_table.scan(\n ExclusiveStartKey=response[\"LastEvaluatedKey\"],\n FilterExpression=Attr(\"Accounts\").is_in(\n [audit_info.audited_account, \"*\"]\n ),\n )\n dynamodb_items.update(response[\"Items\"])\n for item in dynamodb_items:\n # Create allowlist for every item\n allowlist[\"Accounts\"][item[\"Accounts\"]] = {\n \"Checks\": {\n item[\"Checks\"]: {\n \"Regions\": item[\"Regions\"],\n \"Resources\": item[\"Resources\"],\n }\n }\n }\n if \"Tags\" in item:\n allowlist[\"Accounts\"][item[\"Accounts\"]][\"Checks\"][item[\"Checks\"]][\n \"Tags\"\n ] = item[\"Tags\"]\n if \"Exceptions\" in item:\n allowlist[\"Accounts\"][item[\"Accounts\"]][\"Checks\"][item[\"Checks\"]][\n \"Exceptions\"\n ] = item[\"Exceptions\"]\n else:\n with open(allowlist_file) as f:\n allowlist = yaml.safe_load(f)[\"Allowlist\"]\n try:\n allowlist_schema.validate(allowlist)\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__} -- Allowlist YAML is malformed - {error}[{error.__traceback__.tb_lineno}]\"\n )\n sys.exit(1)\n return allowlist\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]\"\n )\n sys.exit(1)\n\n\ndef allowlist_findings(\n allowlist: dict,\n audited_account: str,\n check_findings: [Any],\n):\n # Check if finding is allowlisted\n for finding in check_findings:\n if 
is_allowlisted(\n allowlist,\n audited_account,\n finding.check_metadata.CheckID,\n finding.region,\n finding.resource_id,\n unroll_tags(finding.resource_tags),\n ):\n finding.status = \"WARNING\"\n return check_findings\n\n\ndef is_allowlisted(\n allowlist: dict,\n audited_account: str,\n check: str,\n finding_region: str,\n finding_resource: str,\n finding_tags,\n):\n try:\n allowlisted_checks = {}\n # By default is not allowlisted\n is_finding_allowlisted = False\n # First set account key from allowlist dict\n if audited_account in allowlist[\"Accounts\"]:\n allowlisted_checks = allowlist[\"Accounts\"][audited_account][\"Checks\"]\n # If there is a *, it affects to all accounts\n # This cannot be elif since in the case of * and single accounts we\n # want to merge allowlisted checks from * to the other accounts check list\n if \"*\" in allowlist[\"Accounts\"]:\n checks_multi_account = allowlist[\"Accounts\"][\"*\"][\"Checks\"]\n allowlisted_checks.update(checks_multi_account)\n\n # Test if it is allowlisted\n if is_allowlisted_in_check(\n allowlisted_checks,\n audited_account,\n check,\n finding_region,\n finding_resource,\n finding_tags,\n ):\n is_finding_allowlisted = True\n\n return is_finding_allowlisted\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]\"\n )\n sys.exit(1)\n\n\ndef is_allowlisted_in_check(\n allowlisted_checks,\n audited_account,\n check,\n finding_region,\n finding_resource,\n finding_tags,\n):\n try:\n # Default value is not allowlisted\n is_check_allowlisted = False\n\n for allowlisted_check, allowlisted_check_info in allowlisted_checks.items():\n # map lambda to awslambda\n allowlisted_check = re.sub(\"^lambda\", \"awslambda\", allowlisted_check)\n\n # Check if the finding is excepted\n exceptions = allowlisted_check_info.get(\"Exceptions\")\n if is_excepted(\n exceptions,\n audited_account,\n finding_region,\n finding_resource,\n finding_tags,\n ):\n # Break loop and return default value since is excepted\n break\n\n allowlisted_regions = allowlisted_check_info.get(\"Regions\")\n allowlisted_resources = allowlisted_check_info.get(\"Resources\")\n allowlisted_tags = allowlisted_check_info.get(\"Tags\")\n # If there is a *, it affects to all checks\n if (\n \"*\" == allowlisted_check\n or check == allowlisted_check\n or re.search(allowlisted_check, check)\n ):\n allowlisted_in_check = True\n allowlisted_in_region = is_allowlisted_in_region(\n allowlisted_regions, finding_region\n )\n allowlisted_in_resource = is_allowlisted_in_resource(\n allowlisted_resources, finding_resource\n )\n allowlisted_in_tags = is_allowlisted_in_tags(\n allowlisted_tags, finding_tags\n )\n\n # For a finding to be allowlisted requires the following set to True:\n # - allowlisted_in_check -> True\n # - allowlisted_in_region -> True\n # - allowlisted_in_tags -> True or allowlisted_in_resource -> True\n # - excepted -> False\n\n if (\n allowlisted_in_check\n and allowlisted_in_region\n and (allowlisted_in_tags or allowlisted_in_resource)\n ):\n is_check_allowlisted = True\n\n return is_check_allowlisted\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]\"\n )\n sys.exit(1)\n\n\ndef is_allowlisted_in_region(\n allowlisted_regions,\n finding_region,\n):\n try:\n return __is_item_matched__(allowlisted_regions, finding_region)\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__} -- 
{error}[{error.__traceback__.tb_lineno}]\"\n )\n sys.exit(1)\n\n\ndef is_allowlisted_in_tags(allowlisted_tags, finding_tags):\n try:\n return __is_item_matched__(allowlisted_tags, finding_tags)\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]\"\n )\n sys.exit(1)\n\n\ndef is_allowlisted_in_resource(allowlisted_resources, finding_resource):\n try:\n return __is_item_matched__(allowlisted_resources, finding_resource)\n\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]\"\n )\n sys.exit(1)\n\n\ndef is_excepted(\n exceptions,\n audited_account,\n finding_region,\n finding_resource,\n finding_tags,\n):\n \"\"\"is_excepted returns True if the account, region, resource and tags are excepted\"\"\"\n try:\n excepted = False\n is_account_excepted = False\n is_region_excepted = False\n is_resource_excepted = False\n is_tag_excepted = False\n if exceptions:\n excepted_accounts = exceptions.get(\"Accounts\", [])\n is_account_excepted = __is_item_matched__(\n excepted_accounts, audited_account\n )\n\n excepted_regions = exceptions.get(\"Regions\", [])\n is_region_excepted = __is_item_matched__(excepted_regions, finding_region)\n\n excepted_resources = exceptions.get(\"Resources\", [])\n is_resource_excepted = __is_item_matched__(\n excepted_resources, finding_resource\n )\n\n excepted_tags = exceptions.get(\"Tags\", [])\n is_tag_excepted = __is_item_matched__(excepted_tags, finding_tags)\n\n if (\n is_account_excepted\n and is_region_excepted\n and is_resource_excepted\n and is_tag_excepted\n ):\n excepted = True\n return excepted\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]\"\n )\n sys.exit(1)\n\n\ndef __is_item_matched__(matched_items, finding_items):\n \"\"\"__is_item_matched__ return True if any of the matched_items are present in the finding_items, otherwise returns False.\"\"\"\n try:\n is_item_matched = False\n if matched_items and (finding_items or finding_items == \"\"):\n for item in matched_items:\n if item == \"*\":\n item = \".*\"\n if re.search(item, finding_items):\n is_item_matched = True\n break\n return is_item_matched\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]\"\n )\n sys.exit(1)\n",
"path": "prowler/providers/aws/lib/allowlist/allowlist.py"
},
{
"content": "from boto3 import session\n\nfrom prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info\nfrom prowler.providers.common.models import Audit_Metadata\n\n# Root AWS Account\nAWS_ACCOUNT_NUMBER = \"123456789012\"\nAWS_ACCOUNT_ARN = f\"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root\"\n\n# Commercial Regions\nAWS_REGION_US_EAST_1 = \"us-east-1\"\nAWS_REGION_US_EAST_1_AZA = \"us-east-1a\"\nAWS_REGION_US_EAST_1_AZB = \"us-east-1b\"\nAWS_REGION_EU_WEST_1 = \"eu-west-1\"\nAWS_REGION_EU_WEST_1_AZA = \"eu-west-1a\"\nAWS_REGION_EU_WEST_1_AZB = \"eu-west-1b\"\nAWS_REGION_EU_WEST_2 = \"eu-west-2\"\nAWS_REGION_CN_NORTHWEST_1 = \"cn-northwest-1\"\nAWS_REGION_CN_NORTH_1 = \"cn-north-1\"\nAWS_REGION_EU_SOUTH_2 = \"eu-south-2\"\nAWS_REGION_US_WEST_2 = \"us-west-2\"\nAWS_REGION_US_EAST_2 = \"us-east-2\"\n\n# China Regions\nAWS_REGION_CHINA_NORHT_1 = \"cn-north-1\"\n\n# Gov Cloud Regions\nAWS_REGION_GOV_CLOUD_US_EAST_1 = \"us-gov-east-1\"\n\n# Iso Regions\nAWS_REGION_ISO_GLOBAL = \"aws-iso-global\"\n\n# AWS Partitions\nAWS_COMMERCIAL_PARTITION = \"aws\"\nAWS_GOV_CLOUD_PARTITION = \"aws-us-gov\"\nAWS_CHINA_PARTITION = \"aws-cn\"\nAWS_ISO_PARTITION = \"aws-iso\"\n\n\n# Mocked AWS Audit Info\ndef set_mocked_aws_audit_info(\n audited_regions: [str] = [],\n audited_account: str = AWS_ACCOUNT_NUMBER,\n audited_account_arn: str = AWS_ACCOUNT_ARN,\n audited_partition: str = AWS_COMMERCIAL_PARTITION,\n expected_checks: [str] = [],\n profile_region: str = None,\n audit_config: dict = {},\n ignore_unused_services: bool = False,\n assumed_role_info: AWS_Assume_Role = None,\n audit_session: session.Session = session.Session(\n profile_name=None,\n botocore_session=None,\n ),\n original_session: session.Session = None,\n enabled_regions: set = None,\n):\n audit_info = AWS_Audit_Info(\n session_config=None,\n original_session=original_session,\n audit_session=audit_session,\n audited_account=audited_account,\n audited_account_arn=audited_account_arn,\n audited_user_id=None,\n audited_partition=audited_partition,\n audited_identity_arn=None,\n profile=None,\n profile_region=profile_region,\n credentials=None,\n assumed_role_info=assumed_role_info,\n audited_regions=audited_regions,\n organizations_metadata=None,\n audit_resources=[],\n mfa_enabled=False,\n audit_metadata=Audit_Metadata(\n services_scanned=0,\n expected_checks=expected_checks,\n completed_checks=0,\n audit_progress=0,\n ),\n audit_config=audit_config,\n ignore_unused_services=ignore_unused_services,\n enabled_regions=enabled_regions if enabled_regions else set(audited_regions),\n )\n return audit_info\n",
"path": "tests/providers/aws/audit_info_utils.py"
},
{
"content": "import yaml\nfrom boto3 import resource\nfrom mock import MagicMock\nfrom moto import mock_dynamodb, mock_s3\n\nfrom prowler.providers.aws.lib.allowlist.allowlist import (\n allowlist_findings,\n is_allowlisted,\n is_allowlisted_in_check,\n is_allowlisted_in_region,\n is_allowlisted_in_resource,\n is_allowlisted_in_tags,\n is_excepted,\n parse_allowlist_file,\n)\nfrom tests.providers.aws.audit_info_utils import (\n AWS_ACCOUNT_NUMBER,\n AWS_REGION_EU_WEST_1,\n AWS_REGION_US_EAST_1,\n set_mocked_aws_audit_info,\n)\n\n\nclass Test_Allowlist:\n # Test S3 allowlist\n @mock_s3\n def test_s3_allowlist(self):\n audit_info = set_mocked_aws_audit_info()\n # Create bucket and upload allowlist yaml\n s3_resource = resource(\"s3\", region_name=AWS_REGION_US_EAST_1)\n s3_resource.create_bucket(Bucket=\"test-allowlist\")\n s3_resource.Object(\"test-allowlist\", \"allowlist.yaml\").put(\n Body=open(\n \"tests/providers/aws/lib/allowlist/fixtures/allowlist.yaml\",\n \"rb\",\n )\n )\n\n with open(\"tests/providers/aws/lib/allowlist/fixtures/allowlist.yaml\") as f:\n assert yaml.safe_load(f)[\"Allowlist\"] == parse_allowlist_file(\n audit_info, \"s3://test-allowlist/allowlist.yaml\"\n )\n\n # Test DynamoDB allowlist\n @mock_dynamodb\n def test_dynamo_allowlist(self):\n audit_info = set_mocked_aws_audit_info()\n # Create table and put item\n dynamodb_resource = resource(\"dynamodb\", region_name=AWS_REGION_US_EAST_1)\n table_name = \"test-allowlist\"\n params = {\n \"TableName\": table_name,\n \"KeySchema\": [\n {\"AttributeName\": \"Accounts\", \"KeyType\": \"HASH\"},\n {\"AttributeName\": \"Checks\", \"KeyType\": \"RANGE\"},\n ],\n \"AttributeDefinitions\": [\n {\"AttributeName\": \"Accounts\", \"AttributeType\": \"S\"},\n {\"AttributeName\": \"Checks\", \"AttributeType\": \"S\"},\n ],\n \"ProvisionedThroughput\": {\n \"ReadCapacityUnits\": 10,\n \"WriteCapacityUnits\": 10,\n },\n }\n table = dynamodb_resource.create_table(**params)\n table.put_item(\n Item={\n \"Accounts\": \"*\",\n \"Checks\": \"iam_user_hardware_mfa_enabled\",\n \"Regions\": [AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1],\n \"Resources\": [\"keyword\"],\n }\n )\n\n assert (\n \"keyword\"\n in parse_allowlist_file(\n audit_info,\n \"arn:aws:dynamodb:\"\n + AWS_REGION_US_EAST_1\n + \":\"\n + str(AWS_ACCOUNT_NUMBER)\n + \":table/\"\n + table_name,\n )[\"Accounts\"][\"*\"][\"Checks\"][\"iam_user_hardware_mfa_enabled\"][\"Resources\"]\n )\n\n @mock_dynamodb\n def test_dynamo_allowlist_with_tags(self):\n audit_info = set_mocked_aws_audit_info()\n # Create table and put item\n dynamodb_resource = resource(\"dynamodb\", region_name=AWS_REGION_US_EAST_1)\n table_name = \"test-allowlist\"\n params = {\n \"TableName\": table_name,\n \"KeySchema\": [\n {\"AttributeName\": \"Accounts\", \"KeyType\": \"HASH\"},\n {\"AttributeName\": \"Checks\", \"KeyType\": \"RANGE\"},\n ],\n \"AttributeDefinitions\": [\n {\"AttributeName\": \"Accounts\", \"AttributeType\": \"S\"},\n {\"AttributeName\": \"Checks\", \"AttributeType\": \"S\"},\n ],\n \"ProvisionedThroughput\": {\n \"ReadCapacityUnits\": 10,\n \"WriteCapacityUnits\": 10,\n },\n }\n table = dynamodb_resource.create_table(**params)\n table.put_item(\n Item={\n \"Accounts\": \"*\",\n \"Checks\": \"*\",\n \"Regions\": [\"*\"],\n \"Resources\": [\"*\"],\n \"Tags\": [\"environment=dev\"],\n }\n )\n\n assert (\n \"environment=dev\"\n in parse_allowlist_file(\n audit_info,\n \"arn:aws:dynamodb:\"\n + AWS_REGION_US_EAST_1\n + \":\"\n + str(AWS_ACCOUNT_NUMBER)\n + \":table/\"\n + table_name,\n 
)[\"Accounts\"][\"*\"][\"Checks\"][\"*\"][\"Tags\"]\n )\n\n # Allowlist tests\n\n def test_allowlist_findings(self):\n # Allowlist example\n allowlist = {\n \"Accounts\": {\n \"*\": {\n \"Checks\": {\n \"check_test\": {\n \"Regions\": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],\n \"Resources\": [\"prowler\", \"^test\", \"prowler-pro\"],\n }\n }\n }\n }\n }\n\n # Check Findings\n check_findings = []\n finding_1 = MagicMock\n finding_1.check_metadata = MagicMock\n finding_1.check_metadata.CheckID = \"check_test\"\n finding_1.status = \"FAIL\"\n finding_1.region = AWS_REGION_US_EAST_1\n finding_1.resource_id = \"prowler\"\n finding_1.resource_tags = []\n\n check_findings.append(finding_1)\n\n allowlisted_findings = allowlist_findings(\n allowlist, AWS_ACCOUNT_NUMBER, check_findings\n )\n assert len(allowlisted_findings) == 1\n assert allowlisted_findings[0].status == \"WARNING\"\n\n def test_is_allowlisted_with_everything_excepted(self):\n allowlist = {\n \"Accounts\": {\n \"*\": {\n \"Checks\": {\n \"athena_*\": {\n \"Regions\": \"*\",\n \"Resources\": \"*\",\n \"Tags\": \"*\",\n \"Exceptions\": {\n \"Accounts\": [\"*\"],\n \"Regions\": [\"*\"],\n \"Resources\": [\"*\"],\n \"Tags\": [\"*\"],\n },\n }\n }\n }\n }\n }\n\n assert not is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"athena_1\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n def test_is_allowlisted_with_default_allowlist(self):\n allowlist = {\n \"Accounts\": {\n \"*\": {\n \"Checks\": {\n \"*\": {\n \"Tags\": [\"*\"],\n \"Regions\": [\"*\"],\n \"Resources\": [\"*\"],\n \"Exceptions\": {\n \"Tags\": [],\n \"Regions\": [],\n \"Accounts\": [],\n \"Resources\": [],\n },\n }\n }\n }\n }\n }\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"athena_1\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n def test_is_allowlisted(self):\n # Allowlist example\n allowlist = {\n \"Accounts\": {\n \"*\": {\n \"Checks\": {\n \"check_test\": {\n \"Regions\": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],\n \"Resources\": [\"prowler\", \"^test\", \"prowler-pro\"],\n }\n }\n }\n }\n }\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler-test\",\n \"\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"test-prowler\",\n \"\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler-pro-test\",\n \"\",\n )\n\n assert not (\n is_allowlisted(\n allowlist, AWS_ACCOUNT_NUMBER, \"check_test\", \"us-east-2\", \"test\", \"\"\n )\n )\n\n def test_is_allowlisted_wildcard(self):\n # Allowlist example\n allowlist = {\n \"Accounts\": {\n \"*\": {\n \"Checks\": {\n \"check_test\": {\n \"Regions\": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],\n \"Resources\": [\".*\"],\n }\n }\n }\n }\n }\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler-test\",\n \"\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"test-prowler\",\n \"\",\n )\n\n assert not (\n is_allowlisted(\n allowlist, AWS_ACCOUNT_NUMBER, \"check_test\", \"us-east-2\", \"test\", \"\"\n )\n )\n\n def 
test_is_allowlisted_asterisk(self):\n # Allowlist example\n allowlist = {\n \"Accounts\": {\n \"*\": {\n \"Checks\": {\n \"check_test\": {\n \"Regions\": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],\n \"Resources\": [\"*\"],\n }\n }\n }\n }\n }\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler-test\",\n \"\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"test-prowler\",\n \"\",\n )\n\n assert not (\n is_allowlisted(\n allowlist, AWS_ACCOUNT_NUMBER, \"check_test\", \"us-east-2\", \"test\", \"\"\n )\n )\n\n def test_is_allowlisted_all_and_single_account(self):\n # Allowlist example\n allowlist = {\n \"Accounts\": {\n \"*\": {\n \"Checks\": {\n \"check_test_2\": {\n \"Regions\": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],\n \"Resources\": [\"*\"],\n }\n }\n },\n AWS_ACCOUNT_NUMBER: {\n \"Checks\": {\n \"check_test\": {\n \"Regions\": [AWS_REGION_US_EAST_1],\n \"Resources\": [\"*\"],\n }\n }\n },\n }\n }\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test_2\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler-test\",\n \"\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"test-prowler\",\n \"\",\n )\n\n assert not (\n is_allowlisted(\n allowlist, AWS_ACCOUNT_NUMBER, \"check_test\", \"us-east-2\", \"test\", \"\"\n )\n )\n\n def test_is_allowlisted_single_account(self):\n allowlist = {\n \"Accounts\": {\n AWS_ACCOUNT_NUMBER: {\n \"Checks\": {\n \"check_test\": {\n \"Regions\": [AWS_REGION_US_EAST_1],\n \"Resources\": [\"prowler\"],\n }\n }\n }\n }\n }\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert not (\n is_allowlisted(\n allowlist, AWS_ACCOUNT_NUMBER, \"check_test\", \"us-east-2\", \"test\", \"\"\n )\n )\n\n def test_is_allowlisted_in_region(self):\n allowlisted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1]\n finding_region = AWS_REGION_US_EAST_1\n\n assert is_allowlisted_in_region(allowlisted_regions, finding_region)\n\n def test_is_allowlisted_in_region_wildcard(self):\n allowlisted_regions = [\"*\"]\n finding_region = AWS_REGION_US_EAST_1\n\n assert is_allowlisted_in_region(allowlisted_regions, finding_region)\n\n def test_is_not_allowlisted_in_region(self):\n allowlisted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1]\n finding_region = \"eu-west-2\"\n\n assert not is_allowlisted_in_region(allowlisted_regions, finding_region)\n\n def test_is_allowlisted_in_check(self):\n allowlisted_checks = {\n \"check_test\": {\n \"Regions\": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],\n \"Resources\": [\"*\"],\n }\n }\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler-test\",\n \"\",\n )\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n 
\"check_test\",\n AWS_REGION_US_EAST_1,\n \"test-prowler\",\n \"\",\n )\n\n assert not (\n is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n \"us-east-2\",\n \"test\",\n \"\",\n )\n )\n\n def test_is_allowlisted_in_check_regex(self):\n # Allowlist example\n allowlisted_checks = {\n \"s3_*\": {\n \"Regions\": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],\n \"Resources\": [\"*\"],\n }\n }\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"s3_bucket_public_access\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"s3_bucket_no_mfa_delete\",\n AWS_REGION_US_EAST_1,\n \"prowler-test\",\n \"\",\n )\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"s3_bucket_policy_public_write_access\",\n AWS_REGION_US_EAST_1,\n \"test-prowler\",\n \"\",\n )\n\n assert not (\n is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"iam_user_hardware_mfa_enabled\",\n AWS_REGION_US_EAST_1,\n \"test\",\n \"\",\n )\n )\n\n def test_is_allowlisted_lambda_generic_check(self):\n allowlisted_checks = {\n \"lambda_*\": {\n \"Regions\": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],\n \"Resources\": [\"*\"],\n }\n }\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"awslambda_function_no_secrets_in_code\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"awslambda_function_no_secrets_in_variables\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"awslambda_function_not_publicly_accessible\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"awslambda_function_url_cors_policy\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"awslambda_function_url_public\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"awslambda_function_using_supported_runtimes\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n def test_is_allowlisted_lambda_concrete_check(self):\n allowlisted_checks = {\n \"lambda_function_no_secrets_in_variables\": {\n \"Regions\": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],\n \"Resources\": [\"*\"],\n }\n }\n\n assert is_allowlisted_in_check(\n allowlisted_checks,\n AWS_ACCOUNT_NUMBER,\n \"awslambda_function_no_secrets_in_variables\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"\",\n )\n\n def test_is_allowlisted_tags(self):\n # Allowlist example\n allowlist = {\n \"Accounts\": {\n \"*\": {\n \"Checks\": {\n \"check_test\": {\n \"Regions\": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],\n \"Resources\": [\"*\"],\n \"Tags\": [\"environment=dev\", \"project=.*\"],\n }\n }\n }\n }\n }\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n \"prowler\",\n \"environment=dev\",\n )\n\n assert is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n AWS_REGION_US_EAST_1,\n 
\"prowler-test\",\n \"environment=dev | project=prowler\",\n )\n\n assert not (\n is_allowlisted(\n allowlist,\n AWS_ACCOUNT_NUMBER,\n \"check_test\",\n \"us-east-2\",\n \"test\",\n \"environment=pro\",\n )\n )\n\n def test_is_allowlisted_in_tags(self):\n allowlist_tags = [\"environment=dev\", \"project=prowler\"]\n\n assert is_allowlisted_in_tags(allowlist_tags, \"environment=dev\")\n\n assert is_allowlisted_in_tags(\n allowlist_tags,\n \"environment=dev | project=prowler\",\n )\n\n assert not (\n is_allowlisted_in_tags(\n allowlist_tags,\n \"environment=pro\",\n )\n )\n\n def test_is_allowlisted_in_tags_regex(self):\n allowlist_tags = [\"environment=(dev|test)\", \".*=prowler\"]\n\n assert is_allowlisted_in_tags(\n allowlist_tags,\n \"environment=test | proj=prowler\",\n )\n\n assert is_allowlisted_in_tags(\n allowlist_tags,\n \"env=prod | project=prowler\",\n )\n\n assert not is_allowlisted_in_tags(\n allowlist_tags,\n \"environment=prod | project=myproj\",\n )\n\n def test_is_allowlisted_in_tags_with_no_tags_in_finding(self):\n allowlist_tags = [\"environment=(dev|test)\", \".*=prowler\"]\n finding_tags = \"\"\n\n assert not is_allowlisted_in_tags(allowlist_tags, finding_tags)\n\n def test_is_excepted(self):\n # Allowlist example\n exceptions = {\n \"Accounts\": [AWS_ACCOUNT_NUMBER],\n \"Regions\": [\"eu-central-1\", \"eu-south-3\"],\n \"Resources\": [\"test\"],\n \"Tags\": [\"environment=test\", \"project=.*\"],\n }\n\n assert is_excepted(\n exceptions,\n AWS_ACCOUNT_NUMBER,\n \"eu-central-1\",\n \"test\",\n \"environment=test\",\n )\n\n assert is_excepted(\n exceptions,\n AWS_ACCOUNT_NUMBER,\n \"eu-south-3\",\n \"test\",\n \"environment=test\",\n )\n\n assert is_excepted(\n exceptions,\n AWS_ACCOUNT_NUMBER,\n \"eu-south-3\",\n \"test123\",\n \"environment=test\",\n )\n\n def test_is_excepted_all_wildcard(self):\n exceptions = {\n \"Accounts\": [\"*\"],\n \"Regions\": [\"*\"],\n \"Resources\": [\"*\"],\n \"Tags\": [\"*\"],\n }\n assert is_excepted(\n exceptions, AWS_ACCOUNT_NUMBER, \"eu-south-2\", \"test\", \"environment=test\"\n )\n assert not is_excepted(\n exceptions, AWS_ACCOUNT_NUMBER, \"eu-south-2\", \"test\", None\n )\n\n def test_is_not_excepted(self):\n exceptions = {\n \"Accounts\": [AWS_ACCOUNT_NUMBER],\n \"Regions\": [\"eu-central-1\", \"eu-south-3\"],\n \"Resources\": [\"test\"],\n \"Tags\": [\"environment=test\", \"project=.*\"],\n }\n\n assert not is_excepted(\n exceptions,\n AWS_ACCOUNT_NUMBER,\n \"eu-south-2\",\n \"test\",\n \"environment=test\",\n )\n\n assert not is_excepted(\n exceptions,\n AWS_ACCOUNT_NUMBER,\n \"eu-south-3\",\n \"prowler\",\n \"environment=test\",\n )\n\n assert not is_excepted(\n exceptions,\n AWS_ACCOUNT_NUMBER,\n \"eu-south-3\",\n \"test\",\n \"environment=pro\",\n )\n\n def test_is_allowlisted_in_resource(self):\n allowlist_resources = [\"prowler\", \"^test\", \"prowler-pro\"]\n\n assert is_allowlisted_in_resource(allowlist_resources, \"prowler\")\n assert is_allowlisted_in_resource(allowlist_resources, \"prowler-test\")\n assert is_allowlisted_in_resource(allowlist_resources, \"test-prowler\")\n assert not is_allowlisted_in_resource(allowlist_resources, \"random\")\n",
"path": "tests/providers/aws/lib/allowlist/allowlist_test.py"
}
] | 11_5 | python | import unittest
import sys
class Test_Allowlist(unittest.TestCase):
def test_is_allowlisted_all_and_single_account_with_different_resources(self):
from prowler.providers.aws.lib.allowlist.allowlist import (
is_allowlisted,
is_excepted,
)
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_US_EAST_1,
)
# Allowlist example
allowlist = {
"Accounts": {
"*": {
"Checks": {
"check_test_1": {
"Regions": ["*"],
"Resources": ["resource_1", "resource_2"],
},
}
},
AWS_ACCOUNT_NUMBER: {
"Checks": {
"check_test_1": {
"Regions": ["*"],
"Resources": ["resource_3"],
}
}
},
}
}
assert is_allowlisted(
allowlist,
"111122223333",
"check_test_1",
AWS_REGION_US_EAST_1,
"resource_1",
"",
)
assert is_allowlisted(
allowlist,
"111122223333",
"check_test_1",
AWS_REGION_US_EAST_1,
"resource_2",
"",
)
assert not is_allowlisted(
allowlist,
"111122223333",
"check_test_1",
AWS_REGION_US_EAST_1,
"resource_3",
"",
)
assert is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test_1",
AWS_REGION_US_EAST_1,
"resource_3",
"",
)
assert is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test_1",
AWS_REGION_US_EAST_1,
"resource_2",
"",
)
def test_is_allowlisted_all_and_single_account_with_different_resources_and_exceptions(
self,
):
from prowler.providers.aws.lib.allowlist.allowlist import (
is_allowlisted,
is_excepted,
)
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_WEST_1,
AWS_REGION_US_EAST_1,
)
# Allowlist example
allowlist = {
"Accounts": {
"*": {
"Checks": {
"check_test_1": {
"Regions": ["*"],
"Resources": ["resource_1", "resource_2"],
"Exceptions": {"Regions": [AWS_REGION_US_EAST_1]},
},
}
},
AWS_ACCOUNT_NUMBER: {
"Checks": {
"check_test_1": {
"Regions": ["*"],
"Resources": ["resource_3"],
"Exceptions": {"Regions": [AWS_REGION_EU_WEST_1]},
}
}
},
}
}
assert not is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test_1",
AWS_REGION_US_EAST_1,
"resource_2",
"",
)
assert not is_allowlisted(
allowlist,
"111122223333",
"check_test_1",
AWS_REGION_US_EAST_1,
"resource_1",
"",
)
assert is_allowlisted(
allowlist,
"111122223333",
"check_test_1",
AWS_REGION_EU_WEST_1,
"resource_2",
"",
)
assert not is_allowlisted(
allowlist,
"111122223333",
"check_test_1",
AWS_REGION_US_EAST_1,
"resource_3",
"",
)
assert is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test_1",
AWS_REGION_US_EAST_1,
"resource_3",
"",
)
assert not is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test_1",
AWS_REGION_EU_WEST_1,
"resource_3",
"",
)
def test_is_allowlisted_complex_allowlist(self):
from prowler.providers.aws.lib.allowlist.allowlist import (
is_allowlisted,
is_excepted,
)
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_SOUTH_3,
AWS_REGION_EU_WEST_1,
AWS_REGION_US_EAST_1,
)
# Allowlist example
allowlist = {
"Accounts": {
"*": {
"Checks": {
"s3_bucket_object_versioning": {
"Regions": [AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1],
"Resources": ["ci-logs", "logs", ".+-logs"],
},
"ecs_task_definitions_no_environment_secrets": {
"Regions": ["*"],
"Resources": ["*"],
"Exceptions": {
"Accounts": [AWS_ACCOUNT_NUMBER],
"Regions": [
AWS_REGION_EU_WEST_1,
AWS_REGION_EU_SOUTH_3,
],
},
},
"*": {
"Regions": ["*"],
"Resources": ["*"],
"Tags": ["environment=dev"],
},
}
},
AWS_ACCOUNT_NUMBER: {
"Checks": {
"*": {
"Regions": ["*"],
"Resources": ["*"],
"Exceptions": {
"Resources": ["test"],
"Tags": ["environment=prod"],
},
}
}
},
}
}
assert is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"test_check",
AWS_REGION_EU_WEST_1,
"prowler-logs",
"environment=dev",
)
assert is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"ecs_task_definitions_no_environment_secrets",
AWS_REGION_EU_WEST_1,
"prowler",
"environment=dev",
)
assert is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"s3_bucket_object_versioning",
AWS_REGION_EU_WEST_1,
"prowler-logs",
"environment=dev",
)
def test_is_excepted_only_in_account(self):
from prowler.providers.aws.lib.allowlist.allowlist import (
is_allowlisted,
is_excepted,
)
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER
)
# Allowlist example
exceptions = {
"Accounts": [AWS_ACCOUNT_NUMBER],
"Regions": [],
"Resources": [],
"Tags": [],
}
assert is_excepted(
exceptions,
AWS_ACCOUNT_NUMBER,
"eu-central-1",
"test",
"environment=test",
)
def test_is_excepted_only_in_region(self):
from prowler.providers.aws.lib.allowlist.allowlist import (
is_allowlisted,
is_excepted,
)
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_CENTRAL_1,
AWS_REGION_EU_SOUTH_3,
)
# Allowlist example
exceptions = {
"Accounts": [],
"Regions": [AWS_REGION_EU_CENTRAL_1, AWS_REGION_EU_SOUTH_3],
"Resources": [],
"Tags": [],
}
assert is_excepted(
exceptions,
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_CENTRAL_1,
"test",
"environment=test",
)
def test_is_excepted_only_in_resources(self):
from prowler.providers.aws.lib.allowlist.allowlist import (
is_allowlisted,
is_excepted,
)
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_CENTRAL_1,
)
# Allowlist example
exceptions = {
"Accounts": [],
"Regions": [],
"Resources": ["resource_1"],
"Tags": [],
}
assert is_excepted(
exceptions,
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_CENTRAL_1,
"resource_1",
"environment=test",
)
def test_is_excepted_only_in_tags(self):
from prowler.providers.aws.lib.allowlist.allowlist import (
is_allowlisted,
is_excepted,
)
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_CENTRAL_1,
)
# Allowlist example
exceptions = {
"Accounts": [],
"Regions": [],
"Resources": [],
"Tags": ["environment=test"],
}
assert is_excepted(
exceptions,
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_CENTRAL_1,
"resource_1",
"environment=test",
)
def test_is_excepted_in_account_and_tags(self):
from prowler.providers.aws.lib.allowlist.allowlist import (
is_allowlisted,
is_excepted,
)
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_CENTRAL_1,
)
# Allowlist example
exceptions = {
"Accounts": [AWS_ACCOUNT_NUMBER],
"Regions": [],
"Resources": [],
"Tags": ["environment=test"],
}
assert is_excepted(
exceptions,
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_CENTRAL_1,
"resource_1",
"environment=test",
)
assert not is_excepted(
exceptions,
"111122223333",
AWS_REGION_EU_CENTRAL_1,
"resource_1",
"environment=test",
)
assert not is_excepted(
exceptions,
"111122223333",
AWS_REGION_EU_CENTRAL_1,
"resource_1",
"environment=dev",
)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(Test_Allowlist))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/prowler | Your goal is to validate incoming S3 bucket names. Create the validate_bucket function in `arguments.py` (sketched below), which uses a specific regex to validate AWS S3 bucket names as per AWS's naming rules. The regex should check that the bucket name does not start with 'xn--' or end with '-s3alias', starts with a lowercase letter or number, and contains only lowercase letters, numbers, and hyphens. If a name doesn't match, ArgumentTypeError is raised with a message pointing to AWS's bucket naming rules: 'Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)'. Update the argument parsers handling AWS S3 buckets to use this new validation function, ensuring only valid bucket names are processed by the CLI. | fdeb523 | about-time==4.2.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece \
--hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341
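For illustration of the row above: a minimal sketch of the validate_bucket helper its instruction asks for, assuming argparse. The regex below is one plausible encoding of the stated rules, and the --output-bucket flag in the trailing usage comment is hypothetical; neither is taken from the graded prowler solution.

import re
from argparse import ArgumentTypeError


def validate_bucket(bucket_name: str) -> str:
    """Return bucket_name unchanged if it is a plausible S3 bucket name, else raise."""
    # Assumed regex: a lookahead rejects an 'xn--' prefix or '-s3alias' suffix,
    # then the body requires 3-63 characters that begin and end with a lowercase
    # letter or digit and use only lowercase letters, digits and hyphens between.
    if re.search(r"(?!(^xn--|.+-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$", bucket_name):
        return bucket_name
    raise ArgumentTypeError(
        "Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)"
    )


# Hypothetical wiring into an argparse parser (the flag name is illustrative):
# parser.add_argument("--output-bucket", type=validate_bucket)

Raising ArgumentTypeError from the type= callable lets argparse surface the quoted message as a normal usage error, which matches the behavior the instruction describes.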
adal==1.2.7 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d \
--hash=sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1
alive-progress==3.1.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5 \
--hash=sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98
attrs==23.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
--hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
awsipranges==0.3.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0 \
--hash=sha256:f3d7a54aeaf7fe310beb5d377a4034a63a51b72677ae6af3e0967bc4de7eedaf
azure-common==1.1.28 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3 \
--hash=sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad
azure-core==1.28.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9 \
--hash=sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd
azure-identity==1.15.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8 \
--hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912
azure-mgmt-authorization==4.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4 \
--hash=sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a
azure-mgmt-core==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d \
--hash=sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae
azure-mgmt-security==5.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38b03efe82c2344cea203fda95e6d00b7ac22782fa1c0b585cd0ea2c8ff3e702 \
--hash=sha256:73a74ce8f6ffb1b345ce101c8abdd42238f161f0988d168d23918feda0089654
azure-mgmt-sql==3.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:129042cc011225e27aee6ef2697d585fa5722e5d1aeb0038af6ad2451a285457 \
--hash=sha256:1d1dd940d4d41be4ee319aad626341251572a5bf4a2addec71779432d9a1381f
azure-mgmt-storage==21.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc \
--hash=sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162
azure-mgmt-subscription==3.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38d4574a8d47fa17e3587d756e296cb63b82ad8fb21cd8543bcee443a502bf48 \
--hash=sha256:4e255b4ce9b924357bb8c5009b3c88a2014d3203b2495e2256fa027bf84e800e
azure-storage-blob==12.19.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897 \
--hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b
boto3==1.26.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30 \
--hash=sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c
botocore==1.29.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df \
--hash=sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210
cachetools==5.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
certifi==2023.7.22 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
cffi==1.15.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
click==8.1.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
colorama==0.4.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
contextlib2==21.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \
--hash=sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869
cryptography==41.0.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \
--hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \
--hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \
--hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \
--hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \
--hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \
--hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \
--hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \
--hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \
--hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \
--hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \
--hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \
--hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \
--hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \
--hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \
--hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \
--hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \
--hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \
--hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \
--hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \
--hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \
--hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \
--hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f
detect-secrets==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd \
--hash=sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833
filelock==3.12.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81 \
--hash=sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec
google-api-core==2.11.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \
--hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e
google-api-python-client==2.108.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6396efca83185fb205c0abdbc1c2ee57b40475578c6af37f6d0e30a639aade99 \
--hash=sha256:9d1327213e388943ebcd7db5ce6e7f47987a7e6874e3e1f6116010eea4a0e75d
google-auth-httplib2==0.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c \
--hash=sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29
google-auth==2.17.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
--hash=sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f
googleapis-common-protos==1.59.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44 \
--hash=sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f
grapheme==0.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca
httplib2==0.22.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \
--hash=sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81
idna==3.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
isodate==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 \
--hash=sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9
jmespath==1.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
jsonschema-specifications==2023.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \
--hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28
jsonschema==4.18.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8caf5b57a990a98e9b39832ef3cb35c176fe331414252b6e1b26fd5866f891a4 \
--hash=sha256:b508dd6142bd03f4c3670534c80af68cd7bbff9ea830b9cf2625d4a3c49ddf60
msal-extensions==1.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee \
--hash=sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354
msal==1.24.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8 \
--hash=sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad
msgraph-core==0.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:147324246788abe8ed7e05534cd9e4e0ec98b33b30e011693b8d014cebf97f63 \
--hash=sha256:e297564b9a0ca228493d8851f95cb2de9522143d82efa40ce3a6ad286e21392e
msrest==0.7.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32 \
--hash=sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9
msrestazure==0.6.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9 \
--hash=sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189
oauthlib==3.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \
--hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918
portalocker==2.7.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51 \
--hash=sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983
protobuf==4.23.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \
--hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \
--hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \
--hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \
--hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \
--hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \
--hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \
--hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \
--hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \
--hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \
--hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \
--hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2
pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
--hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
pycparser==2.21 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.13 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \
--hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \
--hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \
--hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \
--hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \
--hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \
--hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \
--hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \
--hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \
--hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \
--hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \
--hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \
--hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \
--hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \
--hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \
--hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \
--hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \
--hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \
--hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \
--hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \
--hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \
--hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \
--hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \
--hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \
--hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \
--hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \
--hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \
--hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \
--hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \
--hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \
--hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \
--hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \
--hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \
--hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \
--hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0
pyjwt==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyjwt[crypto]==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
pywin32==306 ; python_version >= "3.9" and platform_system == "Windows" and python_version < "3.12" \
--hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \
--hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \
--hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \
--hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \
--hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \
--hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \
--hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \
--hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \
--hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \
--hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \
--hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \
--hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0
pyyaml==6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
referencing==0.29.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \
--hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f
requests-file==1.5.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \
--hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953
requests-oauthlib==1.3.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \
--hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a
requests==2.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
rpds-py==0.8.10 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \
--hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \
--hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \
--hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \
--hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \
--hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \
--hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \
--hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \
--hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \
--hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \
--hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \
--hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \
--hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \
--hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \
--hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \
--hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \
--hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \
--hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \
--hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \
--hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \
--hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \
--hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \
--hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \
--hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \
--hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \
--hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \
--hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \
--hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \
--hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \
--hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \
--hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \
--hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \
--hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \
--hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \
--hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \
--hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \
--hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \
--hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \
--hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \
--hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \
--hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \
--hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \
--hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \
--hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \
--hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \
--hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \
--hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \
--hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \
--hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \
--hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \
--hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \
--hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \
--hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \
--hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \
--hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \
--hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \
--hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \
--hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \
--hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \
--hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \
--hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \
--hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \
--hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \
--hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \
--hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \
--hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \
--hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \
--hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \
--hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \
--hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \
--hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \
--hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \
--hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \
--hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \
--hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \
--hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \
--hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \
--hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \
--hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \
--hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \
--hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84
rsa==4.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
s3transfer==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346 \
--hash=sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9
schema==0.7.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197 \
--hash=sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c
shodan==1.30.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:bedb6e8c2b4459592c1bc17b4d4b57dab0cb58a455ad589ee26a6304242cd505
six==1.16.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
slack-sdk==3.24.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:741ea5381e65f4407d24ed81203912cbd6bfe807a6704b1d3c5ad346c86000b6 \
--hash=sha256:cae64f0177a53d34cca59cc691d4535edd18929843a936b97cea421db9e4fbfe
tabulate==0.9.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \
--hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f
tldextract==3.4.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2 \
--hash=sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234
typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
urllib3==1.26.18 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
--hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
xlsxwriter==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122 \
--hash=sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de | python3.9 | c8831f0f | diff --git a/prowler/providers/aws/lib/arguments/arguments.py b/prowler/providers/aws/lib/arguments/arguments.py
--- a/prowler/providers/aws/lib/arguments/arguments.py
+++ b/prowler/providers/aws/lib/arguments/arguments.py
@@ -1,4 +1,5 @@
 from argparse import ArgumentTypeError, Namespace
+from re import search
 
 from prowler.providers.aws.aws_provider import get_aws_available_regions
 from prowler.providers.aws.lib.arn.arn import arn_type
@@ -104,6 +105,7 @@ def init_parser(self):
"-B",
"--output-bucket",
nargs="?",
+ type=validate_bucket,
default=None,
help="Custom output bucket, requires -M <mode> and it can work also with -o flag.",
)
@@ -111,6 +113,7 @@ def init_parser(self):
"-D",
"--output-bucket-no-assume",
nargs="?",
+ type=validate_bucket,
default=None,
help="Same as -B but do not use the assumed role credentials to put objects to the bucket, instead uses the initial credentials.",
)
@@ -190,3 +193,13 @@ def validate_arguments(arguments: Namespace) -> tuple[bool, str]:
             return (False, "To use -I/-T options -R option is needed")
 
     return (True, "")
+
+
+def validate_bucket(bucket_name):
+ """validate_bucket validates that the input bucket_name is valid"""
+ if search("(?!(^xn--|.+-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$", bucket_name):
+ return bucket_name
+ else:
+ raise ArgumentTypeError(
+ "Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)"
+ )
diff --git a/tests/lib/cli/parser_test.py b/tests/lib/cli/parser_test.py
--- a/tests/lib/cli/parser_test.py
+++ b/tests/lib/cli/parser_test.py
@@ -5,6 +5,7 @@ import pytest
 from mock import patch
 
 from prowler.lib.cli.parser import ProwlerArgumentParser
+from prowler.providers.aws.lib.arguments.arguments import validate_bucket
 from prowler.providers.azure.lib.arguments.arguments import validate_azure_region
 
 prowler_command = "prowler"
@@ -1138,3 +1139,28 @@ class Test_Parser:
match=f"Region {invalid_region} not allowed, allowed regions are {' '.join(expected_regions)}",
):
validate_azure_region(invalid_region)
+
+ def test_validate_bucket_invalid_bucket_names(self):
+ bad_bucket_names = [
+ "xn--bucket-name",
+ "mrryadfpcwlscicvnrchmtmyhwrvzkgfgdxnlnvaaummnywciixnzvycnzmhhpwb",
+ "192.168.5.4",
+ "bucket-name-s3alias",
+ "bucket-name-s3alias-",
+ "bucket-n$ame",
+ "bu",
+ ]
+ for bucket_name in bad_bucket_names:
+ with pytest.raises(ArgumentTypeError) as argument_error:
+ validate_bucket(bucket_name)
+
+ assert argument_error.type == ArgumentTypeError
+ assert (
+ argument_error.value.args[0]
+ == "Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)"
+ )
+
+ def test_validate_bucket_valid_bucket_names(self):
+        valid_bucket_names = ["bucket-name", "test", "test-test-test"]
+ for bucket_name in valid_bucket_names:
+ assert validate_bucket(bucket_name) == bucket_name
| [
{
"content": "from argparse import ArgumentTypeError, Namespace\n\nfrom prowler.providers.aws.aws_provider import get_aws_available_regions\nfrom prowler.providers.aws.lib.arn.arn import arn_type\n\n\ndef init_parser(self):\n \"\"\"Init the AWS Provider CLI parser\"\"\"\n aws_parser = self.subparsers.add_parser(\n \"aws\", parents=[self.common_providers_parser], help=\"AWS Provider\"\n )\n # Authentication Methods\n aws_auth_subparser = aws_parser.add_argument_group(\"Authentication Modes\")\n aws_auth_subparser.add_argument(\n \"-p\",\n \"--profile\",\n nargs=\"?\",\n default=None,\n help=\"AWS profile to launch prowler with\",\n )\n aws_auth_subparser.add_argument(\n \"-R\",\n \"--role\",\n nargs=\"?\",\n default=None,\n help=\"ARN of the role to be assumed\",\n # Pending ARN validation\n )\n aws_auth_subparser.add_argument(\n \"--sts-endpoint-region\",\n nargs=\"?\",\n default=None,\n help=\"Specify the AWS STS endpoint region to use. Read more at https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html\",\n )\n aws_auth_subparser.add_argument(\n \"--mfa\",\n action=\"store_true\",\n help=\"IAM entity enforces MFA so you need to input the MFA ARN and the TOTP\",\n )\n aws_auth_subparser.add_argument(\n \"-T\",\n \"--session-duration\",\n nargs=\"?\",\n default=3600,\n type=validate_session_duration,\n help=\"Assumed role session duration in seconds, must be between 900 and 43200. Default: 3600\",\n # Pending session duration validation\n )\n aws_auth_subparser.add_argument(\n \"-I\",\n \"--external-id\",\n nargs=\"?\",\n default=None,\n help=\"External ID to be passed when assuming role\",\n )\n # AWS Regions\n aws_regions_subparser = aws_parser.add_argument_group(\"AWS Regions\")\n aws_regions_subparser.add_argument(\n \"-f\",\n \"--region\",\n \"--filter-region\",\n nargs=\"+\",\n help=\"AWS region names to run Prowler against\",\n choices=get_aws_available_regions(),\n )\n # AWS Organizations\n aws_orgs_subparser = aws_parser.add_argument_group(\"AWS Organizations\")\n aws_orgs_subparser.add_argument(\n \"-O\",\n \"--organizations-role\",\n nargs=\"?\",\n help=\"Specify AWS Organizations management role ARN to be assumed, to get Organization metadata\",\n )\n # AWS Security Hub\n aws_security_hub_subparser = aws_parser.add_argument_group(\"AWS Security Hub\")\n aws_security_hub_subparser.add_argument(\n \"-S\",\n \"--security-hub\",\n action=\"store_true\",\n help=\"Send check output to AWS Security Hub\",\n )\n aws_security_hub_subparser.add_argument(\n \"--skip-sh-update\",\n action=\"store_true\",\n help=\"Skip updating previous findings of Prowler in Security Hub\",\n )\n aws_security_hub_subparser.add_argument(\n \"--send-sh-only-fails\",\n action=\"store_true\",\n help=\"Send only Prowler failed findings to SecurityHub\",\n )\n # AWS Quick Inventory\n aws_quick_inventory_subparser = aws_parser.add_argument_group(\"Quick Inventory\")\n aws_quick_inventory_subparser.add_argument(\n \"-i\",\n \"--quick-inventory\",\n action=\"store_true\",\n help=\"Run Prowler Quick Inventory. 
The inventory will be stored in an output csv by default\",\n )\n # AWS Outputs\n aws_outputs_subparser = aws_parser.add_argument_group(\"AWS Outputs to S3\")\n aws_outputs_bucket_parser = aws_outputs_subparser.add_mutually_exclusive_group()\n aws_outputs_bucket_parser.add_argument(\n \"-B\",\n \"--output-bucket\",\n nargs=\"?\",\n default=None,\n help=\"Custom output bucket, requires -M <mode> and it can work also with -o flag.\",\n )\n aws_outputs_bucket_parser.add_argument(\n \"-D\",\n \"--output-bucket-no-assume\",\n nargs=\"?\",\n default=None,\n help=\"Same as -B but do not use the assumed role credentials to put objects to the bucket, instead uses the initial credentials.\",\n )\n aws_3rd_party_subparser = aws_parser.add_argument_group(\"3rd Party Integrations\")\n aws_3rd_party_subparser.add_argument(\n \"-N\",\n \"--shodan\",\n nargs=\"?\",\n default=None,\n help=\"Shodan API key used by check ec2_elastic_ip_shodan.\",\n )\n # Allowlist\n allowlist_subparser = aws_parser.add_argument_group(\"Allowlist\")\n allowlist_subparser.add_argument(\n \"-w\",\n \"--allowlist-file\",\n nargs=\"?\",\n default=None,\n help=\"Path for allowlist yaml file. See example prowler/config/aws_allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table or Lambda ARNs or S3 URIs, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/\",\n )\n\n # Based Scans\n aws_based_scans_subparser = aws_parser.add_argument_group(\"AWS Based Scans\")\n aws_based_scans_parser = aws_based_scans_subparser.add_mutually_exclusive_group()\n aws_based_scans_parser.add_argument(\n \"--resource-tags\",\n nargs=\"+\",\n default=None,\n help=\"Scan only resources with specific AWS Tags (Key=Value), e.g., Environment=dev Project=prowler\",\n )\n aws_based_scans_parser.add_argument(\n \"--resource-arn\",\n nargs=\"+\",\n type=arn_type,\n default=None,\n help=\"Scan only resources with specific AWS Resource ARNs, e.g., arn:aws:iam::012345678910:user/test arn:aws:ec2:us-east-1:123456789012:vpc/vpc-12345678\",\n )\n\n # Boto3 Config\n boto3_config_subparser = aws_parser.add_argument_group(\"Boto3 Config\")\n boto3_config_subparser.add_argument(\n \"--aws-retries-max-attempts\",\n nargs=\"?\",\n default=None,\n type=int,\n help=\"Set the maximum attemps for the Boto3 standard retrier config (Default: 3)\",\n )\n\n # Ignore Unused Services\n ignore_unused_services_subparser = aws_parser.add_argument_group(\n \"Ignore Unused Services\"\n )\n ignore_unused_services_subparser.add_argument(\n \"--ignore-unused-services\",\n action=\"store_true\",\n help=\"Ignore findings in unused services\",\n )\n\n\ndef validate_session_duration(duration):\n \"\"\"validate_session_duration validates that the AWS STS Assume Role Session Duration is between 900 and 43200 seconds.\"\"\"\n duration = int(duration)\n # Since the range(i,j) goes from i to j-1 we have to j+1\n if duration not in range(900, 43201):\n raise ArgumentTypeError(\"Session duration must be between 900 and 43200\")\n return duration\n\n\ndef validate_arguments(arguments: Namespace) -> tuple[bool, str]:\n \"\"\"validate_arguments returns {True, \"} if the provider arguments passed are valid and can be used together. 
It performs an extra validation, specific for the AWS provider, apart from the argparse lib.\"\"\"\n\n # Handle if session_duration is not the default value or external_id is set\n if (\n arguments.session_duration and arguments.session_duration != 3600\n ) or arguments.external_id:\n if not arguments.role:\n return (False, \"To use -I/-T options -R option is needed\")\n\n return (True, \"\")\n",
"path": "prowler/providers/aws/lib/arguments/arguments.py"
},
{
"content": "import uuid\nfrom argparse import ArgumentTypeError\n\nimport pytest\nfrom mock import patch\n\nfrom prowler.lib.cli.parser import ProwlerArgumentParser\nfrom prowler.providers.azure.lib.arguments.arguments import validate_azure_region\n\nprowler_command = \"prowler\"\n\n# capsys\n# https://docs.pytest.org/en/7.1.x/how-to/capture-stdout-stderr.html\nprowler_default_usage_error = \"usage: prowler [-h] [-v] {aws,azure,gcp} ...\"\n\n\ndef mock_get_available_providers():\n return [\"aws\", \"azure\", \"gcp\"]\n\n\nclass Test_Parser:\n def setup_method(self):\n # We need this to mock the get_available_providers function call\n # since the importlib.import_module is not working starting from the test class\n self.patch_get_available_providers = patch(\n \"prowler.providers.common.arguments.get_available_providers\",\n new=mock_get_available_providers,\n )\n self.patch_get_available_providers.start()\n\n # Init parser\n self.parser = ProwlerArgumentParser()\n\n def test_default_parser_no_arguments_aws(self):\n provider = \"aws\"\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert parsed.provider == provider\n assert not parsed.quiet\n assert len(parsed.output_modes) == 4\n assert \"csv\" in parsed.output_modes\n assert \"html\" in parsed.output_modes\n assert \"json\" in parsed.output_modes\n assert not parsed.output_filename\n assert \"output\" in parsed.output_directory\n assert not parsed.verbose\n assert not parsed.no_banner\n assert not parsed.slack\n assert not parsed.unix_timestamp\n assert parsed.log_level == \"CRITICAL\"\n assert not parsed.log_file\n assert not parsed.only_logs\n assert not parsed.checks\n assert not parsed.checks_file\n assert not parsed.checks_folder\n assert not parsed.services\n assert not parsed.severity\n assert not parsed.compliance\n assert len(parsed.categories) == 0\n assert not parsed.excluded_checks\n assert not parsed.excluded_services\n assert not parsed.list_checks\n assert not parsed.list_services\n assert not parsed.list_compliance\n assert not parsed.list_compliance_requirements\n assert not parsed.list_categories\n assert not parsed.profile\n assert not parsed.role\n assert parsed.session_duration == 3600\n assert not parsed.external_id\n assert not parsed.region\n assert not parsed.organizations_role\n assert not parsed.security_hub\n assert not parsed.quick_inventory\n assert not parsed.output_bucket\n assert not parsed.output_bucket_no_assume\n assert not parsed.shodan\n assert not parsed.allowlist_file\n assert not parsed.resource_tags\n assert not parsed.ignore_unused_services\n\n def test_default_parser_no_arguments_azure(self):\n provider = \"azure\"\n command = [prowler_command, provider]\n parsed = self.parser.parse(command)\n assert parsed.provider == provider\n assert not parsed.quiet\n assert len(parsed.output_modes) == 4\n assert \"csv\" in parsed.output_modes\n assert \"html\" in parsed.output_modes\n assert \"json\" in parsed.output_modes\n assert not parsed.output_filename\n assert \"output\" in parsed.output_directory\n assert not parsed.verbose\n assert not parsed.no_banner\n assert not parsed.slack\n assert not parsed.unix_timestamp\n assert parsed.log_level == \"CRITICAL\"\n assert not parsed.log_file\n assert not parsed.only_logs\n assert not parsed.checks\n assert not parsed.checks_file\n assert not parsed.checks_folder\n assert not parsed.services\n assert not parsed.severity\n assert not parsed.compliance\n assert len(parsed.categories) == 0\n assert not parsed.excluded_checks\n assert not 
parsed.excluded_services\n assert not parsed.list_checks\n assert not parsed.list_services\n assert not parsed.list_compliance\n assert not parsed.list_compliance_requirements\n assert not parsed.list_categories\n assert len(parsed.subscription_ids) == 0\n assert not parsed.az_cli_auth\n assert not parsed.sp_env_auth\n assert not parsed.browser_auth\n assert not parsed.managed_identity_auth\n\n def test_default_parser_no_arguments_gcp(self):\n provider = \"gcp\"\n command = [prowler_command, provider]\n parsed = self.parser.parse(command)\n assert parsed.provider == provider\n assert not parsed.quiet\n assert len(parsed.output_modes) == 4\n assert \"csv\" in parsed.output_modes\n assert \"html\" in parsed.output_modes\n assert \"json\" in parsed.output_modes\n assert not parsed.output_filename\n assert \"output\" in parsed.output_directory\n assert not parsed.verbose\n assert not parsed.no_banner\n assert not parsed.slack\n assert not parsed.unix_timestamp\n assert parsed.log_level == \"CRITICAL\"\n assert not parsed.log_file\n assert not parsed.only_logs\n assert not parsed.checks\n assert not parsed.checks_file\n assert not parsed.checks_folder\n assert not parsed.services\n assert not parsed.severity\n assert not parsed.compliance\n assert len(parsed.categories) == 0\n assert not parsed.excluded_checks\n assert not parsed.excluded_services\n assert not parsed.list_checks\n assert not parsed.list_services\n assert not parsed.list_compliance\n assert not parsed.list_compliance_requirements\n assert not parsed.list_categories\n assert not parsed.credentials_file\n\n def test_root_parser_version_short(self):\n command = [prowler_command, \"-v\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 0\n\n def test_root_parser_version_long(self):\n command = [prowler_command, \"--version\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 0\n\n def test_root_parser_help_short(self):\n command = [prowler_command, \"-h\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 0\n\n def test_root_parser_help_long(self):\n command = [prowler_command, \"--help\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 0\n\n def test_root_parser_default_aws_provider(self):\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"aws\"\n\n def test_root_parser_aws_provider(self):\n command = [prowler_command, \"aws\"]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"aws\"\n\n def test_root_parser_azure_provider(self):\n command = [prowler_command, \"azure\"]\n parsed = self.parser.parse(command)\n print(parsed)\n assert parsed.provider == \"azure\"\n\n def test_root_parser_gcp_provider(self):\n command = [prowler_command, \"gcp\"]\n parsed = self.parser.parse(command)\n print(parsed)\n assert parsed.provider == \"gcp\"\n\n def test_root_parser_quiet_short(self):\n command = [prowler_command, \"-q\"]\n parsed = self.parser.parse(command)\n assert parsed.quiet\n\n def test_root_parser_quiet_long(self):\n command = [prowler_command, \"--quiet\"]\n parsed = self.parser.parse(command)\n assert parsed.quiet\n\n def 
test_root_parser_exit_code_3_short(self):\n command = [prowler_command, \"-z\"]\n parsed = self.parser.parse(command)\n assert parsed.ignore_exit_code_3\n\n def test_root_parser_exit_code_3_long(self):\n command = [prowler_command, \"--ignore-exit-code-3\"]\n parsed = self.parser.parse(command)\n assert parsed.ignore_exit_code_3\n\n def test_root_parser_default_output_modes(self):\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert len(parsed.output_modes) == 4\n assert \"csv\" in parsed.output_modes\n assert \"json\" in parsed.output_modes\n assert \"html\" in parsed.output_modes\n\n def test_root_parser_output_modes_short(self):\n command = [prowler_command, \"-M\", \"csv\"]\n parsed = self.parser.parse(command)\n assert len(parsed.output_modes) == 1\n assert \"csv\" in parsed.output_modes\n\n def test_root_parser_output_modes_long(self):\n command = [prowler_command, \"--output-modes\", \"csv\"]\n parsed = self.parser.parse(command)\n assert len(parsed.output_modes) == 1\n assert \"csv\" in parsed.output_modes\n\n def test_root_parser_output_filename_short(self):\n filename = \"test_output.txt\"\n command = [prowler_command, \"-F\", filename]\n parsed = self.parser.parse(command)\n assert parsed.output_filename == filename\n\n def test_root_parser_output_filename_long(self):\n filename = \"test_output.txt\"\n command = [prowler_command, \"-F\", filename]\n parsed = self.parser.parse(command)\n assert parsed.output_filename == filename\n\n def test_root_parser_output_directory_default(self):\n dirname = \"output\"\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert dirname in parsed.output_directory\n\n def test_root_parser_output_directory_default_short(self):\n dirname = \"outputs\"\n command = [prowler_command, \"-o\", dirname]\n parsed = self.parser.parse(command)\n assert parsed.output_directory == dirname\n\n def test_root_parser_output_directory_default_long(self):\n dirname = \"outputs\"\n command = [prowler_command, \"--output-directory\", dirname]\n parsed = self.parser.parse(command)\n assert parsed.output_directory == dirname\n\n def test_root_parser_verbose(self):\n command = [prowler_command, \"--verbose\"]\n parsed = self.parser.parse(command)\n assert parsed.verbose\n\n def test_root_parser_no_banner_short(self):\n command = [prowler_command, \"-b\"]\n parsed = self.parser.parse(command)\n assert parsed.no_banner\n\n def test_root_parser_no_banner_long(self):\n command = [prowler_command, \"--no-banner\"]\n parsed = self.parser.parse(command)\n assert parsed.no_banner\n\n def test_root_parser_slack(self):\n command = [prowler_command, \"--slack\"]\n parsed = self.parser.parse(command)\n assert parsed.slack\n\n def test_root_parser_unix_timestamp(self):\n command = [prowler_command, \"--unix-timestamp\"]\n parsed = self.parser.parse(command)\n assert parsed.unix_timestamp\n\n def test_logging_parser_only_logs_set(self):\n command = [prowler_command, \"--only-logs\"]\n parsed = self.parser.parse(command)\n assert parsed.only_logs\n assert parsed.no_banner\n\n def test_logging_parser_log_level_default(self):\n log_level = \"CRITICAL\"\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_level_debug(self):\n log_level = \"DEBUG\"\n command = [prowler_command, \"--log-level\", log_level]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_level_info(self):\n log_level = \"INFO\"\n 
command = [prowler_command, \"--log-level\", log_level]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_level_warning(self):\n log_level = \"WARNING\"\n command = [prowler_command, \"--log-level\", log_level]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_level_error(self):\n log_level = \"ERROR\"\n command = [prowler_command, \"--log-level\", log_level]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_level_critical(self):\n log_level = \"CRITICAL\"\n command = [prowler_command, \"--log-level\", log_level]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_file_default(self):\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert not parsed.log_file\n\n def test_logging_parser_log_file(self):\n log_file = \"test.log\"\n command = [prowler_command, \"--log-file\", log_file]\n parsed = self.parser.parse(command)\n assert parsed.log_file == log_file\n\n def test_exclude_checks_parser_excluded_checks_short(self):\n excluded_checks = \"check_test\"\n command = [prowler_command, \"-e\", excluded_checks]\n parsed = self.parser.parse(command)\n assert excluded_checks in parsed.excluded_checks\n\n def test_exclude_checks_parser_excluded_checks_short_two(self):\n excluded_checks_1 = \"check_test_1\"\n excluded_checks_2 = \"check_test_2\"\n command = [prowler_command, \"-e\", excluded_checks_1, excluded_checks_2]\n parsed = self.parser.parse(command)\n assert len(parsed.excluded_checks) == 2\n assert excluded_checks_1 in parsed.excluded_checks\n assert excluded_checks_2 in parsed.excluded_checks\n\n def test_exclude_checks_parser_excluded_checks_long(self):\n excluded_check = \"check_test\"\n command = [prowler_command, \"--excluded-checks\", excluded_check]\n parsed = self.parser.parse(command)\n assert excluded_check in parsed.excluded_checks\n\n def test_exclude_checks_parser_excluded_checks_long_two(self):\n excluded_checks_1 = \"check_test_1\"\n excluded_checks_2 = \"check_test_2\"\n command = [\n prowler_command,\n \"--excluded-checks\",\n excluded_checks_1,\n excluded_checks_2,\n ]\n parsed = self.parser.parse(command)\n assert len(parsed.excluded_checks) == 2\n assert excluded_checks_1 in parsed.excluded_checks\n assert excluded_checks_2 in parsed.excluded_checks\n\n def test_exclude_checks_parser_excluded_services_long(self):\n excluded_service = \"accessanalyzer\"\n command = [prowler_command, \"--excluded-services\", excluded_service]\n parsed = self.parser.parse(command)\n assert excluded_service in parsed.excluded_services\n\n def test_exclude_checks_parser_excluded_services_long_two(self):\n excluded_service_1 = \"accessanalyzer\"\n excluded_service_2 = \"s3\"\n command = [\n prowler_command,\n \"--excluded-services\",\n excluded_service_1,\n excluded_service_2,\n ]\n parsed = self.parser.parse(command)\n assert len(parsed.excluded_services) == 2\n assert excluded_service_1 in parsed.excluded_services\n assert excluded_service_2 in parsed.excluded_services\n\n def test_checks_parser_checks_short(self):\n check = \"check_test_1\"\n argument = \"-c\"\n command = [prowler_command, argument, check]\n parsed = self.parser.parse(command)\n assert len(parsed.checks) == 1\n assert check in parsed.checks\n\n def test_checks_parser_checks_short_two(self):\n check_1 = \"check_test_1\"\n check_2 = \"check_test_2\"\n argument = \"-c\"\n 
command = [prowler_command, argument, check_1, check_2]\n parsed = self.parser.parse(command)\n assert len(parsed.checks) == 2\n assert check_1 in parsed.checks\n assert check_2 in parsed.checks\n\n def test_checks_parser_checks_long(self):\n check = \"check_test_1\"\n argument = \"--checks\"\n command = [prowler_command, argument, check]\n parsed = self.parser.parse(command)\n assert len(parsed.checks) == 1\n assert check in parsed.checks\n\n def test_checks_parser_checks_long_two(self):\n check_1 = \"check_test_1\"\n check_2 = \"check_test_2\"\n argument = \"--checks\"\n command = [prowler_command, argument, check_1, check_2]\n parsed = self.parser.parse(command)\n assert len(parsed.checks) == 2\n assert check_1 in parsed.checks\n assert check_2 in parsed.checks\n\n def test_checks_parser_checks_file_short(self):\n argument = \"-C\"\n filename = \"checks.txt\"\n command = [prowler_command, argument, filename]\n parsed = self.parser.parse(command)\n assert parsed.checks_file == filename\n\n def test_checks_parser_checks_file_long(self):\n argument = \"--checks-file\"\n filename = \"checks.txt\"\n command = [prowler_command, argument, filename]\n parsed = self.parser.parse(command)\n assert parsed.checks_file == filename\n\n def test_checks_parser_checks_folder_short(self):\n argument = \"-x\"\n filename = \"custom-checks-folder/\"\n command = [prowler_command, argument, filename]\n parsed = self.parser.parse(command)\n assert parsed.checks_folder == filename\n\n def test_checks_parser_checks_folder_long(self):\n argument = \"--checks-folder\"\n filename = \"custom-checks-folder/\"\n command = [prowler_command, argument, filename]\n parsed = self.parser.parse(command)\n assert parsed.checks_folder == filename\n\n def test_checks_parser_services_short(self):\n argument = \"-s\"\n service_1 = \"iam\"\n command = [prowler_command, argument, service_1]\n parsed = self.parser.parse(command)\n assert service_1 in parsed.services\n\n def test_checks_parser_services_short_two(self):\n argument = \"-s\"\n service_1 = \"iam\"\n service_2 = \"s3\"\n command = [prowler_command, argument, service_1, service_2]\n parsed = self.parser.parse(command)\n assert len(parsed.services) == 2\n assert service_1 in parsed.services\n assert service_2 in parsed.services\n\n def test_checks_parser_services_long(self):\n argument = \"--services\"\n service_1 = \"iam\"\n command = [prowler_command, argument, service_1]\n parsed = self.parser.parse(command)\n assert service_1 in parsed.services\n\n def test_checks_parser_services_long_two(self):\n argument = \"--services\"\n service_1 = \"iam\"\n service_2 = \"s3\"\n command = [prowler_command, argument, service_1, service_2]\n parsed = self.parser.parse(command)\n assert len(parsed.services) == 2\n assert service_1 in parsed.services\n assert service_2 in parsed.services\n\n def test_checks_parser_services_with_severity(self):\n argument1 = \"--services\"\n service_1 = \"iam\"\n argument2 = \"--severity\"\n severity = \"low\"\n command = [prowler_command, argument1, service_1, argument2, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.services) == 1\n assert service_1 in parsed.services\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def test_checks_parser_informational_severity(self):\n argument = \"--severity\"\n severity = \"informational\"\n command = [prowler_command, argument, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def 
test_checks_parser_low_severity(self):\n argument = \"--severity\"\n severity = \"low\"\n command = [prowler_command, argument, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def test_checks_parser_medium_severity(self):\n argument = \"--severity\"\n severity = \"medium\"\n command = [prowler_command, argument, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def test_checks_parser_high_severity(self):\n argument = \"--severity\"\n severity = \"high\"\n command = [prowler_command, argument, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def test_checks_parser_critical_severity(self):\n argument = \"--severity\"\n severity = \"critical\"\n command = [prowler_command, argument, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def test_checks_parser_two_severities(self):\n argument = \"--severity\"\n severity_1 = \"critical\"\n severity_2 = \"high\"\n command = [prowler_command, argument, severity_1, severity_2]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 2\n assert severity_1 in parsed.severity\n assert severity_2 in parsed.severity\n\n def test_checks_parser_wrong_severity(self, capsys):\n argument = \"--severity\"\n severity = \"kk\"\n command = [prowler_command, argument, severity]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_checks_parser_wrong_compliance(self):\n argument = \"--compliance\"\n framework = \"ens_rd2022_azure\"\n command = [prowler_command, argument, framework]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_checks_parser_compliance(self):\n argument = \"--compliance\"\n framework = \"cis_1.5_aws\"\n command = [prowler_command, argument, framework]\n parsed = self.parser.parse(command)\n assert len(parsed.compliance) == 1\n assert framework in parsed.compliance\n\n def test_checks_parser_compliance_two(self):\n argument = \"--compliance\"\n framework_1 = \"cis_1.5_aws\"\n framework_2 = \"ens_rd2022_aws\"\n command = [prowler_command, argument, framework_1, framework_2]\n parsed = self.parser.parse(command)\n assert len(parsed.compliance) == 2\n assert framework_1 in parsed.compliance\n assert framework_2 in parsed.compliance\n\n def test_checks_parser_categories(self):\n argument = \"--categories\"\n category = \"secrets\"\n command = [prowler_command, argument, category]\n parsed = self.parser.parse(command)\n assert len(parsed.categories) == 1\n assert category in parsed.categories\n\n def test_checks_parser_categories_two(self):\n argument = \"--categories\"\n category_1 = \"secrets\"\n category_2 = \"forensics\"\n command = [prowler_command, argument, category_1, category_2]\n parsed = self.parser.parse(command)\n assert len(parsed.categories) == 2\n assert category_1 in parsed.categories\n assert category_2 in parsed.categories\n\n def test_list_checks_parser_list_checks_short(self):\n argument = \"-l\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_checks\n\n def test_list_checks_parser_list_checks_long(self):\n argument = 
\"--list-checks\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_checks\n\n def test_list_checks_parser_list_checks_json(self):\n argument = \"--list-checks-json\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_checks_json\n\n def test_list_checks_parser_list_services(self):\n argument = \"--list-services\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_services\n\n def test_list_checks_parser_list_compliance(self):\n argument = \"--list-compliance\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_compliance\n\n def test_list_checks_parser_list_categories(self):\n argument = \"--list-categories\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_categories\n\n def test_list_checks_parser_list_compliance_requirements_no_arguments(self):\n argument = \"--list-compliance-requirements\"\n command = [prowler_command, argument]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_list_checks_parser_list_compliance_requirements_bad(self):\n argument = \"--list-compliance-requirements\"\n bad_framework = \"cis_1.4_azure\"\n command = [prowler_command, argument, bad_framework]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_list_checks_parser_list_compliance_requirements_one(self):\n argument = \"--list-compliance-requirements\"\n framework = \"cis_1.4_aws\"\n command = [prowler_command, argument, framework]\n parsed = self.parser.parse(command)\n assert len(parsed.list_compliance_requirements) == 1\n assert framework in parsed.list_compliance_requirements\n\n def test_aws_parser_profile_no_profile_short(self):\n argument = \"-p\"\n profile = \"\"\n command = [prowler_command, argument, profile]\n parsed = self.parser.parse(command)\n assert parsed.profile == profile\n\n def test_aws_parser_profile_short(self):\n argument = \"-p\"\n profile = \"test\"\n command = [prowler_command, argument, profile]\n parsed = self.parser.parse(command)\n assert parsed.profile == profile\n\n def test_aws_parser_profile_long(self):\n argument = \"--profile\"\n profile = \"test\"\n command = [prowler_command, argument, profile]\n parsed = self.parser.parse(command)\n assert parsed.profile == profile\n\n def test_aws_parser_no_role_arn_short(self):\n argument = \"-R\"\n role = \"\"\n command = [prowler_command, argument, role]\n parsed = self.parser.parse(command)\n assert parsed.role == role\n\n def test_aws_parser_role_arn_short(self):\n argument = \"-R\"\n role = \"test\"\n command = [prowler_command, argument, role]\n parsed = self.parser.parse(command)\n assert parsed.role == role\n\n def test_aws_parser_role_arn_long(self):\n argument = \"--role\"\n role = \"test\"\n command = [prowler_command, argument, role]\n parsed = self.parser.parse(command)\n assert parsed.role == role\n\n def test_aws_parser_mfa(self):\n argument = \"--mfa\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.mfa\n\n def test_aws_parser_session_duration_short(self, capsys):\n argument = \"-T\"\n duration = \"900\"\n command = [prowler_command, argument, duration]\n with pytest.raises(SystemExit) as 
wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: aws: To use -I/-T options -R option is needed\\n\"\n )\n\n def test_aws_parser_session_duration_long(self, capsys):\n argument = \"--session-duration\"\n duration = \"900\"\n command = [prowler_command, argument, duration]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: aws: To use -I/-T options -R option is needed\\n\"\n )\n\n # TODO\n def test_aws_parser_external_id_no_short(self):\n argument = \"-I\"\n external_id = \"\"\n command = [prowler_command, argument, external_id]\n parsed = self.parser.parse(command)\n assert not parsed.profile\n\n def test_aws_parser_external_id_short(self, capsys):\n argument = \"-I\"\n external_id = str(uuid.uuid4())\n command = [prowler_command, argument, external_id]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: aws: To use -I/-T options -R option is needed\\n\"\n )\n\n def test_aws_parser_external_id_long(self, capsys):\n argument = \"--external-id\"\n external_id = str(uuid.uuid4())\n command = [prowler_command, argument, external_id]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: aws: To use -I/-T options -R option is needed\\n\"\n )\n\n def test_aws_parser_region_f(self):\n argument = \"-f\"\n region = \"eu-west-1\"\n command = [prowler_command, argument, region]\n parsed = self.parser.parse(command)\n assert len(parsed.region) == 1\n assert region in parsed.region\n\n def test_aws_parser_region_f_bad_region(self):\n argument = \"-f\"\n region = \"no-region\"\n command = [prowler_command, argument, region]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_aws_parser_region(self):\n argument = \"--region\"\n region = \"eu-west-1\"\n command = [prowler_command, argument, region]\n parsed = self.parser.parse(command)\n assert len(parsed.region) == 1\n assert region in parsed.region\n\n def test_aws_parser_two_regions(self):\n argument = \"--region\"\n region_1 = \"eu-west-1\"\n region_2 = \"eu-west-2\"\n command = [prowler_command, argument, region_1, region_2]\n parsed = self.parser.parse(command)\n assert len(parsed.region) == 2\n assert region_1 in parsed.region\n assert region_2 in parsed.region\n\n def test_aws_parser_bad_region(self):\n argument = \"--region\"\n region = \"no-region\"\n command = [prowler_command, argument, region]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_aws_parser_filter_region(self):\n argument = \"--filter-region\"\n region = \"eu-west-1\"\n command = [prowler_command, argument, region]\n parsed = self.parser.parse(command)\n assert 
len(parsed.region) == 1\n assert region in parsed.region\n\n def test_aws_parser_bad_filter_region(self):\n argument = \"--filter-region\"\n region = \"no-region\"\n command = [prowler_command, argument, region]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_aws_parser_organizations_role_short(self):\n argument = \"-O\"\n organizations_role = \"role_test\"\n command = [prowler_command, argument, organizations_role]\n parsed = self.parser.parse(command)\n assert parsed.organizations_role == organizations_role\n\n def test_aws_parser_organizations_role_long(self):\n argument = \"--organizations-role\"\n organizations_role = \"role_test\"\n command = [prowler_command, argument, organizations_role]\n parsed = self.parser.parse(command)\n assert parsed.organizations_role == organizations_role\n\n def test_aws_parser_security_hub_short(self):\n argument = \"-S\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.security_hub\n\n def test_aws_parser_security_hub_long(self):\n argument = \"--security-hub\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.security_hub\n\n def test_aws_parser_skip_sh_update(self):\n argument = \"--skip-sh-update\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.skip_sh_update\n\n def test_aws_parser_send_only_fail(self):\n argument = \"--send-sh-only-fails\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.send_sh_only_fails\n\n def test_aws_parser_quick_inventory_short(self):\n argument = \"-i\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.quick_inventory\n\n def test_aws_parser_quick_inventory_long(self):\n argument = \"--quick-inventory\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.quick_inventory\n\n def test_aws_parser_output_bucket_short(self):\n argument = \"-B\"\n bucket = \"test-bucket\"\n command = [prowler_command, argument, bucket]\n parsed = self.parser.parse(command)\n assert parsed.output_bucket == bucket\n\n def test_aws_parser_output_bucket_long(self):\n argument = \"--output-bucket\"\n bucket = \"test-bucket\"\n command = [prowler_command, argument, bucket]\n parsed = self.parser.parse(command)\n assert parsed.output_bucket == bucket\n\n def test_aws_parser_output_bucket_no_assume_short(self):\n argument = \"-D\"\n bucket = \"test-bucket\"\n command = [prowler_command, argument, bucket]\n parsed = self.parser.parse(command)\n assert parsed.output_bucket_no_assume == bucket\n\n def test_aws_parser_output_bucket_no_assume_long(self):\n argument = \"--output-bucket-no-assume\"\n bucket = \"test-bucket\"\n command = [prowler_command, argument, bucket]\n parsed = self.parser.parse(command)\n assert parsed.output_bucket_no_assume == bucket\n\n def test_aws_parser_shodan_short(self):\n argument = \"-N\"\n shodan_api_key = str(uuid.uuid4())\n command = [prowler_command, argument, shodan_api_key]\n parsed = self.parser.parse(command)\n assert parsed.shodan == shodan_api_key\n\n def test_aws_parser_shodan_long(self):\n argument = \"--shodan\"\n shodan_api_key = str(uuid.uuid4())\n command = [prowler_command, argument, shodan_api_key]\n parsed = self.parser.parse(command)\n assert parsed.shodan == shodan_api_key\n\n def test_aws_parser_allowlist_short(self):\n 
argument = \"-w\"\n allowlist_file = \"allowlist.txt\"\n command = [prowler_command, argument, allowlist_file]\n parsed = self.parser.parse(command)\n assert parsed.allowlist_file == allowlist_file\n\n def test_aws_parser_allowlist_long(self):\n argument = \"--allowlist-file\"\n allowlist_file = \"allowlist.txt\"\n command = [prowler_command, argument, allowlist_file]\n parsed = self.parser.parse(command)\n assert parsed.allowlist_file == allowlist_file\n\n def test_aws_parser_resource_tags(self):\n argument = \"--resource-tags\"\n scan_tag1 = \"Key=Value\"\n scan_tag2 = \"Key2=Value2\"\n command = [prowler_command, argument, scan_tag1, scan_tag2]\n parsed = self.parser.parse(command)\n assert len(parsed.resource_tags) == 2\n assert scan_tag1 in parsed.resource_tags\n assert scan_tag2 in parsed.resource_tags\n\n def test_aws_parser_resource_arn(self):\n argument = \"--resource-arn\"\n resource_arn1 = \"arn:aws:iam::012345678910:user/test\"\n resource_arn2 = \"arn:aws:ec2:us-east-1:123456789012:vpc/vpc-12345678\"\n command = [prowler_command, argument, resource_arn1, resource_arn2]\n parsed = self.parser.parse(command)\n assert len(parsed.resource_arn) == 2\n assert resource_arn1 in parsed.resource_arn\n assert resource_arn2 in parsed.resource_arn\n\n def test_aws_parser_wrong_resource_arn(self):\n argument = \"--resource-arn\"\n resource_arn = \"arn:azure:iam::account:user/test\"\n command = [prowler_command, argument, resource_arn]\n with pytest.raises(SystemExit) as ex:\n self.parser.parse(command)\n assert ex.type == SystemExit\n\n def test_aws_parser_aws_retries_max_attempts(self):\n argument = \"--aws-retries-max-attempts\"\n max_retries = \"10\"\n command = [prowler_command, argument, max_retries]\n parsed = self.parser.parse(command)\n assert parsed.aws_retries_max_attempts == int(max_retries)\n\n def test_aws_parser_ignore_unused_services(self):\n argument = \"--ignore-unused-services\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.ignore_unused_services\n\n def test_aws_parser_config_file(self):\n argument = \"--config-file\"\n config_file = \"./test-config.yaml\"\n command = [prowler_command, argument, config_file]\n parsed = self.parser.parse(command)\n assert parsed.config_file == config_file\n\n def test_aws_parser_sts_endpoint_region(self):\n argument = \"--sts-endpoint-region\"\n sts_endpoint_region = \"eu-west-1\"\n command = [prowler_command, argument, sts_endpoint_region]\n parsed = self.parser.parse(command)\n assert parsed.sts_endpoint_region == sts_endpoint_region\n\n def test_parser_azure_auth_sp(self):\n argument = \"--sp-env-auth\"\n command = [prowler_command, \"azure\", argument]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.sp_env_auth\n\n def test_parser_azure_auth_browser(self):\n argument = \"--browser-auth\"\n command = [prowler_command, \"azure\", argument]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.browser_auth\n\n def test_parser_azure_tenant_id(self):\n argument = \"--tenant-id\"\n tenant_id = \"test-tenant-id\"\n command = [prowler_command, \"azure\", argument, tenant_id]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.tenant_id == tenant_id\n\n def test_parser_azure_auth_az_cli(self):\n argument = \"--az-cli-auth\"\n command = [prowler_command, \"azure\", argument]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert 
parsed.az_cli_auth\n\n def test_parser_azure_auth_managed_identity(self):\n argument = \"--managed-identity-auth\"\n command = [prowler_command, \"azure\", argument]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.managed_identity_auth\n\n def test_parser_azure_subscription_ids(self):\n argument = \"--subscription-ids\"\n subscription_1 = \"test_subscription_1\"\n subscription_2 = \"test_subscription_2\"\n command = [prowler_command, \"azure\", argument, subscription_1, subscription_2]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert len(parsed.subscription_ids) == 2\n assert parsed.subscription_ids[0] == subscription_1\n assert parsed.subscription_ids[1] == subscription_2\n\n def test_parser_azure_region(self):\n argument = \"--azure-region\"\n region = \"AzureChinaCloud\"\n command = [prowler_command, \"azure\", argument, region]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.azure_region == region\n\n # Test AWS flags with Azure provider\n def test_parser_azure_with_aws_flag(self, capsys):\n command = [prowler_command, \"azure\", \"-p\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: unrecognized arguments: -p\\n\"\n )\n\n # Test Azure flags with AWS provider\n def test_parser_aws_with_azure_flag(self, capsys):\n command = [prowler_command, \"aws\", \"--subscription-ids\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: unrecognized arguments: --subscription-ids\\n\"\n )\n\n def test_parser_gcp_auth_credentials_file(self):\n argument = \"--credentials-file\"\n file = \"test.json\"\n command = [prowler_command, \"gcp\", argument, file]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"gcp\"\n assert parsed.credentials_file == file\n\n def test_parser_gcp_project_ids(self):\n argument = \"--project-ids\"\n project_1 = \"test_project_1\"\n project_2 = \"test_project_2\"\n command = [prowler_command, \"gcp\", argument, project_1, project_2]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"gcp\"\n assert len(parsed.project_ids) == 2\n assert parsed.project_ids[0] == project_1\n assert parsed.project_ids[1] == project_2\n\n def test_validate_azure_region_valid_regions(self):\n expected_regions = [\n \"AzureChinaCloud\",\n \"AzureUSGovernment\",\n \"AzureGermanCloud\",\n \"AzureCloud\",\n ]\n input_regions = [\n \"AzureChinaCloud\",\n \"AzureUSGovernment\",\n \"AzureGermanCloud\",\n \"AzureCloud\",\n ]\n for region in input_regions:\n assert validate_azure_region(region) in expected_regions\n\n def test_validate_azure_region_invalid_regions(self):\n expected_regions = [\n \"AzureChinaCloud\",\n \"AzureUSGovernment\",\n \"AzureGermanCloud\",\n \"AzureCloud\",\n ]\n invalid_region = \"non-valid-region\"\n with pytest.raises(\n ArgumentTypeError,\n match=f\"Region {invalid_region} not allowed, allowed regions are {' '.join(expected_regions)}\",\n ):\n validate_azure_region(invalid_region)\n",
"path": "tests/lib/cli/parser_test.py"
}
] | 11_6 | python | import unittest
import sys
from unittest.mock import patch
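# argv[0]-style program name used as the first element of parsed commands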
prowler_command = "prowler"
prowler_default_usage_error = "usage: prowler [-h] [-v] {aws,azure,gcp} ..."
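# Fixed provider list returned in place of prowler's dynamic provider discovery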
def mock_get_available_providers():
return ["aws", "azure", "gcp"]
class Test_Parser(unittest.TestCase):
    # unittest runs setUp() before each test; the pytest-style name
    # "setup_method" is never invoked by unittest.TestLoader
    def setUp(self):
from prowler.lib.cli.parser import ProwlerArgumentParser
# We need this to mock the get_available_providers function call
# since the importlib.import_module is not working starting from the test class
self.patch_get_available_providers = patch(
"prowler.providers.common.arguments.get_available_providers",
new=mock_get_available_providers,
)
self.patch_get_available_providers.start()
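        # Stop the patcher automatically when each test finishes
        self.addCleanup(self.patch_get_available_providers.stop)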
# Init parser
self.parser = ProwlerArgumentParser()
def test_validate_bucket_invalid_bucket_names(self):
from argparse import ArgumentTypeError
from prowler.providers.aws.lib.arguments.arguments import validate_bucket
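        # Each name violates an S3 bucket naming rule: reserved "xn--" prefix,
        # more than 63 characters, IP-address form, reserved "-s3alias" suffix,
        # trailing hyphen, an illegal character, or fewer than 3 characters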
bad_bucket_names = [
"xn--bucket-name",
"mrryadfpcwlscicvnrchmtmyhwrvzkgfgdxnlnvaaummnywciixnzvycnzmhhpwb",
"192.168.5.4",
"bucket-name-s3alias",
"bucket-name-s3alias-",
"bucket-n$ame",
"bu",
]
for bucket_name in bad_bucket_names:
with self.assertRaises(ArgumentTypeError) as argument_error:
validate_bucket(bucket_name)
self.assertEqual(argument_error.exception.args[0], "Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)")
def test_validate_bucket_valid_bucket_names(self):
from prowler.providers.aws.lib.arguments.arguments import validate_bucket
valid_bucket_names = ["bucket-name", "test", "test-test-test"]
for bucket_name in valid_bucket_names:
assert validate_bucket(bucket_name) == bucket_name
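# Run only this module's Test_Parser suite and report the result via the exit code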
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(Test_Parser))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/prowler | Your objective is to introduce a new command-line argument --send-sh-only-fails to Prowler's AWS Security Hub integration. You need to modify `arguments.py`, `security_hub.py`, and `outputs.py`. In `arguments.py`, add the new --send-sh-only-fails argument to the AWS Security Hub parser. In `security_hub.py`, update the `prepare_security_hub_findings` function to filter out findings whose status is not FAIL when the --send-sh-only-fails flag is set. In `outputs.py`, add a new attribute to Aws_Output_Options to track this flag's state. | 9a86846 | about-time==4.2.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece \
--hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341
adal==1.2.7 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d \
--hash=sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1
alive-progress==3.1.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5 \
--hash=sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98
attrs==23.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
--hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
awsipranges==0.3.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0 \
--hash=sha256:f3d7a54aeaf7fe310beb5d377a4034a63a51b72677ae6af3e0967bc4de7eedaf
azure-common==1.1.28 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3 \
--hash=sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad
azure-core==1.28.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9 \
--hash=sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd
azure-identity==1.15.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8 \
--hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912
azure-mgmt-authorization==4.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4 \
--hash=sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a
azure-mgmt-core==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d \
--hash=sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae
azure-mgmt-security==5.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38b03efe82c2344cea203fda95e6d00b7ac22782fa1c0b585cd0ea2c8ff3e702 \
--hash=sha256:73a74ce8f6ffb1b345ce101c8abdd42238f161f0988d168d23918feda0089654
azure-mgmt-sql==3.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:129042cc011225e27aee6ef2697d585fa5722e5d1aeb0038af6ad2451a285457 \
--hash=sha256:1d1dd940d4d41be4ee319aad626341251572a5bf4a2addec71779432d9a1381f
azure-mgmt-storage==21.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc \
--hash=sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162
azure-mgmt-subscription==3.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38d4574a8d47fa17e3587d756e296cb63b82ad8fb21cd8543bcee443a502bf48 \
--hash=sha256:4e255b4ce9b924357bb8c5009b3c88a2014d3203b2495e2256fa027bf84e800e
azure-storage-blob==12.19.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897 \
--hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b
boto3==1.26.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30 \
--hash=sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c
botocore==1.29.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df \
--hash=sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210
cachetools==5.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
certifi==2023.7.22 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
cffi==1.15.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
click==8.1.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
colorama==0.4.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
contextlib2==21.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \
--hash=sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869
cryptography==41.0.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \
--hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \
--hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \
--hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \
--hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \
--hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \
--hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \
--hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \
--hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \
--hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \
--hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \
--hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \
--hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \
--hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \
--hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \
--hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \
--hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \
--hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \
--hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \
--hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \
--hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \
--hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \
--hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae
detect-secrets==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd \
--hash=sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833
filelock==3.12.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81 \
--hash=sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec
google-api-core==2.11.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \
--hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e
google-api-python-client==2.111.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3a45a53c031478d1c82c7162dd25c9a965247bca6bd438af0838a9d9b8219405 \
--hash=sha256:b605adee2d09a843b97a59925757802904679e44e5599708cedb8939900dfbc7
google-auth-httplib2==0.2.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05 \
--hash=sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d
google-auth==2.17.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
--hash=sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f
googleapis-common-protos==1.59.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44 \
--hash=sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f
grapheme==0.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca
httplib2==0.22.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \
--hash=sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81
idna==3.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
isodate==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 \
--hash=sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9
jmespath==1.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
jsonschema-specifications==2023.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \
--hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28
jsonschema==4.20.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa \
--hash=sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3
msal-extensions==1.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee \
--hash=sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354
msal==1.24.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8 \
--hash=sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad
msgraph-core==0.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:147324246788abe8ed7e05534cd9e4e0ec98b33b30e011693b8d014cebf97f63 \
--hash=sha256:e297564b9a0ca228493d8851f95cb2de9522143d82efa40ce3a6ad286e21392e
msrest==0.7.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32 \
--hash=sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9
msrestazure==0.6.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9 \
--hash=sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189
oauthlib==3.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \
--hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918
portalocker==2.7.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51 \
--hash=sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983
protobuf==4.23.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \
--hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \
--hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \
--hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \
--hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \
--hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \
--hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \
--hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \
--hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \
--hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \
--hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \
--hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2
pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
--hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
pycparser==2.21 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.13 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \
--hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \
--hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \
--hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \
--hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \
--hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \
--hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \
--hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \
--hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \
--hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \
--hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \
--hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \
--hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \
--hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \
--hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \
--hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \
--hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \
--hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \
--hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \
--hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \
--hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \
--hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \
--hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \
--hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \
--hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \
--hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \
--hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \
--hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \
--hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \
--hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \
--hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \
--hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \
--hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \
--hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \
--hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0
pyjwt==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyjwt[crypto]==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
pywin32==306 ; python_version >= "3.9" and platform_system == "Windows" and python_version < "3.12" \
--hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \
--hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \
--hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \
--hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \
--hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \
--hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \
--hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \
--hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \
--hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \
--hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \
--hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \
--hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0
pyyaml==6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
referencing==0.29.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \
--hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f
requests-file==1.5.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \
--hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953
requests-oauthlib==1.3.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \
--hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a
requests==2.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
rpds-py==0.8.10 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \
--hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \
--hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \
--hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \
--hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \
--hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \
--hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \
--hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \
--hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \
--hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \
--hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \
--hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \
--hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \
--hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \
--hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \
--hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \
--hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \
--hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \
--hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \
--hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \
--hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \
--hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \
--hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \
--hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \
--hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \
--hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \
--hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \
--hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \
--hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \
--hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \
--hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \
--hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \
--hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \
--hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \
--hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \
--hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \
--hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \
--hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \
--hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \
--hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \
--hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \
--hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \
--hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \
--hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \
--hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \
--hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \
--hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \
--hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \
--hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \
--hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \
--hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \
--hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \
--hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \
--hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \
--hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \
--hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \
--hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \
--hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \
--hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \
--hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \
--hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \
--hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \
--hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \
--hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \
--hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \
--hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \
--hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \
--hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \
--hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \
--hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \
--hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \
--hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \
--hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \
--hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \
--hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \
--hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \
--hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \
--hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \
--hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \
--hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \
--hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84
rsa==4.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
s3transfer==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346 \
--hash=sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9
schema==0.7.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197 \
--hash=sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c
shodan==1.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:c73275386ea02390e196c35c660706a28dd4d537c5a21eb387ab6236fac251f6
six==1.16.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
slack-sdk==3.26.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d1600211eaa37c71a5f92daf4404074c3e6b3f5359a37c93c818b39d88ab4ca0 \
--hash=sha256:f80f0d15f0fce539b470447d2a07b03ecdad6b24f69c1edd05d464cf21253a06
tabulate==0.9.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \
--hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f
tldextract==3.4.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2 \
--hash=sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234
typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
urllib3==1.26.18 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
--hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
xlsxwriter==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122 \
--hash=sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de | python3.9 | fdeb5235 | diff --git a/prowler/providers/aws/lib/arguments/arguments.py b/prowler/providers/aws/lib/arguments/arguments.py
--- a/prowler/providers/aws/lib/arguments/arguments.py
+++ b/prowler/providers/aws/lib/arguments/arguments.py
@@ -84,6 +84,11 @@ def init_parser(self):
action="store_true",
help="Skip updating previous findings of Prowler in Security Hub",
)
+ aws_security_hub_subparser.add_argument(
+ "--send-sh-only-fails",
+ action="store_true",
+ help="Send only Prowler failed findings to SecurityHub",
+ )
# AWS Quick Inventory
aws_quick_inventory_subparser = aws_parser.add_argument_group("Quick Inventory")
aws_quick_inventory_subparser.add_argument(
diff --git a/prowler/providers/aws/lib/security_hub/security_hub.py b/prowler/providers/aws/lib/security_hub/security_hub.py
--- a/prowler/providers/aws/lib/security_hub/security_hub.py
+++ b/prowler/providers/aws/lib/security_hub/security_hub.py
@@ -29,7 +29,9 @@ def prepare_security_hub_findings(
continue
# Handle quiet mode
- if output_options.is_quiet and finding.status != "FAIL":
+ if (
+ output_options.is_quiet or output_options.send_sh_only_fails
+ ) and finding.status != "FAIL":
continue
# Get the finding region
diff --git a/prowler/providers/common/outputs.py b/prowler/providers/common/outputs.py
--- a/prowler/providers/common/outputs.py
+++ b/prowler/providers/common/outputs.py
@@ -69,7 +69,8 @@ class Provider_Output_Options:
if arguments.output_directory:
if not isdir(arguments.output_directory):
if arguments.output_modes:
- makedirs(arguments.output_directory)
+            # exist_ok is set to True so that an existing directory does not raise FileExistsError
+ makedirs(arguments.output_directory, exist_ok=True)
class Azure_Output_Options(Provider_Output_Options):
@@ -134,6 +135,7 @@ class Aws_Output_Options(Provider_Output_Options):
# Security Hub Outputs
self.security_hub_enabled = arguments.security_hub
+ self.send_sh_only_fails = arguments.send_sh_only_fails
if arguments.security_hub:
if not self.output_modes:
self.output_modes = ["json-asff"]
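The makedirs change above is an idempotency fix. A small sketch of the behaviour difference, using an illustrative temporary path:

import os
import tempfile

out_dir = os.path.join(tempfile.gettempdir(), "prowler-output-demo")

# With exist_ok=True the second call is a no-op; without it, makedirs raises
# FileExistsError once the directory already exists (for example, when it was
# created by a previous run).
os.makedirs(out_dir, exist_ok=True)
os.makedirs(out_dir, exist_ok=True)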
diff --git a/tests/lib/cli/parser_test.py b/tests/lib/cli/parser_test.py
--- a/tests/lib/cli/parser_test.py
+++ b/tests/lib/cli/parser_test.py
@@ -882,6 +882,12 @@ class Test_Parser:
parsed = self.parser.parse(command)
assert parsed.skip_sh_update
+ def test_aws_parser_send_only_fail(self):
+ argument = "--send-sh-only-fails"
+ command = [prowler_command, argument]
+ parsed = self.parser.parse(command)
+ assert parsed.send_sh_only_fails
+
def test_aws_parser_quick_inventory_short(self):
argument = "-i"
command = [prowler_command, argument]
diff --git a/tests/providers/aws/lib/security_hub/security_hub_test.py b/tests/providers/aws/lib/security_hub/security_hub_test.py
--- a/tests/providers/aws/lib/security_hub/security_hub_test.py
+++ b/tests/providers/aws/lib/security_hub/security_hub_test.py
@@ -21,6 +21,49 @@ from tests.providers.aws.audit_info_utils import (
set_mocked_aws_audit_info,
)
+
+def get_security_hub_finding(status: str):
+ return {
+ "SchemaVersion": "2018-10-08",
+ "Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4",
+ "ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler",
+ "RecordState": "ACTIVE",
+ "ProductFields": {
+ "ProviderName": "Prowler",
+ "ProviderVersion": prowler_version,
+ "ProwlerResourceName": "test",
+ },
+ "GeneratorId": "prowler-iam_user_accesskey_unused",
+ "AwsAccountId": f"{AWS_ACCOUNT_NUMBER}",
+ "Types": ["Software and Configuration Checks"],
+ "FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
+ "UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
+ "CreatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
+ "Severity": {"Label": "LOW"},
+ "Title": "Ensure Access Keys unused are disabled",
+ "Description": "test",
+ "Resources": [
+ {
+ "Type": "AwsIamAccessAnalyzer",
+ "Id": "test",
+ "Partition": "aws",
+ "Region": f"{AWS_REGION_EU_WEST_1}",
+ }
+ ],
+ "Compliance": {
+ "Status": status,
+ "RelatedRequirements": [],
+ "AssociatedStandards": [],
+ },
+ "Remediation": {
+ "Recommendation": {
+ "Text": "Run sudo yum update and cross your fingers and toes.",
+ "Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html",
+ }
+ },
+ }
+
+
# Mocking Security Hub Get Findings
make_api_call = botocore.client.BaseClient._make_api_call
@@ -64,10 +107,13 @@ class Test_SecurityHub:
return finding
- def set_mocked_output_options(self, is_quiet):
+ def set_mocked_output_options(
+ self, is_quiet: bool = False, send_sh_only_fails: bool = False
+ ):
output_options = MagicMock
output_options.bulk_checks_metadata = {}
output_options.is_quiet = is_quiet
+ output_options.send_sh_only_fails = send_sh_only_fails
return output_options
@@ -98,47 +144,7 @@ class Test_SecurityHub:
output_options,
enabled_regions,
) == {
- AWS_REGION_EU_WEST_1: [
- {
- "SchemaVersion": "2018-10-08",
- "Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4",
- "ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler",
- "RecordState": "ACTIVE",
- "ProductFields": {
- "ProviderName": "Prowler",
- "ProviderVersion": prowler_version,
- "ProwlerResourceName": "test",
- },
- "GeneratorId": "prowler-iam_user_accesskey_unused",
- "AwsAccountId": f"{AWS_ACCOUNT_NUMBER}",
- "Types": ["Software and Configuration Checks"],
- "FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
- "UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
- "CreatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
- "Severity": {"Label": "LOW"},
- "Title": "Ensure Access Keys unused are disabled",
- "Description": "test",
- "Resources": [
- {
- "Type": "AwsIamAccessAnalyzer",
- "Id": "test",
- "Partition": "aws",
- "Region": f"{AWS_REGION_EU_WEST_1}",
- }
- ],
- "Compliance": {
- "Status": "PASSED",
- "RelatedRequirements": [],
- "AssociatedStandards": [],
- },
- "Remediation": {
- "Recommendation": {
- "Text": "Run sudo yum update and cross your fingers and toes.",
- "Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html",
- }
- },
- }
- ],
+ AWS_REGION_EU_WEST_1: [get_security_hub_finding("PASSED")],
}
def test_prepare_security_hub_findings_quiet_INFO_finding(self):
@@ -171,7 +177,7 @@ class Test_SecurityHub:
enabled_regions,
) == {AWS_REGION_EU_WEST_1: []}
- def test_prepare_security_hub_findings_quiet(self):
+ def test_prepare_security_hub_findings_quiet_PASS(self):
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=True)
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
@@ -186,6 +192,51 @@ class Test_SecurityHub:
enabled_regions,
) == {AWS_REGION_EU_WEST_1: []}
+ def test_prepare_security_hub_findings_quiet_FAIL(self):
+ enabled_regions = [AWS_REGION_EU_WEST_1]
+ output_options = self.set_mocked_output_options(is_quiet=True)
+ findings = [self.generate_finding("FAIL", AWS_REGION_EU_WEST_1)]
+ audit_info = set_mocked_aws_audit_info(
+ audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
+ )
+
+ assert prepare_security_hub_findings(
+ findings,
+ audit_info,
+ output_options,
+ enabled_regions,
+ ) == {AWS_REGION_EU_WEST_1: [get_security_hub_finding("FAILED")]}
+
+ def test_prepare_security_hub_findings_send_sh_only_fails_PASS(self):
+ enabled_regions = [AWS_REGION_EU_WEST_1]
+ output_options = self.set_mocked_output_options(send_sh_only_fails=True)
+ findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
+ audit_info = set_mocked_aws_audit_info(
+ audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
+ )
+
+ assert prepare_security_hub_findings(
+ findings,
+ audit_info,
+ output_options,
+ enabled_regions,
+ ) == {AWS_REGION_EU_WEST_1: []}
+
+ def test_prepare_security_hub_findings_send_sh_only_fails_FAIL(self):
+ enabled_regions = [AWS_REGION_EU_WEST_1]
+ output_options = self.set_mocked_output_options(send_sh_only_fails=True)
+ findings = [self.generate_finding("FAIL", AWS_REGION_EU_WEST_1)]
+ audit_info = set_mocked_aws_audit_info(
+ audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
+ )
+
+ assert prepare_security_hub_findings(
+ findings,
+ audit_info,
+ output_options,
+ enabled_regions,
+ ) == {AWS_REGION_EU_WEST_1: [get_security_hub_finding("FAILED")]}
+
def test_prepare_security_hub_findings_no_audited_regions(self):
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
@@ -198,47 +249,7 @@ class Test_SecurityHub:
output_options,
enabled_regions,
) == {
- AWS_REGION_EU_WEST_1: [
- {
- "SchemaVersion": "2018-10-08",
- "Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4",
- "ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler",
- "RecordState": "ACTIVE",
- "ProductFields": {
- "ProviderName": "Prowler",
- "ProviderVersion": prowler_version,
- "ProwlerResourceName": "test",
- },
- "GeneratorId": "prowler-iam_user_accesskey_unused",
- "AwsAccountId": f"{AWS_ACCOUNT_NUMBER}",
- "Types": ["Software and Configuration Checks"],
- "FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
- "UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
- "CreatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
- "Severity": {"Label": "LOW"},
- "Title": "Ensure Access Keys unused are disabled",
- "Description": "test",
- "Resources": [
- {
- "Type": "AwsIamAccessAnalyzer",
- "Id": "test",
- "Partition": "aws",
- "Region": f"{AWS_REGION_EU_WEST_1}",
- }
- ],
- "Compliance": {
- "Status": "PASSED",
- "RelatedRequirements": [],
- "AssociatedStandards": [],
- },
- "Remediation": {
- "Recommendation": {
- "Text": "Run sudo yum update and cross your fingers and toes.",
- "Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html",
- }
- },
- }
- ],
+ AWS_REGION_EU_WEST_1: [get_security_hub_finding("PASSED")],
}
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
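For reference, a self-contained sketch of the mocking pattern used by the tests above, adapted from set_mocked_output_options; instantiating MagicMock() rather than assigning the class object is a small idiomatic liberty taken here.

from unittest.mock import MagicMock

def set_mocked_output_options(is_quiet: bool = False, send_sh_only_fails: bool = False):
    # A MagicMock instance stands in for the provider output options so the
    # Security Hub code can read the two filtering attributes it cares about.
    output_options = MagicMock()
    output_options.bulk_checks_metadata = {}
    output_options.is_quiet = is_quiet
    output_options.send_sh_only_fails = send_sh_only_fails
    return output_options

opts = set_mocked_output_options(send_sh_only_fails=True)
assert opts.send_sh_only_fails and not opts.is_quiet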
diff --git a/tests/providers/common/common_outputs_test.py b/tests/providers/common/common_outputs_test.py
--- a/tests/providers/common/common_outputs_test.py
+++ b/tests/providers/common/common_outputs_test.py
@@ -96,6 +96,7 @@ class Test_Common_Output_Options:
arguments.shodan = "test-api-key"
arguments.only_logs = False
arguments.unix_timestamp = False
+ arguments.send_sh_only_fails = True
audit_info = self.set_mocked_aws_audit_info()
allowlist_file = ""
@@ -105,6 +106,7 @@ class Test_Common_Output_Options:
)
assert isinstance(output_options, Aws_Output_Options)
assert output_options.security_hub_enabled
+ assert output_options.send_sh_only_fails
assert output_options.is_quiet
assert output_options.output_modes == ["html", "csv", "json", "json-asff"]
assert output_options.output_directory == arguments.output_directory
@@ -160,6 +162,7 @@ class Test_Common_Output_Options:
arguments.shodan = "test-api-key"
arguments.only_logs = False
arguments.unix_timestamp = False
+ arguments.send_sh_only_fails = True
# Mock AWS Audit Info
audit_info = self.set_mocked_aws_audit_info()
@@ -171,6 +174,7 @@ class Test_Common_Output_Options:
)
assert isinstance(output_options, Aws_Output_Options)
assert output_options.security_hub_enabled
+ assert output_options.send_sh_only_fails
assert output_options.is_quiet
assert output_options.output_modes == ["html", "csv", "json", "json-asff"]
assert output_options.output_directory == arguments.output_directory
| [
{
"content": "from argparse import ArgumentTypeError, Namespace\n\nfrom prowler.providers.aws.aws_provider import get_aws_available_regions\nfrom prowler.providers.aws.lib.arn.arn import arn_type\n\n\ndef init_parser(self):\n \"\"\"Init the AWS Provider CLI parser\"\"\"\n aws_parser = self.subparsers.add_parser(\n \"aws\", parents=[self.common_providers_parser], help=\"AWS Provider\"\n )\n # Authentication Methods\n aws_auth_subparser = aws_parser.add_argument_group(\"Authentication Modes\")\n aws_auth_subparser.add_argument(\n \"-p\",\n \"--profile\",\n nargs=\"?\",\n default=None,\n help=\"AWS profile to launch prowler with\",\n )\n aws_auth_subparser.add_argument(\n \"-R\",\n \"--role\",\n nargs=\"?\",\n default=None,\n help=\"ARN of the role to be assumed\",\n # Pending ARN validation\n )\n aws_auth_subparser.add_argument(\n \"--sts-endpoint-region\",\n nargs=\"?\",\n default=None,\n help=\"Specify the AWS STS endpoint region to use. Read more at https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html\",\n )\n aws_auth_subparser.add_argument(\n \"--mfa\",\n action=\"store_true\",\n help=\"IAM entity enforces MFA so you need to input the MFA ARN and the TOTP\",\n )\n aws_auth_subparser.add_argument(\n \"-T\",\n \"--session-duration\",\n nargs=\"?\",\n default=3600,\n type=validate_session_duration,\n help=\"Assumed role session duration in seconds, must be between 900 and 43200. Default: 3600\",\n # Pending session duration validation\n )\n aws_auth_subparser.add_argument(\n \"-I\",\n \"--external-id\",\n nargs=\"?\",\n default=None,\n help=\"External ID to be passed when assuming role\",\n )\n # AWS Regions\n aws_regions_subparser = aws_parser.add_argument_group(\"AWS Regions\")\n aws_regions_subparser.add_argument(\n \"-f\",\n \"--region\",\n \"--filter-region\",\n nargs=\"+\",\n help=\"AWS region names to run Prowler against\",\n choices=get_aws_available_regions(),\n )\n # AWS Organizations\n aws_orgs_subparser = aws_parser.add_argument_group(\"AWS Organizations\")\n aws_orgs_subparser.add_argument(\n \"-O\",\n \"--organizations-role\",\n nargs=\"?\",\n help=\"Specify AWS Organizations management role ARN to be assumed, to get Organization metadata\",\n )\n # AWS Security Hub\n aws_security_hub_subparser = aws_parser.add_argument_group(\"AWS Security Hub\")\n aws_security_hub_subparser.add_argument(\n \"-S\",\n \"--security-hub\",\n action=\"store_true\",\n help=\"Send check output to AWS Security Hub\",\n )\n aws_security_hub_subparser.add_argument(\n \"--skip-sh-update\",\n action=\"store_true\",\n help=\"Skip updating previous findings of Prowler in Security Hub\",\n )\n # AWS Quick Inventory\n aws_quick_inventory_subparser = aws_parser.add_argument_group(\"Quick Inventory\")\n aws_quick_inventory_subparser.add_argument(\n \"-i\",\n \"--quick-inventory\",\n action=\"store_true\",\n help=\"Run Prowler Quick Inventory. 
The inventory will be stored in an output csv by default\",\n )\n # AWS Outputs\n aws_outputs_subparser = aws_parser.add_argument_group(\"AWS Outputs to S3\")\n aws_outputs_bucket_parser = aws_outputs_subparser.add_mutually_exclusive_group()\n aws_outputs_bucket_parser.add_argument(\n \"-B\",\n \"--output-bucket\",\n nargs=\"?\",\n default=None,\n help=\"Custom output bucket, requires -M <mode> and it can work also with -o flag.\",\n )\n aws_outputs_bucket_parser.add_argument(\n \"-D\",\n \"--output-bucket-no-assume\",\n nargs=\"?\",\n default=None,\n help=\"Same as -B but do not use the assumed role credentials to put objects to the bucket, instead uses the initial credentials.\",\n )\n aws_3rd_party_subparser = aws_parser.add_argument_group(\"3rd Party Integrations\")\n aws_3rd_party_subparser.add_argument(\n \"-N\",\n \"--shodan\",\n nargs=\"?\",\n default=None,\n help=\"Shodan API key used by check ec2_elastic_ip_shodan.\",\n )\n # Allowlist\n allowlist_subparser = aws_parser.add_argument_group(\"Allowlist\")\n allowlist_subparser.add_argument(\n \"-w\",\n \"--allowlist-file\",\n nargs=\"?\",\n default=None,\n help=\"Path for allowlist yaml file. See example prowler/config/aws_allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table or Lambda ARNs or S3 URIs, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/\",\n )\n\n # Based Scans\n aws_based_scans_subparser = aws_parser.add_argument_group(\"AWS Based Scans\")\n aws_based_scans_parser = aws_based_scans_subparser.add_mutually_exclusive_group()\n aws_based_scans_parser.add_argument(\n \"--resource-tags\",\n nargs=\"+\",\n default=None,\n help=\"Scan only resources with specific AWS Tags (Key=Value), e.g., Environment=dev Project=prowler\",\n )\n aws_based_scans_parser.add_argument(\n \"--resource-arn\",\n nargs=\"+\",\n type=arn_type,\n default=None,\n help=\"Scan only resources with specific AWS Resource ARNs, e.g., arn:aws:iam::012345678910:user/test arn:aws:ec2:us-east-1:123456789012:vpc/vpc-12345678\",\n )\n\n # Boto3 Config\n boto3_config_subparser = aws_parser.add_argument_group(\"Boto3 Config\")\n boto3_config_subparser.add_argument(\n \"--aws-retries-max-attempts\",\n nargs=\"?\",\n default=None,\n type=int,\n help=\"Set the maximum attemps for the Boto3 standard retrier config (Default: 3)\",\n )\n\n # Ignore Unused Services\n ignore_unused_services_subparser = aws_parser.add_argument_group(\n \"Ignore Unused Services\"\n )\n ignore_unused_services_subparser.add_argument(\n \"--ignore-unused-services\",\n action=\"store_true\",\n help=\"Ignore findings in unused services\",\n )\n\n\ndef validate_session_duration(duration):\n \"\"\"validate_session_duration validates that the AWS STS Assume Role Session Duration is between 900 and 43200 seconds.\"\"\"\n duration = int(duration)\n # Since the range(i,j) goes from i to j-1 we have to j+1\n if duration not in range(900, 43201):\n raise ArgumentTypeError(\"Session duration must be between 900 and 43200\")\n return duration\n\n\ndef validate_arguments(arguments: Namespace) -> tuple[bool, str]:\n \"\"\"validate_arguments returns {True, \"} if the provider arguments passed are valid and can be used together. 
It performs an extra validation, specific for the AWS provider, apart from the argparse lib.\"\"\"\n\n # Handle if session_duration is not the default value or external_id is set\n if (\n arguments.session_duration and arguments.session_duration != 3600\n ) or arguments.external_id:\n if not arguments.role:\n return (False, \"To use -I/-T options -R option is needed\")\n\n return (True, \"\")\n",
"path": "prowler/providers/aws/lib/arguments/arguments.py"
},
{
"content": "from boto3 import session\n\nfrom prowler.config.config import timestamp_utc\nfrom prowler.lib.logger import logger\nfrom prowler.lib.outputs.json import fill_json_asff\nfrom prowler.lib.outputs.models import Check_Output_JSON_ASFF\nfrom prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info\n\nSECURITY_HUB_INTEGRATION_NAME = \"prowler/prowler\"\nSECURITY_HUB_MAX_BATCH = 100\n\n\ndef prepare_security_hub_findings(\n findings: [], audit_info: AWS_Audit_Info, output_options, enabled_regions: []\n) -> dict:\n security_hub_findings_per_region = {}\n\n # Create a key per audited region\n for region in enabled_regions:\n security_hub_findings_per_region[region] = []\n\n for finding in findings:\n # We don't send the INFO findings to AWS Security Hub\n if finding.status == \"INFO\":\n continue\n\n # We don't send findings to not enabled regions\n if finding.region not in enabled_regions:\n continue\n\n # Handle quiet mode\n if output_options.is_quiet and finding.status != \"FAIL\":\n continue\n\n # Get the finding region\n region = finding.region\n\n # Format the finding in the JSON ASFF format\n finding_json_asff = fill_json_asff(\n Check_Output_JSON_ASFF(), audit_info, finding, output_options\n )\n\n # Include that finding within their region in the JSON format\n security_hub_findings_per_region[region].append(\n finding_json_asff.dict(exclude_none=True)\n )\n\n return security_hub_findings_per_region\n\n\ndef verify_security_hub_integration_enabled_per_region(\n partition: str,\n region: str,\n session: session.Session,\n aws_account_number: str,\n) -> bool:\n f\"\"\"verify_security_hub_integration_enabled returns True if the {SECURITY_HUB_INTEGRATION_NAME} is enabled for the given region. Otherwise returns false.\"\"\"\n prowler_integration_enabled = False\n\n try:\n logger.info(\n f\"Checking if the {SECURITY_HUB_INTEGRATION_NAME} is enabled in the {region} region.\"\n )\n # Check if security hub is enabled in current region\n security_hub_client = session.client(\"securityhub\", region_name=region)\n security_hub_client.describe_hub()\n\n # Check if Prowler integration is enabled in Security Hub\n security_hub_prowler_integration_arn = f\"arn:{partition}:securityhub:{region}:{aws_account_number}:product-subscription/{SECURITY_HUB_INTEGRATION_NAME}\"\n if security_hub_prowler_integration_arn not in str(\n security_hub_client.list_enabled_products_for_import()\n ):\n logger.error(\n f\"Security Hub is enabled in {region} but Prowler integration does not accept findings. 
More info: https://docs.prowler.cloud/en/latest/tutorials/aws/securityhub/\"\n )\n else:\n prowler_integration_enabled = True\n\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__} -- [{error.__traceback__.tb_lineno}]:{error} in region {region}\"\n )\n\n finally:\n return prowler_integration_enabled\n\n\ndef batch_send_to_security_hub(\n security_hub_findings_per_region: dict,\n session: session.Session,\n) -> int:\n \"\"\"\n send_to_security_hub sends findings to Security Hub and returns the number of findings that were successfully sent.\n \"\"\"\n\n success_count = 0\n try:\n # Iterate findings by region\n for region, findings in security_hub_findings_per_region.items():\n # Send findings to Security Hub\n logger.info(f\"Sending findings to Security Hub in the region {region}\")\n\n security_hub_client = session.client(\"securityhub\", region_name=region)\n\n success_count = __send_findings_to_security_hub__(\n findings, region, security_hub_client\n )\n\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__} -- [{error.__traceback__.tb_lineno}]:{error} in region {region}\"\n )\n return success_count\n\n\n# Move previous Security Hub check findings to ARCHIVED (as prowler didn't re-detect them)\ndef resolve_security_hub_previous_findings(\n security_hub_findings_per_region: dict, audit_info: AWS_Audit_Info\n) -> list:\n \"\"\"\n resolve_security_hub_previous_findings archives all the findings that does not appear in the current execution\n \"\"\"\n logger.info(\"Checking previous findings in Security Hub to archive them.\")\n success_count = 0\n for region in security_hub_findings_per_region.keys():\n try:\n current_findings = security_hub_findings_per_region[region]\n # Get current findings IDs\n current_findings_ids = []\n for finding in current_findings:\n current_findings_ids.append(finding[\"Id\"])\n # Get findings of that region\n security_hub_client = audit_info.audit_session.client(\n \"securityhub\", region_name=region\n )\n findings_filter = {\n \"ProductName\": [{\"Value\": \"Prowler\", \"Comparison\": \"EQUALS\"}],\n \"RecordState\": [{\"Value\": \"ACTIVE\", \"Comparison\": \"EQUALS\"}],\n \"AwsAccountId\": [\n {\"Value\": audit_info.audited_account, \"Comparison\": \"EQUALS\"}\n ],\n \"Region\": [{\"Value\": region, \"Comparison\": \"EQUALS\"}],\n }\n get_findings_paginator = security_hub_client.get_paginator(\"get_findings\")\n findings_to_archive = []\n for page in get_findings_paginator.paginate(Filters=findings_filter):\n # Archive findings that have not appear in this execution\n for finding in page[\"Findings\"]:\n if finding[\"Id\"] not in current_findings_ids:\n finding[\"RecordState\"] = \"ARCHIVED\"\n finding[\"UpdatedAt\"] = timestamp_utc.strftime(\n \"%Y-%m-%dT%H:%M:%SZ\"\n )\n\n findings_to_archive.append(finding)\n logger.info(f\"Archiving {len(findings_to_archive)} findings.\")\n\n # Send archive findings to SHub\n success_count += __send_findings_to_security_hub__(\n findings_to_archive, region, security_hub_client\n )\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__} -- [{error.__traceback__.tb_lineno}]:{error} in region {region}\"\n )\n return success_count\n\n\ndef __send_findings_to_security_hub__(\n findings: [dict], region: str, security_hub_client\n):\n \"\"\"Private function send_findings_to_security_hub chunks the findings in groups of 100 findings and send them to AWS Security Hub. 
It returns the number of sent findings.\"\"\"\n success_count = 0\n try:\n list_chunked = [\n findings[i : i + SECURITY_HUB_MAX_BATCH]\n for i in range(0, len(findings), SECURITY_HUB_MAX_BATCH)\n ]\n\n for findings in list_chunked:\n batch_import = security_hub_client.batch_import_findings(Findings=findings)\n if batch_import[\"FailedCount\"] > 0:\n failed_import = batch_import[\"FailedFindings\"][0]\n logger.error(\n f\"Failed to send findings to AWS Security Hub -- {failed_import['ErrorCode']} -- {failed_import['ErrorMessage']}\"\n )\n success_count += batch_import[\"SuccessCount\"]\n\n except Exception as error:\n logger.error(\n f\"{error.__class__.__name__} -- [{error.__traceback__.tb_lineno}]:{error} in region {region}\"\n )\n finally:\n return success_count\n",
"path": "prowler/providers/aws/lib/security_hub/security_hub.py"
},
{
"content": "import importlib\nimport sys\nfrom dataclasses import dataclass\nfrom os import makedirs\nfrom os.path import isdir\n\nfrom prowler.config.config import change_config_var, output_file_timestamp\nfrom prowler.lib.logger import logger\n\n\ndef set_provider_output_options(\n provider: str, arguments, audit_info, allowlist_file, bulk_checks_metadata\n):\n \"\"\"\n set_provider_output_options configures automatically the outputs based on the selected provider and returns the Provider_Output_Options object.\n \"\"\"\n try:\n # Dynamically load the Provider_Output_Options class\n provider_output_class = f\"{provider.capitalize()}_Output_Options\"\n provider_output_options = getattr(\n importlib.import_module(__name__), provider_output_class\n )(arguments, audit_info, allowlist_file, bulk_checks_metadata)\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n sys.exit(1)\n else:\n return provider_output_options\n\n\ndef get_provider_output_model(audit_info_class_name):\n \"\"\"\n get_provider_output_model returns the model _Check_Output_CSV for each provider\n \"\"\"\n # from AWS_Audit_Info -> AWS -> aws -> Aws\n output_provider = audit_info_class_name.split(\"_\", 1)[0].lower().capitalize()\n output_provider_model_name = f\"{output_provider}_Check_Output_CSV\"\n output_provider_models_path = \"prowler.lib.outputs.models\"\n output_provider_model = getattr(\n importlib.import_module(output_provider_models_path), output_provider_model_name\n )\n\n return output_provider_model\n\n\n@dataclass\nclass Provider_Output_Options:\n is_quiet: bool\n output_modes: list\n output_directory: str\n allowlist_file: str\n bulk_checks_metadata: dict\n verbose: str\n output_filename: str\n only_logs: bool\n unix_timestamp: bool\n\n def __init__(self, arguments, allowlist_file, bulk_checks_metadata):\n self.is_quiet = arguments.quiet\n self.output_modes = arguments.output_modes\n self.output_directory = arguments.output_directory\n self.verbose = arguments.verbose\n self.bulk_checks_metadata = bulk_checks_metadata\n self.allowlist_file = allowlist_file\n self.only_logs = arguments.only_logs\n self.unix_timestamp = arguments.unix_timestamp\n # Check output directory, if it is not created -> create it\n if arguments.output_directory:\n if not isdir(arguments.output_directory):\n if arguments.output_modes:\n makedirs(arguments.output_directory)\n\n\nclass Azure_Output_Options(Provider_Output_Options):\n def __init__(self, arguments, audit_info, allowlist_file, bulk_checks_metadata):\n # First call Provider_Output_Options init\n super().__init__(arguments, allowlist_file, bulk_checks_metadata)\n\n # Check if custom output filename was input, if not, set the default\n if (\n not hasattr(arguments, \"output_filename\")\n or arguments.output_filename is None\n ):\n if (\n audit_info.identity.domain\n != \"Unknown tenant domain (missing AAD permissions)\"\n ):\n self.output_filename = f\"prowler-output-{audit_info.identity.domain}-{output_file_timestamp}\"\n else:\n self.output_filename = f\"prowler-output-{'-'.join(audit_info.identity.tenant_ids)}-{output_file_timestamp}\"\n else:\n self.output_filename = arguments.output_filename\n\n\nclass Gcp_Output_Options(Provider_Output_Options):\n def __init__(self, arguments, audit_info, allowlist_file, bulk_checks_metadata):\n # First call Provider_Output_Options init\n super().__init__(arguments, allowlist_file, bulk_checks_metadata)\n\n # Check if custom output filename was input, if not, set 
the default\n if (\n not hasattr(arguments, \"output_filename\")\n or arguments.output_filename is None\n ):\n self.output_filename = f\"prowler-output-{audit_info.default_project_id}-{output_file_timestamp}\"\n else:\n self.output_filename = arguments.output_filename\n\n\nclass Aws_Output_Options(Provider_Output_Options):\n security_hub_enabled: bool\n\n def __init__(self, arguments, audit_info, allowlist_file, bulk_checks_metadata):\n # First call Provider_Output_Options init\n super().__init__(arguments, allowlist_file, bulk_checks_metadata)\n\n # Confire Shodan API\n if arguments.shodan:\n audit_info = change_config_var(\n \"shodan_api_key\", arguments.shodan, audit_info\n )\n\n # Check if custom output filename was input, if not, set the default\n if (\n not hasattr(arguments, \"output_filename\")\n or arguments.output_filename is None\n ):\n self.output_filename = (\n f\"prowler-output-{audit_info.audited_account}-{output_file_timestamp}\"\n )\n else:\n self.output_filename = arguments.output_filename\n\n # Security Hub Outputs\n self.security_hub_enabled = arguments.security_hub\n if arguments.security_hub:\n if not self.output_modes:\n self.output_modes = [\"json-asff\"]\n else:\n self.output_modes.append(\"json-asff\")\n",
"path": "prowler/providers/common/outputs.py"
},
{
"content": "import uuid\nfrom argparse import ArgumentTypeError\n\nimport pytest\nfrom mock import patch\n\nfrom prowler.lib.cli.parser import ProwlerArgumentParser\nfrom prowler.providers.azure.lib.arguments.arguments import validate_azure_region\n\nprowler_command = \"prowler\"\n\n# capsys\n# https://docs.pytest.org/en/7.1.x/how-to/capture-stdout-stderr.html\nprowler_default_usage_error = \"usage: prowler [-h] [-v] {aws,azure,gcp} ...\"\n\n\ndef mock_get_available_providers():\n return [\"aws\", \"azure\", \"gcp\"]\n\n\nclass Test_Parser:\n def setup_method(self):\n # We need this to mock the get_available_providers function call\n # since the importlib.import_module is not working starting from the test class\n self.patch_get_available_providers = patch(\n \"prowler.providers.common.arguments.get_available_providers\",\n new=mock_get_available_providers,\n )\n self.patch_get_available_providers.start()\n\n # Init parser\n self.parser = ProwlerArgumentParser()\n\n def test_default_parser_no_arguments_aws(self):\n provider = \"aws\"\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert parsed.provider == provider\n assert not parsed.quiet\n assert len(parsed.output_modes) == 4\n assert \"csv\" in parsed.output_modes\n assert \"html\" in parsed.output_modes\n assert \"json\" in parsed.output_modes\n assert not parsed.output_filename\n assert \"output\" in parsed.output_directory\n assert not parsed.verbose\n assert not parsed.no_banner\n assert not parsed.slack\n assert not parsed.unix_timestamp\n assert parsed.log_level == \"CRITICAL\"\n assert not parsed.log_file\n assert not parsed.only_logs\n assert not parsed.checks\n assert not parsed.checks_file\n assert not parsed.checks_folder\n assert not parsed.services\n assert not parsed.severity\n assert not parsed.compliance\n assert len(parsed.categories) == 0\n assert not parsed.excluded_checks\n assert not parsed.excluded_services\n assert not parsed.list_checks\n assert not parsed.list_services\n assert not parsed.list_compliance\n assert not parsed.list_compliance_requirements\n assert not parsed.list_categories\n assert not parsed.profile\n assert not parsed.role\n assert parsed.session_duration == 3600\n assert not parsed.external_id\n assert not parsed.region\n assert not parsed.organizations_role\n assert not parsed.security_hub\n assert not parsed.quick_inventory\n assert not parsed.output_bucket\n assert not parsed.output_bucket_no_assume\n assert not parsed.shodan\n assert not parsed.allowlist_file\n assert not parsed.resource_tags\n assert not parsed.ignore_unused_services\n\n def test_default_parser_no_arguments_azure(self):\n provider = \"azure\"\n command = [prowler_command, provider]\n parsed = self.parser.parse(command)\n assert parsed.provider == provider\n assert not parsed.quiet\n assert len(parsed.output_modes) == 4\n assert \"csv\" in parsed.output_modes\n assert \"html\" in parsed.output_modes\n assert \"json\" in parsed.output_modes\n assert not parsed.output_filename\n assert \"output\" in parsed.output_directory\n assert not parsed.verbose\n assert not parsed.no_banner\n assert not parsed.slack\n assert not parsed.unix_timestamp\n assert parsed.log_level == \"CRITICAL\"\n assert not parsed.log_file\n assert not parsed.only_logs\n assert not parsed.checks\n assert not parsed.checks_file\n assert not parsed.checks_folder\n assert not parsed.services\n assert not parsed.severity\n assert not parsed.compliance\n assert len(parsed.categories) == 0\n assert not parsed.excluded_checks\n assert not 
parsed.excluded_services\n assert not parsed.list_checks\n assert not parsed.list_services\n assert not parsed.list_compliance\n assert not parsed.list_compliance_requirements\n assert not parsed.list_categories\n assert len(parsed.subscription_ids) == 0\n assert not parsed.az_cli_auth\n assert not parsed.sp_env_auth\n assert not parsed.browser_auth\n assert not parsed.managed_identity_auth\n\n def test_default_parser_no_arguments_gcp(self):\n provider = \"gcp\"\n command = [prowler_command, provider]\n parsed = self.parser.parse(command)\n assert parsed.provider == provider\n assert not parsed.quiet\n assert len(parsed.output_modes) == 4\n assert \"csv\" in parsed.output_modes\n assert \"html\" in parsed.output_modes\n assert \"json\" in parsed.output_modes\n assert not parsed.output_filename\n assert \"output\" in parsed.output_directory\n assert not parsed.verbose\n assert not parsed.no_banner\n assert not parsed.slack\n assert not parsed.unix_timestamp\n assert parsed.log_level == \"CRITICAL\"\n assert not parsed.log_file\n assert not parsed.only_logs\n assert not parsed.checks\n assert not parsed.checks_file\n assert not parsed.checks_folder\n assert not parsed.services\n assert not parsed.severity\n assert not parsed.compliance\n assert len(parsed.categories) == 0\n assert not parsed.excluded_checks\n assert not parsed.excluded_services\n assert not parsed.list_checks\n assert not parsed.list_services\n assert not parsed.list_compliance\n assert not parsed.list_compliance_requirements\n assert not parsed.list_categories\n assert not parsed.credentials_file\n\n def test_root_parser_version_short(self):\n command = [prowler_command, \"-v\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 0\n\n def test_root_parser_version_long(self):\n command = [prowler_command, \"--version\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 0\n\n def test_root_parser_help_short(self):\n command = [prowler_command, \"-h\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 0\n\n def test_root_parser_help_long(self):\n command = [prowler_command, \"--help\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 0\n\n def test_root_parser_default_aws_provider(self):\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"aws\"\n\n def test_root_parser_aws_provider(self):\n command = [prowler_command, \"aws\"]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"aws\"\n\n def test_root_parser_azure_provider(self):\n command = [prowler_command, \"azure\"]\n parsed = self.parser.parse(command)\n print(parsed)\n assert parsed.provider == \"azure\"\n\n def test_root_parser_gcp_provider(self):\n command = [prowler_command, \"gcp\"]\n parsed = self.parser.parse(command)\n print(parsed)\n assert parsed.provider == \"gcp\"\n\n def test_root_parser_quiet_short(self):\n command = [prowler_command, \"-q\"]\n parsed = self.parser.parse(command)\n assert parsed.quiet\n\n def test_root_parser_quiet_long(self):\n command = [prowler_command, \"--quiet\"]\n parsed = self.parser.parse(command)\n assert parsed.quiet\n\n def 
test_root_parser_exit_code_3_short(self):\n command = [prowler_command, \"-z\"]\n parsed = self.parser.parse(command)\n assert parsed.ignore_exit_code_3\n\n def test_root_parser_exit_code_3_long(self):\n command = [prowler_command, \"--ignore-exit-code-3\"]\n parsed = self.parser.parse(command)\n assert parsed.ignore_exit_code_3\n\n def test_root_parser_default_output_modes(self):\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert len(parsed.output_modes) == 4\n assert \"csv\" in parsed.output_modes\n assert \"json\" in parsed.output_modes\n assert \"html\" in parsed.output_modes\n\n def test_root_parser_output_modes_short(self):\n command = [prowler_command, \"-M\", \"csv\"]\n parsed = self.parser.parse(command)\n assert len(parsed.output_modes) == 1\n assert \"csv\" in parsed.output_modes\n\n def test_root_parser_output_modes_long(self):\n command = [prowler_command, \"--output-modes\", \"csv\"]\n parsed = self.parser.parse(command)\n assert len(parsed.output_modes) == 1\n assert \"csv\" in parsed.output_modes\n\n def test_root_parser_output_filename_short(self):\n filename = \"test_output.txt\"\n command = [prowler_command, \"-F\", filename]\n parsed = self.parser.parse(command)\n assert parsed.output_filename == filename\n\n def test_root_parser_output_filename_long(self):\n filename = \"test_output.txt\"\n command = [prowler_command, \"-F\", filename]\n parsed = self.parser.parse(command)\n assert parsed.output_filename == filename\n\n def test_root_parser_output_directory_default(self):\n dirname = \"output\"\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert dirname in parsed.output_directory\n\n def test_root_parser_output_directory_default_short(self):\n dirname = \"outputs\"\n command = [prowler_command, \"-o\", dirname]\n parsed = self.parser.parse(command)\n assert parsed.output_directory == dirname\n\n def test_root_parser_output_directory_default_long(self):\n dirname = \"outputs\"\n command = [prowler_command, \"--output-directory\", dirname]\n parsed = self.parser.parse(command)\n assert parsed.output_directory == dirname\n\n def test_root_parser_verbose(self):\n command = [prowler_command, \"--verbose\"]\n parsed = self.parser.parse(command)\n assert parsed.verbose\n\n def test_root_parser_no_banner_short(self):\n command = [prowler_command, \"-b\"]\n parsed = self.parser.parse(command)\n assert parsed.no_banner\n\n def test_root_parser_no_banner_long(self):\n command = [prowler_command, \"--no-banner\"]\n parsed = self.parser.parse(command)\n assert parsed.no_banner\n\n def test_root_parser_slack(self):\n command = [prowler_command, \"--slack\"]\n parsed = self.parser.parse(command)\n assert parsed.slack\n\n def test_root_parser_unix_timestamp(self):\n command = [prowler_command, \"--unix-timestamp\"]\n parsed = self.parser.parse(command)\n assert parsed.unix_timestamp\n\n def test_logging_parser_only_logs_set(self):\n command = [prowler_command, \"--only-logs\"]\n parsed = self.parser.parse(command)\n assert parsed.only_logs\n assert parsed.no_banner\n\n def test_logging_parser_log_level_default(self):\n log_level = \"CRITICAL\"\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_level_debug(self):\n log_level = \"DEBUG\"\n command = [prowler_command, \"--log-level\", log_level]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_level_info(self):\n log_level = \"INFO\"\n 
command = [prowler_command, \"--log-level\", log_level]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_level_warning(self):\n log_level = \"WARNING\"\n command = [prowler_command, \"--log-level\", log_level]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_level_error(self):\n log_level = \"ERROR\"\n command = [prowler_command, \"--log-level\", log_level]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_level_critical(self):\n log_level = \"CRITICAL\"\n command = [prowler_command, \"--log-level\", log_level]\n parsed = self.parser.parse(command)\n assert parsed.log_level == log_level\n\n def test_logging_parser_log_file_default(self):\n command = [prowler_command]\n parsed = self.parser.parse(command)\n assert not parsed.log_file\n\n def test_logging_parser_log_file(self):\n log_file = \"test.log\"\n command = [prowler_command, \"--log-file\", log_file]\n parsed = self.parser.parse(command)\n assert parsed.log_file == log_file\n\n def test_exclude_checks_parser_excluded_checks_short(self):\n excluded_checks = \"check_test\"\n command = [prowler_command, \"-e\", excluded_checks]\n parsed = self.parser.parse(command)\n assert excluded_checks in parsed.excluded_checks\n\n def test_exclude_checks_parser_excluded_checks_short_two(self):\n excluded_checks_1 = \"check_test_1\"\n excluded_checks_2 = \"check_test_2\"\n command = [prowler_command, \"-e\", excluded_checks_1, excluded_checks_2]\n parsed = self.parser.parse(command)\n assert len(parsed.excluded_checks) == 2\n assert excluded_checks_1 in parsed.excluded_checks\n assert excluded_checks_2 in parsed.excluded_checks\n\n def test_exclude_checks_parser_excluded_checks_long(self):\n excluded_check = \"check_test\"\n command = [prowler_command, \"--excluded-checks\", excluded_check]\n parsed = self.parser.parse(command)\n assert excluded_check in parsed.excluded_checks\n\n def test_exclude_checks_parser_excluded_checks_long_two(self):\n excluded_checks_1 = \"check_test_1\"\n excluded_checks_2 = \"check_test_2\"\n command = [\n prowler_command,\n \"--excluded-checks\",\n excluded_checks_1,\n excluded_checks_2,\n ]\n parsed = self.parser.parse(command)\n assert len(parsed.excluded_checks) == 2\n assert excluded_checks_1 in parsed.excluded_checks\n assert excluded_checks_2 in parsed.excluded_checks\n\n def test_exclude_checks_parser_excluded_services_long(self):\n excluded_service = \"accessanalyzer\"\n command = [prowler_command, \"--excluded-services\", excluded_service]\n parsed = self.parser.parse(command)\n assert excluded_service in parsed.excluded_services\n\n def test_exclude_checks_parser_excluded_services_long_two(self):\n excluded_service_1 = \"accessanalyzer\"\n excluded_service_2 = \"s3\"\n command = [\n prowler_command,\n \"--excluded-services\",\n excluded_service_1,\n excluded_service_2,\n ]\n parsed = self.parser.parse(command)\n assert len(parsed.excluded_services) == 2\n assert excluded_service_1 in parsed.excluded_services\n assert excluded_service_2 in parsed.excluded_services\n\n def test_checks_parser_checks_short(self):\n check = \"check_test_1\"\n argument = \"-c\"\n command = [prowler_command, argument, check]\n parsed = self.parser.parse(command)\n assert len(parsed.checks) == 1\n assert check in parsed.checks\n\n def test_checks_parser_checks_short_two(self):\n check_1 = \"check_test_1\"\n check_2 = \"check_test_2\"\n argument = \"-c\"\n 
command = [prowler_command, argument, check_1, check_2]\n parsed = self.parser.parse(command)\n assert len(parsed.checks) == 2\n assert check_1 in parsed.checks\n assert check_2 in parsed.checks\n\n def test_checks_parser_checks_long(self):\n check = \"check_test_1\"\n argument = \"--checks\"\n command = [prowler_command, argument, check]\n parsed = self.parser.parse(command)\n assert len(parsed.checks) == 1\n assert check in parsed.checks\n\n def test_checks_parser_checks_long_two(self):\n check_1 = \"check_test_1\"\n check_2 = \"check_test_2\"\n argument = \"--checks\"\n command = [prowler_command, argument, check_1, check_2]\n parsed = self.parser.parse(command)\n assert len(parsed.checks) == 2\n assert check_1 in parsed.checks\n assert check_2 in parsed.checks\n\n def test_checks_parser_checks_file_short(self):\n argument = \"-C\"\n filename = \"checks.txt\"\n command = [prowler_command, argument, filename]\n parsed = self.parser.parse(command)\n assert parsed.checks_file == filename\n\n def test_checks_parser_checks_file_long(self):\n argument = \"--checks-file\"\n filename = \"checks.txt\"\n command = [prowler_command, argument, filename]\n parsed = self.parser.parse(command)\n assert parsed.checks_file == filename\n\n def test_checks_parser_checks_folder_short(self):\n argument = \"-x\"\n filename = \"custom-checks-folder/\"\n command = [prowler_command, argument, filename]\n parsed = self.parser.parse(command)\n assert parsed.checks_folder == filename\n\n def test_checks_parser_checks_folder_long(self):\n argument = \"--checks-folder\"\n filename = \"custom-checks-folder/\"\n command = [prowler_command, argument, filename]\n parsed = self.parser.parse(command)\n assert parsed.checks_folder == filename\n\n def test_checks_parser_services_short(self):\n argument = \"-s\"\n service_1 = \"iam\"\n command = [prowler_command, argument, service_1]\n parsed = self.parser.parse(command)\n assert service_1 in parsed.services\n\n def test_checks_parser_services_short_two(self):\n argument = \"-s\"\n service_1 = \"iam\"\n service_2 = \"s3\"\n command = [prowler_command, argument, service_1, service_2]\n parsed = self.parser.parse(command)\n assert len(parsed.services) == 2\n assert service_1 in parsed.services\n assert service_2 in parsed.services\n\n def test_checks_parser_services_long(self):\n argument = \"--services\"\n service_1 = \"iam\"\n command = [prowler_command, argument, service_1]\n parsed = self.parser.parse(command)\n assert service_1 in parsed.services\n\n def test_checks_parser_services_long_two(self):\n argument = \"--services\"\n service_1 = \"iam\"\n service_2 = \"s3\"\n command = [prowler_command, argument, service_1, service_2]\n parsed = self.parser.parse(command)\n assert len(parsed.services) == 2\n assert service_1 in parsed.services\n assert service_2 in parsed.services\n\n def test_checks_parser_services_with_severity(self):\n argument1 = \"--services\"\n service_1 = \"iam\"\n argument2 = \"--severity\"\n severity = \"low\"\n command = [prowler_command, argument1, service_1, argument2, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.services) == 1\n assert service_1 in parsed.services\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def test_checks_parser_informational_severity(self):\n argument = \"--severity\"\n severity = \"informational\"\n command = [prowler_command, argument, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def 
test_checks_parser_low_severity(self):\n argument = \"--severity\"\n severity = \"low\"\n command = [prowler_command, argument, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def test_checks_parser_medium_severity(self):\n argument = \"--severity\"\n severity = \"medium\"\n command = [prowler_command, argument, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def test_checks_parser_high_severity(self):\n argument = \"--severity\"\n severity = \"high\"\n command = [prowler_command, argument, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def test_checks_parser_critical_severity(self):\n argument = \"--severity\"\n severity = \"critical\"\n command = [prowler_command, argument, severity]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 1\n assert severity in parsed.severity\n\n def test_checks_parser_two_severities(self):\n argument = \"--severity\"\n severity_1 = \"critical\"\n severity_2 = \"high\"\n command = [prowler_command, argument, severity_1, severity_2]\n parsed = self.parser.parse(command)\n assert len(parsed.severity) == 2\n assert severity_1 in parsed.severity\n assert severity_2 in parsed.severity\n\n def test_checks_parser_wrong_severity(self, capsys):\n argument = \"--severity\"\n severity = \"kk\"\n command = [prowler_command, argument, severity]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_checks_parser_wrong_compliance(self):\n argument = \"--compliance\"\n framework = \"ens_rd2022_azure\"\n command = [prowler_command, argument, framework]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_checks_parser_compliance(self):\n argument = \"--compliance\"\n framework = \"cis_1.5_aws\"\n command = [prowler_command, argument, framework]\n parsed = self.parser.parse(command)\n assert len(parsed.compliance) == 1\n assert framework in parsed.compliance\n\n def test_checks_parser_compliance_two(self):\n argument = \"--compliance\"\n framework_1 = \"cis_1.5_aws\"\n framework_2 = \"ens_rd2022_aws\"\n command = [prowler_command, argument, framework_1, framework_2]\n parsed = self.parser.parse(command)\n assert len(parsed.compliance) == 2\n assert framework_1 in parsed.compliance\n assert framework_2 in parsed.compliance\n\n def test_checks_parser_categories(self):\n argument = \"--categories\"\n category = \"secrets\"\n command = [prowler_command, argument, category]\n parsed = self.parser.parse(command)\n assert len(parsed.categories) == 1\n assert category in parsed.categories\n\n def test_checks_parser_categories_two(self):\n argument = \"--categories\"\n category_1 = \"secrets\"\n category_2 = \"forensics\"\n command = [prowler_command, argument, category_1, category_2]\n parsed = self.parser.parse(command)\n assert len(parsed.categories) == 2\n assert category_1 in parsed.categories\n assert category_2 in parsed.categories\n\n def test_list_checks_parser_list_checks_short(self):\n argument = \"-l\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_checks\n\n def test_list_checks_parser_list_checks_long(self):\n argument = 
\"--list-checks\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_checks\n\n def test_list_checks_parser_list_checks_json(self):\n argument = \"--list-checks-json\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_checks_json\n\n def test_list_checks_parser_list_services(self):\n argument = \"--list-services\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_services\n\n def test_list_checks_parser_list_compliance(self):\n argument = \"--list-compliance\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_compliance\n\n def test_list_checks_parser_list_categories(self):\n argument = \"--list-categories\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.list_categories\n\n def test_list_checks_parser_list_compliance_requirements_no_arguments(self):\n argument = \"--list-compliance-requirements\"\n command = [prowler_command, argument]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_list_checks_parser_list_compliance_requirements_bad(self):\n argument = \"--list-compliance-requirements\"\n bad_framework = \"cis_1.4_azure\"\n command = [prowler_command, argument, bad_framework]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_list_checks_parser_list_compliance_requirements_one(self):\n argument = \"--list-compliance-requirements\"\n framework = \"cis_1.4_aws\"\n command = [prowler_command, argument, framework]\n parsed = self.parser.parse(command)\n assert len(parsed.list_compliance_requirements) == 1\n assert framework in parsed.list_compliance_requirements\n\n def test_aws_parser_profile_no_profile_short(self):\n argument = \"-p\"\n profile = \"\"\n command = [prowler_command, argument, profile]\n parsed = self.parser.parse(command)\n assert parsed.profile == profile\n\n def test_aws_parser_profile_short(self):\n argument = \"-p\"\n profile = \"test\"\n command = [prowler_command, argument, profile]\n parsed = self.parser.parse(command)\n assert parsed.profile == profile\n\n def test_aws_parser_profile_long(self):\n argument = \"--profile\"\n profile = \"test\"\n command = [prowler_command, argument, profile]\n parsed = self.parser.parse(command)\n assert parsed.profile == profile\n\n def test_aws_parser_no_role_arn_short(self):\n argument = \"-R\"\n role = \"\"\n command = [prowler_command, argument, role]\n parsed = self.parser.parse(command)\n assert parsed.role == role\n\n def test_aws_parser_role_arn_short(self):\n argument = \"-R\"\n role = \"test\"\n command = [prowler_command, argument, role]\n parsed = self.parser.parse(command)\n assert parsed.role == role\n\n def test_aws_parser_role_arn_long(self):\n argument = \"--role\"\n role = \"test\"\n command = [prowler_command, argument, role]\n parsed = self.parser.parse(command)\n assert parsed.role == role\n\n def test_aws_parser_mfa(self):\n argument = \"--mfa\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.mfa\n\n def test_aws_parser_session_duration_short(self, capsys):\n argument = \"-T\"\n duration = \"900\"\n command = [prowler_command, argument, duration]\n with pytest.raises(SystemExit) as 
wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: aws: To use -I/-T options -R option is needed\\n\"\n )\n\n def test_aws_parser_session_duration_long(self, capsys):\n argument = \"--session-duration\"\n duration = \"900\"\n command = [prowler_command, argument, duration]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: aws: To use -I/-T options -R option is needed\\n\"\n )\n\n # TODO\n def test_aws_parser_external_id_no_short(self):\n argument = \"-I\"\n external_id = \"\"\n command = [prowler_command, argument, external_id]\n parsed = self.parser.parse(command)\n assert not parsed.profile\n\n def test_aws_parser_external_id_short(self, capsys):\n argument = \"-I\"\n external_id = str(uuid.uuid4())\n command = [prowler_command, argument, external_id]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: aws: To use -I/-T options -R option is needed\\n\"\n )\n\n def test_aws_parser_external_id_long(self, capsys):\n argument = \"--external-id\"\n external_id = str(uuid.uuid4())\n command = [prowler_command, argument, external_id]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: aws: To use -I/-T options -R option is needed\\n\"\n )\n\n def test_aws_parser_region_f(self):\n argument = \"-f\"\n region = \"eu-west-1\"\n command = [prowler_command, argument, region]\n parsed = self.parser.parse(command)\n assert len(parsed.region) == 1\n assert region in parsed.region\n\n def test_aws_parser_region_f_bad_region(self):\n argument = \"-f\"\n region = \"no-region\"\n command = [prowler_command, argument, region]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_aws_parser_region(self):\n argument = \"--region\"\n region = \"eu-west-1\"\n command = [prowler_command, argument, region]\n parsed = self.parser.parse(command)\n assert len(parsed.region) == 1\n assert region in parsed.region\n\n def test_aws_parser_two_regions(self):\n argument = \"--region\"\n region_1 = \"eu-west-1\"\n region_2 = \"eu-west-2\"\n command = [prowler_command, argument, region_1, region_2]\n parsed = self.parser.parse(command)\n assert len(parsed.region) == 2\n assert region_1 in parsed.region\n assert region_2 in parsed.region\n\n def test_aws_parser_bad_region(self):\n argument = \"--region\"\n region = \"no-region\"\n command = [prowler_command, argument, region]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_aws_parser_filter_region(self):\n argument = \"--filter-region\"\n region = \"eu-west-1\"\n command = [prowler_command, argument, region]\n parsed = self.parser.parse(command)\n assert 
len(parsed.region) == 1\n assert region in parsed.region\n\n def test_aws_parser_bad_filter_region(self):\n argument = \"--filter-region\"\n region = \"no-region\"\n command = [prowler_command, argument, region]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n\n def test_aws_parser_organizations_role_short(self):\n argument = \"-O\"\n organizations_role = \"role_test\"\n command = [prowler_command, argument, organizations_role]\n parsed = self.parser.parse(command)\n assert parsed.organizations_role == organizations_role\n\n def test_aws_parser_organizations_role_long(self):\n argument = \"--organizations-role\"\n organizations_role = \"role_test\"\n command = [prowler_command, argument, organizations_role]\n parsed = self.parser.parse(command)\n assert parsed.organizations_role == organizations_role\n\n def test_aws_parser_security_hub_short(self):\n argument = \"-S\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.security_hub\n\n def test_aws_parser_security_hub_long(self):\n argument = \"--security-hub\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.security_hub\n\n def test_aws_parser_skip_sh_update(self):\n argument = \"--skip-sh-update\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.skip_sh_update\n\n def test_aws_parser_quick_inventory_short(self):\n argument = \"-i\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.quick_inventory\n\n def test_aws_parser_quick_inventory_long(self):\n argument = \"--quick-inventory\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.quick_inventory\n\n def test_aws_parser_output_bucket_short(self):\n argument = \"-B\"\n bucket = \"test-bucket\"\n command = [prowler_command, argument, bucket]\n parsed = self.parser.parse(command)\n assert parsed.output_bucket == bucket\n\n def test_aws_parser_output_bucket_long(self):\n argument = \"--output-bucket\"\n bucket = \"test-bucket\"\n command = [prowler_command, argument, bucket]\n parsed = self.parser.parse(command)\n assert parsed.output_bucket == bucket\n\n def test_aws_parser_output_bucket_no_assume_short(self):\n argument = \"-D\"\n bucket = \"test-bucket\"\n command = [prowler_command, argument, bucket]\n parsed = self.parser.parse(command)\n assert parsed.output_bucket_no_assume == bucket\n\n def test_aws_parser_output_bucket_no_assume_long(self):\n argument = \"--output-bucket-no-assume\"\n bucket = \"test-bucket\"\n command = [prowler_command, argument, bucket]\n parsed = self.parser.parse(command)\n assert parsed.output_bucket_no_assume == bucket\n\n def test_aws_parser_shodan_short(self):\n argument = \"-N\"\n shodan_api_key = str(uuid.uuid4())\n command = [prowler_command, argument, shodan_api_key]\n parsed = self.parser.parse(command)\n assert parsed.shodan == shodan_api_key\n\n def test_aws_parser_shodan_long(self):\n argument = \"--shodan\"\n shodan_api_key = str(uuid.uuid4())\n command = [prowler_command, argument, shodan_api_key]\n parsed = self.parser.parse(command)\n assert parsed.shodan == shodan_api_key\n\n def test_aws_parser_allowlist_short(self):\n argument = \"-w\"\n allowlist_file = \"allowlist.txt\"\n command = [prowler_command, argument, allowlist_file]\n parsed = self.parser.parse(command)\n assert parsed.allowlist_file == 
allowlist_file\n\n def test_aws_parser_allowlist_long(self):\n argument = \"--allowlist-file\"\n allowlist_file = \"allowlist.txt\"\n command = [prowler_command, argument, allowlist_file]\n parsed = self.parser.parse(command)\n assert parsed.allowlist_file == allowlist_file\n\n def test_aws_parser_resource_tags(self):\n argument = \"--resource-tags\"\n scan_tag1 = \"Key=Value\"\n scan_tag2 = \"Key2=Value2\"\n command = [prowler_command, argument, scan_tag1, scan_tag2]\n parsed = self.parser.parse(command)\n assert len(parsed.resource_tags) == 2\n assert scan_tag1 in parsed.resource_tags\n assert scan_tag2 in parsed.resource_tags\n\n def test_aws_parser_resource_arn(self):\n argument = \"--resource-arn\"\n resource_arn1 = \"arn:aws:iam::012345678910:user/test\"\n resource_arn2 = \"arn:aws:ec2:us-east-1:123456789012:vpc/vpc-12345678\"\n command = [prowler_command, argument, resource_arn1, resource_arn2]\n parsed = self.parser.parse(command)\n assert len(parsed.resource_arn) == 2\n assert resource_arn1 in parsed.resource_arn\n assert resource_arn2 in parsed.resource_arn\n\n def test_aws_parser_wrong_resource_arn(self):\n argument = \"--resource-arn\"\n resource_arn = \"arn:azure:iam::account:user/test\"\n command = [prowler_command, argument, resource_arn]\n with pytest.raises(SystemExit) as ex:\n self.parser.parse(command)\n assert ex.type == SystemExit\n\n def test_aws_parser_aws_retries_max_attempts(self):\n argument = \"--aws-retries-max-attempts\"\n max_retries = \"10\"\n command = [prowler_command, argument, max_retries]\n parsed = self.parser.parse(command)\n assert parsed.aws_retries_max_attempts == int(max_retries)\n\n def test_aws_parser_ignore_unused_services(self):\n argument = \"--ignore-unused-services\"\n command = [prowler_command, argument]\n parsed = self.parser.parse(command)\n assert parsed.ignore_unused_services\n\n def test_aws_parser_config_file(self):\n argument = \"--config-file\"\n config_file = \"./test-config.yaml\"\n command = [prowler_command, argument, config_file]\n parsed = self.parser.parse(command)\n assert parsed.config_file == config_file\n\n def test_aws_parser_sts_endpoint_region(self):\n argument = \"--sts-endpoint-region\"\n sts_endpoint_region = \"eu-west-1\"\n command = [prowler_command, argument, sts_endpoint_region]\n parsed = self.parser.parse(command)\n assert parsed.sts_endpoint_region == sts_endpoint_region\n\n def test_parser_azure_auth_sp(self):\n argument = \"--sp-env-auth\"\n command = [prowler_command, \"azure\", argument]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.sp_env_auth\n\n def test_parser_azure_auth_browser(self):\n argument = \"--browser-auth\"\n command = [prowler_command, \"azure\", argument]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.browser_auth\n\n def test_parser_azure_tenant_id(self):\n argument = \"--tenant-id\"\n tenant_id = \"test-tenant-id\"\n command = [prowler_command, \"azure\", argument, tenant_id]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.tenant_id == tenant_id\n\n def test_parser_azure_auth_az_cli(self):\n argument = \"--az-cli-auth\"\n command = [prowler_command, \"azure\", argument]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.az_cli_auth\n\n def test_parser_azure_auth_managed_identity(self):\n argument = \"--managed-identity-auth\"\n command = [prowler_command, \"azure\", argument]\n parsed = 
self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.managed_identity_auth\n\n def test_parser_azure_subscription_ids(self):\n argument = \"--subscription-ids\"\n subscription_1 = \"test_subscription_1\"\n subscription_2 = \"test_subscription_2\"\n command = [prowler_command, \"azure\", argument, subscription_1, subscription_2]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert len(parsed.subscription_ids) == 2\n assert parsed.subscription_ids[0] == subscription_1\n assert parsed.subscription_ids[1] == subscription_2\n\n def test_parser_azure_region(self):\n argument = \"--azure-region\"\n region = \"AzureChinaCloud\"\n command = [prowler_command, \"azure\", argument, region]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"azure\"\n assert parsed.azure_region == region\n\n # Test AWS flags with Azure provider\n def test_parser_azure_with_aws_flag(self, capsys):\n command = [prowler_command, \"azure\", \"-p\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: unrecognized arguments: -p\\n\"\n )\n\n # Test Azure flags with AWS provider\n def test_parser_aws_with_azure_flag(self, capsys):\n command = [prowler_command, \"aws\", \"--subscription-ids\"]\n with pytest.raises(SystemExit) as wrapped_exit:\n _ = self.parser.parse(command)\n assert wrapped_exit.type == SystemExit\n assert wrapped_exit.value.code == 2\n assert (\n capsys.readouterr().err\n == f\"{prowler_default_usage_error}\\nprowler: error: unrecognized arguments: --subscription-ids\\n\"\n )\n\n def test_parser_gcp_auth_credentials_file(self):\n argument = \"--credentials-file\"\n file = \"test.json\"\n command = [prowler_command, \"gcp\", argument, file]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"gcp\"\n assert parsed.credentials_file == file\n\n def test_parser_gcp_project_ids(self):\n argument = \"--project-ids\"\n project_1 = \"test_project_1\"\n project_2 = \"test_project_2\"\n command = [prowler_command, \"gcp\", argument, project_1, project_2]\n parsed = self.parser.parse(command)\n assert parsed.provider == \"gcp\"\n assert len(parsed.project_ids) == 2\n assert parsed.project_ids[0] == project_1\n assert parsed.project_ids[1] == project_2\n\n def test_validate_azure_region_valid_regions(self):\n expected_regions = [\n \"AzureChinaCloud\",\n \"AzureUSGovernment\",\n \"AzureGermanCloud\",\n \"AzureCloud\",\n ]\n input_regions = [\n \"AzureChinaCloud\",\n \"AzureUSGovernment\",\n \"AzureGermanCloud\",\n \"AzureCloud\",\n ]\n for region in input_regions:\n assert validate_azure_region(region) in expected_regions\n\n def test_validate_azure_region_invalid_regions(self):\n expected_regions = [\n \"AzureChinaCloud\",\n \"AzureUSGovernment\",\n \"AzureGermanCloud\",\n \"AzureCloud\",\n ]\n invalid_region = \"non-valid-region\"\n with pytest.raises(\n ArgumentTypeError,\n match=f\"Region {invalid_region} not allowed, allowed regions are {' '.join(expected_regions)}\",\n ):\n validate_azure_region(invalid_region)\n",
"path": "tests/lib/cli/parser_test.py"
},
{
"content": "from os import path\n\nimport botocore\nfrom boto3 import session\nfrom mock import MagicMock, patch\n\nfrom prowler.config.config import prowler_version, timestamp_utc\nfrom prowler.lib.check.models import Check_Report, load_check_metadata\n\n# from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info\nfrom prowler.providers.aws.lib.security_hub.security_hub import (\n batch_send_to_security_hub,\n prepare_security_hub_findings,\n verify_security_hub_integration_enabled_per_region,\n)\nfrom tests.providers.aws.audit_info_utils import (\n AWS_ACCOUNT_NUMBER,\n AWS_COMMERCIAL_PARTITION,\n AWS_REGION_EU_WEST_1,\n AWS_REGION_EU_WEST_2,\n set_mocked_aws_audit_info,\n)\n\n# Mocking Security Hub Get Findings\nmake_api_call = botocore.client.BaseClient._make_api_call\n\n\ndef mock_make_api_call(self, operation_name, kwarg):\n if operation_name == \"BatchImportFindings\":\n return {\n \"FailedCount\": 0,\n \"SuccessCount\": 1,\n }\n if operation_name == \"DescribeHub\":\n return {\n \"HubArn\": f\"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:hub/default\",\n \"SubscribedAt\": \"2023-02-07T09:45:43.742Z\",\n \"AutoEnableControls\": True,\n \"ControlFindingGenerator\": \"STANDARD_CONTROL\",\n }\n\n if operation_name == \"ListEnabledProductsForImport\":\n return {\n \"ProductSubscriptions\": [\n f\"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:product-subscription/prowler/prowler\",\n ]\n }\n\n return make_api_call(self, operation_name, kwarg)\n\n\nclass Test_SecurityHub:\n def generate_finding(self, status, region):\n finding = Check_Report(\n load_check_metadata(\n f\"{path.dirname(path.realpath(__file__))}/fixtures/metadata.json\"\n ).json()\n )\n finding.status = status\n finding.status_extended = \"test\"\n finding.resource_id = \"test\"\n finding.resource_arn = \"test\"\n finding.region = region\n\n return finding\n\n def set_mocked_output_options(self, is_quiet):\n output_options = MagicMock\n output_options.bulk_checks_metadata = {}\n output_options.is_quiet = is_quiet\n\n return output_options\n\n def set_mocked_session(self, region):\n # Create mock session\n return session.Session(\n region_name=region,\n )\n\n @patch(\"botocore.client.BaseClient._make_api_call\", new=mock_make_api_call)\n def test_verify_security_hub_integration_enabled_per_region(self):\n session = self.set_mocked_session(AWS_REGION_EU_WEST_1)\n assert verify_security_hub_integration_enabled_per_region(\n AWS_COMMERCIAL_PARTITION, AWS_REGION_EU_WEST_1, session, AWS_ACCOUNT_NUMBER\n )\n\n def test_prepare_security_hub_findings_enabled_region_not_quiet(self):\n enabled_regions = [AWS_REGION_EU_WEST_1]\n output_options = self.set_mocked_output_options(is_quiet=False)\n findings = [self.generate_finding(\"PASS\", AWS_REGION_EU_WEST_1)]\n audit_info = set_mocked_aws_audit_info(\n audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]\n )\n\n assert prepare_security_hub_findings(\n findings,\n audit_info,\n output_options,\n enabled_regions,\n ) == {\n AWS_REGION_EU_WEST_1: [\n {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4\",\n \"ProductArn\": f\"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler\",\n \"RecordState\": \"ACTIVE\",\n \"ProductFields\": {\n \"ProviderName\": \"Prowler\",\n \"ProviderVersion\": prowler_version,\n \"ProwlerResourceName\": \"test\",\n },\n \"GeneratorId\": \"prowler-iam_user_accesskey_unused\",\n \"AwsAccountId\": 
f\"{AWS_ACCOUNT_NUMBER}\",\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": timestamp_utc.strftime(\"%Y-%m-%dT%H:%M:%SZ\"),\n \"UpdatedAt\": timestamp_utc.strftime(\"%Y-%m-%dT%H:%M:%SZ\"),\n \"CreatedAt\": timestamp_utc.strftime(\"%Y-%m-%dT%H:%M:%SZ\"),\n \"Severity\": {\"Label\": \"LOW\"},\n \"Title\": \"Ensure Access Keys unused are disabled\",\n \"Description\": \"test\",\n \"Resources\": [\n {\n \"Type\": \"AwsIamAccessAnalyzer\",\n \"Id\": \"test\",\n \"Partition\": \"aws\",\n \"Region\": f\"{AWS_REGION_EU_WEST_1}\",\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [],\n \"AssociatedStandards\": [],\n },\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"Run sudo yum update and cross your fingers and toes.\",\n \"Url\": \"https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html\",\n }\n },\n }\n ],\n }\n\n def test_prepare_security_hub_findings_quiet_INFO_finding(self):\n enabled_regions = [AWS_REGION_EU_WEST_1]\n output_options = self.set_mocked_output_options(is_quiet=False)\n findings = [self.generate_finding(\"INFO\", AWS_REGION_EU_WEST_1)]\n audit_info = set_mocked_aws_audit_info(\n audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]\n )\n\n assert prepare_security_hub_findings(\n findings,\n audit_info,\n output_options,\n enabled_regions,\n ) == {AWS_REGION_EU_WEST_1: []}\n\n def test_prepare_security_hub_findings_disabled_region(self):\n enabled_regions = [AWS_REGION_EU_WEST_1]\n output_options = self.set_mocked_output_options(is_quiet=False)\n findings = [self.generate_finding(\"PASS\", AWS_REGION_EU_WEST_2)]\n audit_info = set_mocked_aws_audit_info(\n audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]\n )\n\n assert prepare_security_hub_findings(\n findings,\n audit_info,\n output_options,\n enabled_regions,\n ) == {AWS_REGION_EU_WEST_1: []}\n\n def test_prepare_security_hub_findings_quiet(self):\n enabled_regions = [AWS_REGION_EU_WEST_1]\n output_options = self.set_mocked_output_options(is_quiet=True)\n findings = [self.generate_finding(\"PASS\", AWS_REGION_EU_WEST_1)]\n audit_info = set_mocked_aws_audit_info(\n audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]\n )\n\n assert prepare_security_hub_findings(\n findings,\n audit_info,\n output_options,\n enabled_regions,\n ) == {AWS_REGION_EU_WEST_1: []}\n\n def test_prepare_security_hub_findings_no_audited_regions(self):\n enabled_regions = [AWS_REGION_EU_WEST_1]\n output_options = self.set_mocked_output_options(is_quiet=False)\n findings = [self.generate_finding(\"PASS\", AWS_REGION_EU_WEST_1)]\n audit_info = set_mocked_aws_audit_info()\n\n assert prepare_security_hub_findings(\n findings,\n audit_info,\n output_options,\n enabled_regions,\n ) == {\n AWS_REGION_EU_WEST_1: [\n {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4\",\n \"ProductArn\": f\"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler\",\n \"RecordState\": \"ACTIVE\",\n \"ProductFields\": {\n \"ProviderName\": \"Prowler\",\n \"ProviderVersion\": prowler_version,\n \"ProwlerResourceName\": \"test\",\n },\n \"GeneratorId\": \"prowler-iam_user_accesskey_unused\",\n \"AwsAccountId\": f\"{AWS_ACCOUNT_NUMBER}\",\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": timestamp_utc.strftime(\"%Y-%m-%dT%H:%M:%SZ\"),\n \"UpdatedAt\": timestamp_utc.strftime(\"%Y-%m-%dT%H:%M:%SZ\"),\n \"CreatedAt\": 
timestamp_utc.strftime(\"%Y-%m-%dT%H:%M:%SZ\"),\n \"Severity\": {\"Label\": \"LOW\"},\n \"Title\": \"Ensure Access Keys unused are disabled\",\n \"Description\": \"test\",\n \"Resources\": [\n {\n \"Type\": \"AwsIamAccessAnalyzer\",\n \"Id\": \"test\",\n \"Partition\": \"aws\",\n \"Region\": f\"{AWS_REGION_EU_WEST_1}\",\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [],\n \"AssociatedStandards\": [],\n },\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"Run sudo yum update and cross your fingers and toes.\",\n \"Url\": \"https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html\",\n }\n },\n }\n ],\n }\n\n @patch(\"botocore.client.BaseClient._make_api_call\", new=mock_make_api_call)\n def test_batch_send_to_security_hub_one_finding(self):\n enabled_regions = [AWS_REGION_EU_WEST_1]\n output_options = self.set_mocked_output_options(is_quiet=False)\n findings = [self.generate_finding(\"PASS\", AWS_REGION_EU_WEST_1)]\n audit_info = set_mocked_aws_audit_info(\n audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]\n )\n session = self.set_mocked_session(AWS_REGION_EU_WEST_1)\n\n security_hub_findings = prepare_security_hub_findings(\n findings,\n audit_info,\n output_options,\n enabled_regions,\n )\n\n assert (\n batch_send_to_security_hub(\n security_hub_findings,\n session,\n )\n == 1\n )\n",
"path": "tests/providers/aws/lib/security_hub/security_hub_test.py"
},
{
"content": "from argparse import Namespace\nfrom os import rmdir\n\nfrom boto3 import session\nfrom mock import patch\n\nfrom prowler.lib.outputs.html import get_assessment_summary\nfrom prowler.providers.aws.lib.audit_info.audit_info import AWS_Audit_Info\nfrom prowler.providers.azure.lib.audit_info.audit_info import (\n Azure_Audit_Info,\n Azure_Identity_Info,\n Azure_Region_Config,\n)\nfrom prowler.providers.common.models import Audit_Metadata\nfrom prowler.providers.common.outputs import (\n Aws_Output_Options,\n Azure_Output_Options,\n Gcp_Output_Options,\n get_provider_output_model,\n set_provider_output_options,\n)\nfrom prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info\n\nAWS_ACCOUNT_NUMBER = \"012345678912\"\nDATETIME = \"20230101120000\"\n\n\n@patch(\"prowler.providers.common.outputs.output_file_timestamp\", new=DATETIME)\nclass Test_Common_Output_Options:\n # Mocked Azure Audit Info\n def set_mocked_azure_audit_info(self):\n audit_info = Azure_Audit_Info(\n credentials=None,\n identity=Azure_Identity_Info(),\n audit_metadata=None,\n audit_resources=None,\n audit_config=None,\n azure_region_config=Azure_Region_Config(),\n )\n return audit_info\n\n # Mocked GCP Audit Info\n def set_mocked_gcp_audit_info(self):\n audit_info = GCP_Audit_Info(\n credentials=None,\n default_project_id=\"test-project1\",\n project_ids=[\"test-project1\", \"test-project2\"],\n audit_resources=None,\n audit_metadata=None,\n audit_config=None,\n )\n return audit_info\n\n # Mocked AWS Audit Info\n def set_mocked_aws_audit_info(self):\n audit_info = AWS_Audit_Info(\n session_config=None,\n original_session=None,\n audit_session=session.Session(\n profile_name=None,\n botocore_session=None,\n ),\n audited_account=AWS_ACCOUNT_NUMBER,\n audited_account_arn=f\"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root\",\n audited_user_id=\"test-user\",\n audited_partition=\"aws\",\n audited_identity_arn=\"test-user-arn\",\n profile=None,\n profile_region=None,\n credentials=None,\n assumed_role_info=None,\n audited_regions=None,\n organizations_metadata=None,\n audit_resources=None,\n mfa_enabled=False,\n audit_metadata=Audit_Metadata(\n services_scanned=0,\n expected_checks=[],\n completed_checks=0,\n audit_progress=0,\n ),\n )\n return audit_info\n\n def test_set_provider_output_options_aws(self):\n # Set the cloud provider\n provider = \"aws\"\n # Set the arguments passed\n arguments = Namespace()\n arguments.quiet = True\n arguments.output_modes = [\"html\", \"csv\", \"json\"]\n arguments.output_directory = \"output_test_directory\"\n arguments.verbose = True\n arguments.output_filename = \"output_test_filename\"\n arguments.security_hub = True\n arguments.shodan = \"test-api-key\"\n arguments.only_logs = False\n arguments.unix_timestamp = False\n\n audit_info = self.set_mocked_aws_audit_info()\n allowlist_file = \"\"\n bulk_checks_metadata = {}\n output_options = set_provider_output_options(\n provider, arguments, audit_info, allowlist_file, bulk_checks_metadata\n )\n assert isinstance(output_options, Aws_Output_Options)\n assert output_options.security_hub_enabled\n assert output_options.is_quiet\n assert output_options.output_modes == [\"html\", \"csv\", \"json\", \"json-asff\"]\n assert output_options.output_directory == arguments.output_directory\n assert output_options.allowlist_file == \"\"\n assert output_options.bulk_checks_metadata == {}\n assert output_options.verbose\n assert output_options.output_filename == arguments.output_filename\n\n # Delete testing directory\n 
rmdir(arguments.output_directory)\n\n def test_set_provider_output_options_gcp(self):\n # Set the cloud provider\n provider = \"gcp\"\n # Set the arguments passed\n arguments = Namespace()\n arguments.quiet = True\n arguments.output_modes = [\"html\", \"csv\", \"json\"]\n arguments.output_directory = \"output_test_directory\"\n arguments.verbose = True\n arguments.output_filename = \"output_test_filename\"\n arguments.only_logs = False\n arguments.unix_timestamp = False\n\n audit_info = self.set_mocked_gcp_audit_info()\n allowlist_file = \"\"\n bulk_checks_metadata = {}\n output_options = set_provider_output_options(\n provider, arguments, audit_info, allowlist_file, bulk_checks_metadata\n )\n assert isinstance(output_options, Gcp_Output_Options)\n assert output_options.is_quiet\n assert output_options.output_modes == [\"html\", \"csv\", \"json\"]\n assert output_options.output_directory == arguments.output_directory\n assert output_options.allowlist_file == \"\"\n assert output_options.bulk_checks_metadata == {}\n assert output_options.verbose\n assert output_options.output_filename == arguments.output_filename\n\n # Delete testing directory\n rmdir(arguments.output_directory)\n\n def test_set_provider_output_options_aws_no_output_filename(self):\n # Set the cloud provider\n provider = \"aws\"\n # Set the arguments passed\n arguments = Namespace()\n arguments.quiet = True\n arguments.output_modes = [\"html\", \"csv\", \"json\"]\n arguments.output_directory = \"output_test_directory\"\n arguments.verbose = True\n arguments.security_hub = True\n arguments.shodan = \"test-api-key\"\n arguments.only_logs = False\n arguments.unix_timestamp = False\n\n # Mock AWS Audit Info\n audit_info = self.set_mocked_aws_audit_info()\n\n allowlist_file = \"\"\n bulk_checks_metadata = {}\n output_options = set_provider_output_options(\n provider, arguments, audit_info, allowlist_file, bulk_checks_metadata\n )\n assert isinstance(output_options, Aws_Output_Options)\n assert output_options.security_hub_enabled\n assert output_options.is_quiet\n assert output_options.output_modes == [\"html\", \"csv\", \"json\", \"json-asff\"]\n assert output_options.output_directory == arguments.output_directory\n assert output_options.allowlist_file == \"\"\n assert output_options.bulk_checks_metadata == {}\n assert output_options.verbose\n assert (\n output_options.output_filename\n == f\"prowler-output-{AWS_ACCOUNT_NUMBER}-{DATETIME}\"\n )\n\n # Delete testing directory\n rmdir(arguments.output_directory)\n\n def test_set_provider_output_options_azure_domain(self):\n # Set the cloud provider\n provider = \"azure\"\n # Set the arguments passed\n arguments = Namespace()\n arguments.quiet = True\n arguments.output_modes = [\"html\", \"csv\", \"json\"]\n arguments.output_directory = \"output_test_directory\"\n arguments.verbose = True\n arguments.only_logs = False\n arguments.unix_timestamp = False\n\n # Mock Azure Audit Info\n audit_info = self.set_mocked_azure_audit_info()\n audit_info.identity.domain = \"test-domain\"\n\n allowlist_file = \"\"\n bulk_checks_metadata = {}\n output_options = set_provider_output_options(\n provider, arguments, audit_info, allowlist_file, bulk_checks_metadata\n )\n assert isinstance(output_options, Azure_Output_Options)\n assert output_options.is_quiet\n assert output_options.output_modes == [\n \"html\",\n \"csv\",\n \"json\",\n ]\n assert output_options.output_directory == arguments.output_directory\n assert output_options.allowlist_file == \"\"\n assert output_options.bulk_checks_metadata == 
{}\n assert output_options.verbose\n assert (\n output_options.output_filename\n == f\"prowler-output-{audit_info.identity.domain}-{DATETIME}\"\n )\n\n # Delete testing directory\n rmdir(arguments.output_directory)\n\n def test_set_provider_output_options_azure_tenant_ids(self):\n # Set the cloud provider\n provider = \"azure\"\n # Set the arguments passed\n arguments = Namespace()\n arguments.quiet = True\n arguments.output_modes = [\"html\", \"csv\", \"json\"]\n arguments.output_directory = \"output_test_directory\"\n arguments.verbose = True\n arguments.only_logs = False\n arguments.unix_timestamp = False\n\n # Mock Azure Audit Info\n audit_info = self.set_mocked_azure_audit_info()\n tenants = [\"tenant-1\", \"tenant-2\"]\n audit_info.identity.tenant_ids = tenants\n\n allowlist_file = \"\"\n bulk_checks_metadata = {}\n output_options = set_provider_output_options(\n provider, arguments, audit_info, allowlist_file, bulk_checks_metadata\n )\n assert isinstance(output_options, Azure_Output_Options)\n assert output_options.is_quiet\n assert output_options.output_modes == [\n \"html\",\n \"csv\",\n \"json\",\n ]\n assert output_options.output_directory == arguments.output_directory\n assert output_options.allowlist_file == \"\"\n assert output_options.bulk_checks_metadata == {}\n assert output_options.verbose\n assert (\n output_options.output_filename\n == f\"prowler-output-{'-'.join(tenants)}-{DATETIME}\"\n )\n\n # Delete testing directory\n rmdir(arguments.output_directory)\n\n def test_azure_get_assessment_summary(self):\n # Mock Azure Audit Info\n audit_info = self.set_mocked_azure_audit_info()\n tenants = [\"tenant-1\", \"tenant-2\"]\n audit_info.identity.tenant_ids = tenants\n audit_info.identity.subscriptions = {\n \"Azure subscription 1\": \"12345-qwerty\",\n \"Subscription2\": \"12345-qwerty\",\n }\n printed_subscriptions = []\n for key, value in audit_info.identity.subscriptions.items():\n intermediate = key + \" : \" + value\n printed_subscriptions.append(intermediate)\n assert (\n get_assessment_summary(audit_info)\n == f\"\"\"\n <div class=\"col-md-2\">\n <div class=\"card\">\n <div class=\"card-header\">\n Azure Assessment Summary\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>Azure Tenant IDs:</b> {\" \".join(audit_info.identity.tenant_ids)}\n </li>\n <li class=\"list-group-item\">\n <b>Azure Tenant Domain:</b> {audit_info.identity.domain}\n </li>\n <li class=\"list-group-item\">\n <b>Azure Subscriptions:</b> {\" \".join(printed_subscriptions)}\n </li>\n </ul>\n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"card\">\n <div class=\"card-header\">\n Azure Credentials\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>Azure Identity Type:</b> {audit_info.identity.identity_type}\n </li>\n <li class=\"list-group-item\">\n <b>Azure Identity ID:</b> {audit_info.identity.identity_id}\n </li>\n </ul>\n </div>\n </div>\n \"\"\"\n )\n\n def test_aws_get_assessment_summary(self):\n # Mock AWS Audit Info\n audit_info = self.set_mocked_aws_audit_info()\n\n assert (\n get_assessment_summary(audit_info)\n == f\"\"\"\n <div class=\"col-md-2\">\n <div class=\"card\">\n <div class=\"card-header\">\n AWS Assessment Summary\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>AWS Account:</b> {audit_info.audited_account}\n </li>\n <li class=\"list-group-item\">\n <b>AWS-CLI Profile:</b> default\n </li>\n <li class=\"list-group-item\">\n <b>Audited Regions:</b> 
All Regions\n </li>\n </ul>\n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"card\">\n <div class=\"card-header\">\n AWS Credentials\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>User Id:</b> {audit_info.audited_user_id}\n </li>\n <li class=\"list-group-item\">\n <b>Caller Identity ARN:</b> {audit_info.audited_identity_arn}\n </li>\n </ul>\n </div>\n </div>\n \"\"\"\n )\n\n def test_gcp_get_assessment_summary(self):\n # Mock Azure Audit Info\n audit_info = self.set_mocked_gcp_audit_info()\n profile = \"default\"\n assert (\n get_assessment_summary(audit_info)\n == f\"\"\"\n <div class=\"col-md-2\">\n <div class=\"card\">\n <div class=\"card-header\">\n GCP Assessment Summary\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>GCP Project IDs:</b> {', '.join(audit_info.project_ids)}\n </li>\n </ul>\n </div>\n </div>\n <div class=\"col-md-4\">\n <div class=\"card\">\n <div class=\"card-header\">\n GCP Credentials\n </div>\n <ul class=\"list-group list-group-flush\">\n <li class=\"list-group-item\">\n <b>GCP Account:</b> {profile}\n </li>\n </ul>\n </div>\n </div>\n \"\"\"\n )\n\n def test_get_provider_output_model(self):\n audit_info_class_names = [\n \"AWS_Audit_Info\",\n \"GCP_Audit_Info\",\n \"Azure_Audit_Info\",\n ]\n for class_name in audit_info_class_names:\n provider_prefix = class_name.split(\"_\", 1)[0].lower().capitalize()\n assert (\n get_provider_output_model(class_name).__name__\n == f\"{provider_prefix}_Check_Output_CSV\"\n )\n",
"path": "tests/providers/common/common_outputs_test.py"
}
] | 11_7 | python | import unittest
import sys
from unittest.mock import patch
prowler_command = "prowler"
# The upstream pytest version of this suite captures stdout/stderr with capsys:
# https://docs.pytest.org/en/7.1.x/how-to/capture-stdout-stderr.html
prowler_default_usage_error = "usage: prowler [-h] [-v] {aws,azure,gcp} ..."
def mock_get_available_providers():
return ["aws", "azure", "gcp"]
class Test_Parser(unittest.TestCase):
def setUp(self):
from prowler.lib.cli.parser import ProwlerArgumentParser
        # We need this to mock the get_available_providers function call,
        # since importlib.import_module does not resolve correctly when invoked from the test class
self.patch_get_available_providers = patch(
"prowler.providers.common.arguments.get_available_providers",
new=mock_get_available_providers,
)
        self.patch_get_available_providers.start()
        # Stop the patch after each test so the mocked provider list does not leak into other tests
        self.addCleanup(self.patch_get_available_providers.stop)
# Init parser
self.parser = ProwlerArgumentParser()
def test_aws_parser_send_only_fail(self):
argument = "--send-sh-only-fails"
command = [prowler_command, argument]
parsed = self.parser.parse(command)
assert parsed.send_sh_only_fails
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(Test_Parser))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/prowler | The objective is to revert the feature that automatically cleans up local output directories after Prowler sends output to remote storage. Remove the `clean.py` file from the common directory and eliminate any references to its functions, particularly `clean_provider_local_output_directories`, from the Prowler main file (`__main__.py`). Also remove the `clean_test.py` file from the tests folder. Carefully ensure all associated imports and calls in the main file are removed so that no residual references to the now-nonexistent cleanup functionality remain. | 9099bd7 | about-time==4.2.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece \
--hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341
adal==1.2.7 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d \
--hash=sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1
alive-progress==3.1.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5 \
--hash=sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98
attrs==23.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
--hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
awsipranges==0.3.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0 \
--hash=sha256:f3d7a54aeaf7fe310beb5d377a4034a63a51b72677ae6af3e0967bc4de7eedaf
azure-common==1.1.28 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3 \
--hash=sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad
azure-core==1.28.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9 \
--hash=sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd
azure-identity==1.15.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8 \
--hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912
azure-mgmt-authorization==4.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4 \
--hash=sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a
azure-mgmt-core==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d \
--hash=sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae
azure-mgmt-security==5.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38b03efe82c2344cea203fda95e6d00b7ac22782fa1c0b585cd0ea2c8ff3e702 \
--hash=sha256:73a74ce8f6ffb1b345ce101c8abdd42238f161f0988d168d23918feda0089654
azure-mgmt-sql==3.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:129042cc011225e27aee6ef2697d585fa5722e5d1aeb0038af6ad2451a285457 \
--hash=sha256:1d1dd940d4d41be4ee319aad626341251572a5bf4a2addec71779432d9a1381f
azure-mgmt-storage==21.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc \
--hash=sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162
azure-mgmt-subscription==3.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38d4574a8d47fa17e3587d756e296cb63b82ad8fb21cd8543bcee443a502bf48 \
--hash=sha256:4e255b4ce9b924357bb8c5009b3c88a2014d3203b2495e2256fa027bf84e800e
azure-storage-blob==12.19.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897 \
--hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b
boto3==1.26.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30 \
--hash=sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c
botocore==1.29.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df \
--hash=sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210
cachetools==5.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
certifi==2023.7.22 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
cffi==1.15.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
click==8.1.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
colorama==0.4.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
contextlib2==21.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \
--hash=sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869
cryptography==41.0.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \
--hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \
--hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \
--hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \
--hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \
--hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \
--hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \
--hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \
--hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \
--hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \
--hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \
--hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \
--hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \
--hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \
--hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \
--hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \
--hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \
--hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \
--hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \
--hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \
--hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \
--hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \
--hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae
detect-secrets==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd \
--hash=sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833
filelock==3.12.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81 \
--hash=sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec
google-api-core==2.11.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \
--hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e
google-api-python-client==2.111.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3a45a53c031478d1c82c7162dd25c9a965247bca6bd438af0838a9d9b8219405 \
--hash=sha256:b605adee2d09a843b97a59925757802904679e44e5599708cedb8939900dfbc7
google-auth-httplib2==0.2.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05 \
--hash=sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d
google-auth==2.17.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
--hash=sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f
googleapis-common-protos==1.59.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44 \
--hash=sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f
grapheme==0.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca
httplib2==0.22.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \
--hash=sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81
idna==3.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
isodate==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 \
--hash=sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9
jmespath==1.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
jsonschema-specifications==2023.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \
--hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28
jsonschema==4.20.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa \
--hash=sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3
msal-extensions==1.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee \
--hash=sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354
msal==1.24.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8 \
--hash=sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad
msgraph-core==0.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:147324246788abe8ed7e05534cd9e4e0ec98b33b30e011693b8d014cebf97f63 \
--hash=sha256:e297564b9a0ca228493d8851f95cb2de9522143d82efa40ce3a6ad286e21392e
msrest==0.7.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32 \
--hash=sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9
msrestazure==0.6.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9 \
--hash=sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189
oauthlib==3.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \
--hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918
portalocker==2.7.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51 \
--hash=sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983
protobuf==4.23.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \
--hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \
--hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \
--hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \
--hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \
--hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \
--hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \
--hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \
--hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \
--hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \
--hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \
--hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2
pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
--hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
pycparser==2.21 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.13 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \
--hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \
--hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \
--hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \
--hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \
--hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \
--hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \
--hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \
--hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \
--hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \
--hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \
--hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \
--hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \
--hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \
--hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \
--hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \
--hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \
--hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \
--hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \
--hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \
--hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \
--hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \
--hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \
--hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \
--hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \
--hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \
--hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \
--hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \
--hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \
--hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \
--hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \
--hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \
--hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \
--hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \
--hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0
pyjwt==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyjwt[crypto]==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
pywin32==306 ; python_version >= "3.9" and platform_system == "Windows" and python_version < "3.12" \
--hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \
--hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \
--hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \
--hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \
--hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \
--hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \
--hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \
--hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \
--hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \
--hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \
--hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \
--hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0
pyyaml==6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
referencing==0.29.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \
--hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f
requests-file==1.5.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \
--hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953
requests-oauthlib==1.3.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \
--hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a
requests==2.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
rpds-py==0.8.10 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \
--hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \
--hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \
--hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \
--hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \
--hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \
--hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \
--hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \
--hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \
--hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \
--hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \
--hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \
--hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \
--hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \
--hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \
--hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \
--hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \
--hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \
--hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \
--hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \
--hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \
--hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \
--hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \
--hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \
--hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \
--hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \
--hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \
--hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \
--hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \
--hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \
--hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \
--hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \
--hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \
--hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \
--hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \
--hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \
--hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \
--hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \
--hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \
--hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \
--hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \
--hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \
--hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \
--hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \
--hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \
--hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \
--hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \
--hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \
--hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \
--hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \
--hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \
--hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \
--hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \
--hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \
--hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \
--hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \
--hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \
--hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \
--hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \
--hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \
--hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \
--hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \
--hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \
--hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \
--hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \
--hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \
--hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \
--hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \
--hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \
--hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \
--hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \
--hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \
--hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \
--hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \
--hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \
--hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \
--hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \
--hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \
--hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \
--hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \
--hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84
rsa==4.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
s3transfer==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346 \
--hash=sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9
schema==0.7.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197 \
--hash=sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c
shodan==1.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:c73275386ea02390e196c35c660706a28dd4d537c5a21eb387ab6236fac251f6
six==1.16.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
slack-sdk==3.26.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d1600211eaa37c71a5f92daf4404074c3e6b3f5359a37c93c818b39d88ab4ca0 \
--hash=sha256:f80f0d15f0fce539b470447d2a07b03ecdad6b24f69c1edd05d464cf21253a06
tabulate==0.9.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \
--hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f
tldextract==3.4.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2 \
--hash=sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234
typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
urllib3==1.26.18 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
--hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
xlsxwriter==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122 \
--hash=sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de | python3.9 | fdcc2ac5 | diff --git a/prowler/__main__.py b/prowler/__main__.py
--- a/prowler/__main__.py
+++ b/prowler/__main__.py
@@ -51,7 +51,6 @@ from prowler.providers.common.audit_info import (
     set_provider_audit_info,
     set_provider_execution_parameters,
 )
-from prowler.providers.common.clean import clean_provider_local_output_directories
 from prowler.providers.common.outputs import set_provider_output_options
 from prowler.providers.common.quick_inventory import run_provider_quick_inventory
 
@@ -324,9 +323,6 @@ def prowler():
     if checks_folder:
         remove_custom_checks_module(checks_folder, provider)
 
-    # clean local directories
-    clean_provider_local_output_directories(args)
-
     # If there are failed findings exit code 3, except if -z is input
     if not args.ignore_exit_code_3 and stats["total_fail"] > 0:
         sys.exit(3)
diff --git a/prowler/providers/common/clean.py b/prowler/providers/common/clean.py
deleted file mode 100644
--- a/prowler/providers/common/clean.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import importlib
-import sys
-from shutil import rmtree
-
-from prowler.config.config import default_output_directory
-from prowler.lib.logger import logger
-
-
-def clean_provider_local_output_directories(args):
-    """
-    clean_provider_local_output_directories deletes the output files generated locally in custom directories when the output is sent to a remote storage provider
-    """
-    try:
-        # import provider cleaning function
-        provider_clean_function = f"clean_{args.provider}_local_output_directories"
-        getattr(importlib.import_module(__name__), provider_clean_function)(args)
-    except AttributeError as attribute_exception:
-        logger.info(
-            f"Cleaning local output directories not initialized for provider {args.provider}: {attribute_exception}"
-        )
-    except Exception as error:
-        logger.critical(
-            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
-        )
-        sys.exit(1)
-
-
-def clean_aws_local_output_directories(args):
-    """clean_aws_local_output_directories deletes the output files generated locally in custom directories when output is sent to a remote storage provider for AWS"""
-    if args.output_bucket or args.output_bucket_no_assume:
-        if args.output_directory != default_output_directory:
-            rmtree(args.output_directory)
diff --git a/tests/providers/common/clean_test.py b/tests/providers/common/clean_test.py
deleted file mode 100644
--- a/tests/providers/common/clean_test.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import importlib
-import logging
-import tempfile
-from argparse import Namespace
-from os import path
-
-from mock import patch
-
-from prowler.providers.common.clean import clean_provider_local_output_directories
-
-
-class Test_Common_Clean:
-    def set_provider_input_args(self, provider):
-        set_args_function = f"set_{provider}_input_args"
-        args = getattr(
-            getattr(importlib.import_module(__name__), __class__.__name__),
-            set_args_function,
-        )(self)
-        return args
-
-    def set_aws_input_args(self):
-        args = Namespace()
-        args.provider = "aws"
-        args.output_bucket = "test-bucket"
-        args.output_bucket_no_assume = None
-        return args
-
-    def set_azure_input_args(self):
-        args = Namespace()
-        args.provider = "azure"
-        return args
-
-    def test_clean_provider_local_output_directories_non_initialized(self, caplog):
-        provider = "azure"
-        input_args = self.set_provider_input_args(provider)
-        caplog.set_level(logging.INFO)
-        clean_provider_local_output_directories(input_args)
-        assert (
-            f"Cleaning local output directories not initialized for provider {provider}:"
-            in caplog.text
-        )
-
-    def test_clean_aws_local_output_directories_non_default_dir_output_bucket(self):
-        provider = "aws"
-        input_args = self.set_provider_input_args(provider)
-        with tempfile.TemporaryDirectory() as temp_dir:
-            input_args.output_directory = temp_dir
-            clean_provider_local_output_directories(input_args)
-            assert not path.exists(input_args.output_directory)
-
-    def test_clean_aws_local_output_directories_non_default_dir_output_bucket_no_assume(
-        self,
-    ):
-        provider = "aws"
-        input_args = self.set_provider_input_args(provider)
-        input_args.output_bucket = None
-        input_args.output_bucket_no_assume = "test"
-        with tempfile.TemporaryDirectory() as temp_dir:
-            input_args.output_directory = temp_dir
-            clean_provider_local_output_directories(input_args)
-            assert not path.exists(input_args.output_directory)
-
-    def test_clean_aws_local_output_directories_default_dir_output_bucket(self):
-        provider = "aws"
-        input_args = self.set_provider_input_args(provider)
-        with tempfile.TemporaryDirectory() as temp_dir:
-            with patch(
-                "prowler.providers.common.clean.default_output_directory", new=temp_dir
-            ):
-                input_args.output_directory = temp_dir
-                clean_provider_local_output_directories(input_args)
-                assert path.exists(input_args.output_directory)
-
-    def test_clean_aws_local_output_directories_default_dir_output_bucket_no_assume(
-        self,
-    ):
-        provider = "aws"
-        input_args = self.set_provider_input_args(provider)
-        input_args.output_bucket_no_assume = "test"
-        input_args.ouput_bucket = None
-        with tempfile.TemporaryDirectory() as temp_dir:
-            with patch(
-                "prowler.providers.common.clean.default_output_directory", new=temp_dir
-            ):
-                input_args.output_directory = temp_dir
-                clean_provider_local_output_directories(input_args)
-                assert path.exists(input_args.output_directory)
| [
{
"content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nimport os\nimport sys\n\nfrom colorama import Fore, Style\n\nfrom prowler.lib.banner import print_banner\nfrom prowler.lib.check.check import (\n bulk_load_checks_metadata,\n bulk_load_compliance_frameworks,\n exclude_checks_to_run,\n exclude_services_to_run,\n execute_checks,\n list_categories,\n list_checks_json,\n list_services,\n parse_checks_from_folder,\n print_categories,\n print_checks,\n print_compliance_frameworks,\n print_compliance_requirements,\n print_services,\n remove_custom_checks_module,\n)\nfrom prowler.lib.check.checks_loader import load_checks_to_execute\nfrom prowler.lib.check.compliance import update_checks_metadata_with_compliance\nfrom prowler.lib.check.custom_checks_metadata import (\n parse_custom_checks_metadata_file,\n update_checks_metadata,\n)\nfrom prowler.lib.cli.parser import ProwlerArgumentParser\nfrom prowler.lib.logger import logger, set_logging_config\nfrom prowler.lib.outputs.compliance import display_compliance_table\nfrom prowler.lib.outputs.html import add_html_footer, fill_html_overview_statistics\nfrom prowler.lib.outputs.json import close_json\nfrom prowler.lib.outputs.outputs import extract_findings_statistics\nfrom prowler.lib.outputs.slack import send_slack_message\nfrom prowler.lib.outputs.summary_table import display_summary_table\nfrom prowler.providers.aws.aws_provider import get_available_aws_service_regions\nfrom prowler.providers.aws.lib.s3.s3 import send_to_s3_bucket\nfrom prowler.providers.aws.lib.security_hub.security_hub import (\n batch_send_to_security_hub,\n prepare_security_hub_findings,\n resolve_security_hub_previous_findings,\n verify_security_hub_integration_enabled_per_region,\n)\nfrom prowler.providers.common.allowlist import set_provider_allowlist\nfrom prowler.providers.common.audit_info import (\n set_provider_audit_info,\n set_provider_execution_parameters,\n)\nfrom prowler.providers.common.clean import clean_provider_local_output_directories\nfrom prowler.providers.common.outputs import set_provider_output_options\nfrom prowler.providers.common.quick_inventory import run_provider_quick_inventory\n\n\ndef prowler():\n # Parse Arguments\n parser = ProwlerArgumentParser()\n args = parser.parse()\n\n # Save Arguments\n provider = args.provider\n checks = args.checks\n excluded_checks = args.excluded_checks\n excluded_services = args.excluded_services\n services = args.services\n categories = args.categories\n checks_file = args.checks_file\n checks_folder = args.checks_folder\n severities = args.severity\n compliance_framework = args.compliance\n custom_checks_metadata_file = args.custom_checks_metadata_file\n\n if not args.no_banner:\n print_banner(args)\n\n # We treat the compliance framework as another output format\n if compliance_framework:\n args.output_modes.extend(compliance_framework)\n\n # Set Logger configuration\n set_logging_config(args.log_level, args.log_file, args.only_logs)\n\n if args.list_services:\n print_services(list_services(provider))\n sys.exit()\n\n # Load checks metadata\n logger.debug(\"Loading checks metadata from .metadata.json files\")\n bulk_checks_metadata = bulk_load_checks_metadata(provider)\n\n if args.list_categories:\n print_categories(list_categories(bulk_checks_metadata))\n sys.exit()\n\n bulk_compliance_frameworks = {}\n # Load compliance frameworks\n logger.debug(\"Loading compliance frameworks from .json files\")\n\n bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)\n # Complete checks metadata with the compliance framework specification\n bulk_checks_metadata = update_checks_metadata_with_compliance(\n bulk_compliance_frameworks, bulk_checks_metadata\n )\n # Update checks metadata if the --custom-checks-metadata-file is present\n custom_checks_metadata = None\n if custom_checks_metadata_file:\n custom_checks_metadata = parse_custom_checks_metadata_file(\n provider, custom_checks_metadata_file\n )\n bulk_checks_metadata = update_checks_metadata(\n bulk_checks_metadata, custom_checks_metadata\n )\n\n if args.list_compliance:\n print_compliance_frameworks(bulk_compliance_frameworks)\n sys.exit()\n if args.list_compliance_requirements:\n print_compliance_requirements(\n bulk_compliance_frameworks, args.list_compliance_requirements\n )\n sys.exit()\n\n # Load checks to execute\n checks_to_execute = load_checks_to_execute(\n bulk_checks_metadata,\n bulk_compliance_frameworks,\n checks_file,\n checks,\n services,\n severities,\n compliance_framework,\n categories,\n provider,\n )\n\n # if --list-checks-json, dump a json file and exit\n if args.list_checks_json:\n print(list_checks_json(provider, sorted(checks_to_execute)))\n sys.exit()\n\n # If -l/--list-checks passed as argument, print checks to execute and quit\n if args.list_checks:\n print_checks(provider, sorted(checks_to_execute), bulk_checks_metadata)\n sys.exit()\n\n # Set the audit info based on the selected provider\n audit_info = set_provider_audit_info(provider, args.__dict__)\n\n # Import custom checks from folder\n if checks_folder:\n parse_checks_from_folder(audit_info, checks_folder, provider)\n\n # Exclude checks if -e/--excluded-checks\n if excluded_checks:\n checks_to_execute = exclude_checks_to_run(checks_to_execute, excluded_checks)\n\n # Exclude services if --excluded-services\n if excluded_services:\n checks_to_execute = exclude_services_to_run(\n checks_to_execute, excluded_services, provider\n )\n\n # Once the audit_info is set and we have the eventual checks based on the resource identifier,\n # it is time to check what Prowler's checks are going to be executed\n if audit_info.audit_resources:\n checks_from_resources = set_provider_execution_parameters(provider, audit_info)\n checks_to_execute = checks_to_execute.intersection(checks_from_resources)\n\n # Sort final check list\n checks_to_execute = sorted(checks_to_execute)\n\n # Parse Allowlist\n allowlist_file = set_provider_allowlist(provider, audit_info, args)\n\n # Set output options based on the selected provider\n audit_output_options = set_provider_output_options(\n provider, args, audit_info, allowlist_file, bulk_checks_metadata\n )\n\n # Run the quick inventory for the provider if available\n if hasattr(args, \"quick_inventory\") and args.quick_inventory:\n run_provider_quick_inventory(provider, audit_info, args)\n sys.exit()\n\n # Execute checks\n findings = []\n if len(checks_to_execute):\n findings = execute_checks(\n checks_to_execute,\n provider,\n audit_info,\n audit_output_options,\n custom_checks_metadata,\n )\n else:\n logger.error(\n \"There are no checks to execute. Please, check your input arguments\"\n )\n\n # Extract findings stats\n stats = extract_findings_statistics(findings)\n\n if args.slack:\n if \"SLACK_API_TOKEN\" in os.environ and \"SLACK_CHANNEL_ID\" in os.environ:\n _ = send_slack_message(\n os.environ[\"SLACK_API_TOKEN\"],\n os.environ[\"SLACK_CHANNEL_ID\"],\n stats,\n provider,\n audit_info,\n )\n else:\n logger.critical(\n \"Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_ID environment variables (see more in https://docs.prowler.cloud/en/latest/tutorials/integrations/#slack).\"\n )\n sys.exit(1)\n\n if args.output_modes:\n for mode in args.output_modes:\n # Close json file if exists\n if \"json\" in mode:\n close_json(\n audit_output_options.output_filename, args.output_directory, mode\n )\n if mode == \"html\":\n add_html_footer(\n audit_output_options.output_filename, args.output_directory\n )\n fill_html_overview_statistics(\n stats, audit_output_options.output_filename, args.output_directory\n )\n # Send output to S3 if needed (-B / -D)\n if provider == \"aws\" and (\n args.output_bucket or args.output_bucket_no_assume\n ):\n output_bucket = args.output_bucket\n bucket_session = audit_info.audit_session\n # Check if -D was input\n if args.output_bucket_no_assume:\n output_bucket = args.output_bucket_no_assume\n bucket_session = audit_info.original_session\n send_to_s3_bucket(\n audit_output_options.output_filename,\n args.output_directory,\n mode,\n output_bucket,\n bucket_session,\n )\n\n # AWS Security Hub Integration\n if provider == \"aws\" and args.security_hub:\n print(\n f\"{Style.BRIGHT}\\nSending findings to AWS Security Hub, please wait...{Style.RESET_ALL}\"\n )\n # Verify where AWS Security Hub is enabled\n aws_security_enabled_regions = []\n security_hub_regions = (\n get_available_aws_service_regions(\"securityhub\", audit_info)\n if not audit_info.audited_regions\n else audit_info.audited_regions\n )\n for region in security_hub_regions:\n # Save the regions where AWS Security Hub is enabled\n if verify_security_hub_integration_enabled_per_region(\n audit_info.audited_partition,\n region,\n audit_info.audit_session,\n audit_info.audited_account,\n ):\n aws_security_enabled_regions.append(region)\n\n # Prepare the findings to be sent to Security Hub\n security_hub_findings_per_region = prepare_security_hub_findings(\n findings, audit_info, audit_output_options, aws_security_enabled_regions\n )\n\n # Send the findings to Security Hub\n findings_sent_to_security_hub = batch_send_to_security_hub(\n security_hub_findings_per_region, audit_info.audit_session\n )\n\n print(\n f\"{Style.BRIGHT}{Fore.GREEN}\\n{findings_sent_to_security_hub} findings sent to AWS Security Hub!{Style.RESET_ALL}\"\n )\n\n # Resolve previous fails of Security Hub\n if not args.skip_sh_update:\n print(\n f\"{Style.BRIGHT}\\nArchiving previous findings in AWS Security Hub, please wait...{Style.RESET_ALL}\"\n )\n findings_archived_in_security_hub = resolve_security_hub_previous_findings(\n security_hub_findings_per_region,\n audit_info,\n )\n print(\n f\"{Style.BRIGHT}{Fore.GREEN}\\n{findings_archived_in_security_hub} findings archived in AWS Security Hub!{Style.RESET_ALL}\"\n )\n\n # Display summary table\n if not args.only_logs:\n display_summary_table(\n findings,\n audit_info,\n audit_output_options,\n provider,\n )\n\n if compliance_framework and findings:\n for compliance in compliance_framework:\n # Display compliance table\n display_compliance_table(\n findings,\n bulk_checks_metadata,\n compliance,\n audit_output_options.output_filename,\n audit_output_options.output_directory,\n )\n\n # If custom checks were passed, remove the modules\n if checks_folder:\n remove_custom_checks_module(checks_folder, provider)\n\n # clean local directories\n clean_provider_local_output_directories(args)\n\n # If there are failed findings exit code 3, except if -z is input\n if not args.ignore_exit_code_3 and stats[\"total_fail\"] > 0:\n sys.exit(3)\n\n\nif __name__ == \"__main__\":\n prowler()\n",
"path": "prowler/__main__.py"
},
{
"content": "import importlib\nimport sys\nfrom shutil import rmtree\n\nfrom prowler.config.config import default_output_directory\nfrom prowler.lib.logger import logger\n\n\ndef clean_provider_local_output_directories(args):\n \"\"\"\n clean_provider_local_output_directories deletes the output files generated locally in custom directories when the output is sent to a remote storage provider\n \"\"\"\n try:\n # import provider cleaning function\n provider_clean_function = f\"clean_{args.provider}_local_output_directories\"\n getattr(importlib.import_module(__name__), provider_clean_function)(args)\n except AttributeError as attribute_exception:\n logger.info(\n f\"Cleaning local output directories not initialized for provider {args.provider}: {attribute_exception}\"\n )\n except Exception as error:\n logger.critical(\n f\"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}\"\n )\n sys.exit(1)\n\n\ndef clean_aws_local_output_directories(args):\n \"\"\"clean_aws_local_output_directories deletes the output files generated locally in custom directories when output is sent to a remote storage provider for AWS\"\"\"\n if args.output_bucket or args.output_bucket_no_assume:\n if args.output_directory != default_output_directory:\n rmtree(args.output_directory)\n",
"path": "prowler/providers/common/clean.py"
},
{
"content": "import importlib\nimport logging\nimport tempfile\nfrom argparse import Namespace\nfrom os import path\n\nfrom mock import patch\n\nfrom prowler.providers.common.clean import clean_provider_local_output_directories\n\n\nclass Test_Common_Clean:\n def set_provider_input_args(self, provider):\n set_args_function = f\"set_{provider}_input_args\"\n args = getattr(\n getattr(importlib.import_module(__name__), __class__.__name__),\n set_args_function,\n )(self)\n return args\n\n def set_aws_input_args(self):\n args = Namespace()\n args.provider = \"aws\"\n args.output_bucket = \"test-bucket\"\n args.output_bucket_no_assume = None\n return args\n\n def set_azure_input_args(self):\n args = Namespace()\n args.provider = \"azure\"\n return args\n\n def test_clean_provider_local_output_directories_non_initialized(self, caplog):\n provider = \"azure\"\n input_args = self.set_provider_input_args(provider)\n caplog.set_level(logging.INFO)\n clean_provider_local_output_directories(input_args)\n assert (\n f\"Cleaning local output directories not initialized for provider {provider}:\"\n in caplog.text\n )\n\n def test_clean_aws_local_output_directories_non_default_dir_output_bucket(self):\n provider = \"aws\"\n input_args = self.set_provider_input_args(provider)\n with tempfile.TemporaryDirectory() as temp_dir:\n input_args.output_directory = temp_dir\n clean_provider_local_output_directories(input_args)\n assert not path.exists(input_args.output_directory)\n\n def test_clean_aws_local_output_directories_non_default_dir_output_bucket_no_assume(\n self,\n ):\n provider = \"aws\"\n input_args = self.set_provider_input_args(provider)\n input_args.output_bucket = None\n input_args.output_bucket_no_assume = \"test\"\n with tempfile.TemporaryDirectory() as temp_dir:\n input_args.output_directory = temp_dir\n clean_provider_local_output_directories(input_args)\n assert not path.exists(input_args.output_directory)\n\n def test_clean_aws_local_output_directories_default_dir_output_bucket(self):\n provider = \"aws\"\n input_args = self.set_provider_input_args(provider)\n with tempfile.TemporaryDirectory() as temp_dir:\n with patch(\n \"prowler.providers.common.clean.default_output_directory\", new=temp_dir\n ):\n input_args.output_directory = temp_dir\n clean_provider_local_output_directories(input_args)\n assert path.exists(input_args.output_directory)\n\n def test_clean_aws_local_output_directories_default_dir_output_bucket_no_assume(\n self,\n ):\n provider = \"aws\"\n input_args = self.set_provider_input_args(provider)\n input_args.output_bucket_no_assume = \"test\"\n input_args.ouput_bucket = None\n with tempfile.TemporaryDirectory() as temp_dir:\n with patch(\n \"prowler.providers.common.clean.default_output_directory\", new=temp_dir\n ):\n input_args.output_directory = temp_dir\n clean_provider_local_output_directories(input_args)\n assert path.exists(input_args.output_directory)\n",
"path": "tests/providers/common/clean_test.py"
}
] | 11_8 | python | import unittest
import os
import sys
from importlib import import_module
class TestProwlerCommitChanges(unittest.TestCase):
    def test_clean_module_removal(self):
        self.assertFalse(os.path.exists('prowler/providers/common/clean.py'))

    def test_clean_test_module_removal(self):
        self.assertFalse(os.path.exists('tests/providers/common/clean_test.py'))

    def test_clean_function_removal_from_main(self):
        try:
            # Trying to import the potentially removed function
            from prowler.providers.common import clean
            self.fail("clean module should not exist")
        except ImportError:
            pass
        # Check if the function call is removed from __main__.py
        sys.argv = ['prowler']
        main_module = import_module('prowler.__main__')
        self.assertFalse(hasattr(main_module, 'clean_provider_local_output_directories'))


def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestProwlerCommitChanges))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == '__main__':
    main()
|
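For the GuardDuty task that follows, a minimal sketch of the refined detector condition it describes — assuming a detector object exposing the `id` and `enabled_in_account` attributes named in the instruction; the Detector class here is a hypothetical stand-in, not Prowler's actual model:

# Illustrative sketch only: a stand-in Detector model with the two
# attributes the instruction names (assumptions, not repository code).
class Detector:
    def __init__(self, id, enabled_in_account):
        self.id = id                                  # GuardDuty detector ID, if any
        self.enabled_in_account = enabled_in_account  # True when enabled in the account


def detector_is_active(detector):
    # Refined condition: report on a detector only when an ID exists
    # AND the detector is enabled, rather than on the ID alone.
    return bool(detector.id) and detector.enabled_in_account


# Quick self-check of the refined behavior.
assert detector_is_active(Detector("det-1", True))
assert not detector_is_active(Detector("det-1", False))
assert not detector_is_active(Detector(None, True))

Gating on both fields keeps a detector that exists but is disabled from being scored as if it were actively monitoring the account.
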
https://github.com/teamqurrent/prowler | Your objective is to refine the checks in the AWS GuardDuty service integration. The changes involve updating the `guardduty_centrally_managed.py` and `guardduty_no_high_severity_findings.py` files to improve the logic used to evaluate GuardDuty detectors: the condition should verify not only that a GuardDuty detector ID exists (detector.id) but also that the detector is enabled in the account (detector.enabled_in_account). This change improves the accuracy of the GuardDuty service checks in Prowler, ensuring that the status of detectors is reported correctly based on their enabled state. | f8e713a | about-time==4.2.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6a538862d33ce67d997429d14998310e1dbfda6cb7d9bbfbf799c4709847fece \
--hash=sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341
adal==1.2.7 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d \
--hash=sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1
alive-progress==3.1.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5 \
--hash=sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98
attrs==23.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
--hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
awsipranges==0.3.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f0b3f22a9dc1163c85b513bed812b6c92bdacd674e6a7b68252a3c25b99e2c0 \
--hash=sha256:f3d7a54aeaf7fe310beb5d377a4034a63a51b72677ae6af3e0967bc4de7eedaf
azure-common==1.1.28 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3 \
--hash=sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad
azure-core==1.28.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9 \
--hash=sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd
azure-identity==1.15.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8 \
--hash=sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912
azure-mgmt-authorization==4.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69b85abc09ae64fc72975bd43431170d8c7eb5d166754b98aac5f3845de57dc4 \
--hash=sha256:d8feeb3842e6ddf1a370963ca4f61fb6edc124e8997b807dd025bc9b2379cd1a
azure-mgmt-core==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:81071675f186a585555ef01816f2774d49c1c9024cb76e5720c3c0f6b337bb7d \
--hash=sha256:d195208340094f98e5a6661b781cde6f6a051e79ce317caabd8ff97030a9b3ae
azure-mgmt-security==5.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38b03efe82c2344cea203fda95e6d00b7ac22782fa1c0b585cd0ea2c8ff3e702 \
--hash=sha256:73a74ce8f6ffb1b345ce101c8abdd42238f161f0988d168d23918feda0089654
azure-mgmt-sql==3.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:129042cc011225e27aee6ef2697d585fa5722e5d1aeb0038af6ad2451a285457 \
--hash=sha256:1d1dd940d4d41be4ee319aad626341251572a5bf4a2addec71779432d9a1381f
azure-mgmt-storage==21.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc \
--hash=sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162
azure-mgmt-subscription==3.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38d4574a8d47fa17e3587d756e296cb63b82ad8fb21cd8543bcee443a502bf48 \
--hash=sha256:4e255b4ce9b924357bb8c5009b3c88a2014d3203b2495e2256fa027bf84e800e
azure-storage-blob==12.19.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897 \
--hash=sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b
boto3==1.26.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:9e7242b9059d937f34264125fecd844cb5e01acce6be093f6c44869fdf7c6e30 \
--hash=sha256:fa85b67147c8dc99b6e7c699fc086103f958f9677db934f70659e6e6a72a818c
botocore==1.29.165 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:6f35d59e230095aed7cd747604fe248fa384bebb7d09549077892f936a8ca3df \
--hash=sha256:988b948be685006b43c4bbd8f5c0cb93e77c66deb70561994e0c5b31b5a67210
cachetools==5.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \
--hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4
certifi==2023.7.22 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
--hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
cffi==1.15.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
--hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
--hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
--hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
--hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
--hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
--hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
--hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
--hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
--hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
--hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
--hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
--hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
--hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
--hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
--hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
--hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
--hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
--hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
--hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
--hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
--hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
--hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
--hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
--hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
--hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
--hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
--hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
--hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
--hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
--hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
--hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
--hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
--hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
--hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
--hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
--hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
--hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
--hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
--hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
--hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
--hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
--hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
--hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
--hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
--hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
charset-normalizer==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
click-plugins==1.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \
--hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8
click==8.1.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
colorama==0.4.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
--hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
contextlib2==21.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f \
--hash=sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869
cryptography==41.0.6 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \
--hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \
--hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \
--hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \
--hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \
--hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \
--hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \
--hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \
--hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \
--hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \
--hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \
--hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \
--hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \
--hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \
--hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \
--hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \
--hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \
--hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \
--hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \
--hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \
--hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \
--hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \
--hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae
detect-secrets==1.4.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd \
--hash=sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833
filelock==3.12.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81 \
--hash=sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec
google-api-core==2.11.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22 \
--hash=sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e
google-api-python-client==2.111.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3a45a53c031478d1c82c7162dd25c9a965247bca6bd438af0838a9d9b8219405 \
--hash=sha256:b605adee2d09a843b97a59925757802904679e44e5599708cedb8939900dfbc7
google-auth-httplib2==0.2.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05 \
--hash=sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d
google-auth==2.17.3 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc \
--hash=sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f
googleapis-common-protos==1.59.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44 \
--hash=sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f
grapheme==0.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca
httplib2==0.22.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc \
--hash=sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81
idna==3.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
isodate==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96 \
--hash=sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9
jmespath==1.0.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \
--hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe
jsonschema-specifications==2023.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7 \
--hash=sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28
jsonschema==4.20.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa \
--hash=sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3
msal-extensions==1.0.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:91e3db9620b822d0ed2b4d1850056a0f133cba04455e62f11612e40f5502f2ee \
--hash=sha256:c676aba56b0cce3783de1b5c5ecfe828db998167875126ca4b47dc6436451354
msal==1.24.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:aa0972884b3c6fdec53d9a0bd15c12e5bd7b71ac1b66d746f54d128709f3f8f8 \
--hash=sha256:ce4320688f95c301ee74a4d0e9dbcfe029a63663a8cc61756f40d0d0d36574ad
msgraph-core==0.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:147324246788abe8ed7e05534cd9e4e0ec98b33b30e011693b8d014cebf97f63 \
--hash=sha256:e297564b9a0ca228493d8851f95cb2de9522143d82efa40ce3a6ad286e21392e
msrest==0.7.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32 \
--hash=sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9
msrestazure==0.6.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9 \
--hash=sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189
oauthlib==3.2.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \
--hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918
portalocker==2.7.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51 \
--hash=sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983
protobuf==4.23.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \
--hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \
--hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \
--hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \
--hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \
--hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \
--hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \
--hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \
--hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \
--hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \
--hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \
--hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \
--hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2
pyasn1-modules==0.3.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \
--hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d
pyasn1==0.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \
--hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde
pycparser==2.21 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
pydantic==1.10.13 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548 \
--hash=sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80 \
--hash=sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340 \
--hash=sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01 \
--hash=sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132 \
--hash=sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599 \
--hash=sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1 \
--hash=sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8 \
--hash=sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe \
--hash=sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0 \
--hash=sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17 \
--hash=sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953 \
--hash=sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f \
--hash=sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f \
--hash=sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d \
--hash=sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127 \
--hash=sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8 \
--hash=sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f \
--hash=sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580 \
--hash=sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6 \
--hash=sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691 \
--hash=sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87 \
--hash=sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd \
--hash=sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96 \
--hash=sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687 \
--hash=sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33 \
--hash=sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69 \
--hash=sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653 \
--hash=sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78 \
--hash=sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261 \
--hash=sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f \
--hash=sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9 \
--hash=sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d \
--hash=sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737 \
--hash=sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5 \
--hash=sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0
pyjwt==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyjwt[crypto]==2.6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
pywin32==306 ; python_version >= "3.9" and platform_system == "Windows" and python_version < "3.12" \
--hash=sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d \
--hash=sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65 \
--hash=sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e \
--hash=sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b \
--hash=sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4 \
--hash=sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040 \
--hash=sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a \
--hash=sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36 \
--hash=sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8 \
--hash=sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e \
--hash=sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802 \
--hash=sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a \
--hash=sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407 \
--hash=sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0
pyyaml==6.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
--hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
--hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
--hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
--hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
--hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
--hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
--hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
--hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
--hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
--hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
--hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
--hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
--hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
--hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
--hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
--hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
--hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
--hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
--hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
--hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
--hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
--hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
--hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
--hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
--hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
--hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
--hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
--hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
--hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
--hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
--hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
--hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
--hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
--hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
--hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
--hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
--hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
referencing==0.29.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e \
--hash=sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f
requests-file==1.5.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \
--hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953
requests-oauthlib==1.3.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \
--hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a
requests==2.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
rpds-py==0.8.10 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b \
--hash=sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09 \
--hash=sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068 \
--hash=sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315 \
--hash=sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb \
--hash=sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4 \
--hash=sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7 \
--hash=sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad \
--hash=sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8 \
--hash=sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd \
--hash=sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16 \
--hash=sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca \
--hash=sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9 \
--hash=sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017 \
--hash=sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c \
--hash=sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34 \
--hash=sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1 \
--hash=sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6 \
--hash=sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d \
--hash=sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7 \
--hash=sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e \
--hash=sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181 \
--hash=sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991 \
--hash=sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4 \
--hash=sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f \
--hash=sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf \
--hash=sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe \
--hash=sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a \
--hash=sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921 \
--hash=sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a \
--hash=sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7 \
--hash=sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7 \
--hash=sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4 \
--hash=sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8 \
--hash=sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055 \
--hash=sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0 \
--hash=sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169 \
--hash=sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1 \
--hash=sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6 \
--hash=sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8 \
--hash=sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0 \
--hash=sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3 \
--hash=sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38 \
--hash=sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10 \
--hash=sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b \
--hash=sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7 \
--hash=sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c \
--hash=sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f \
--hash=sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e \
--hash=sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0 \
--hash=sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a \
--hash=sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711 \
--hash=sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346 \
--hash=sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4 \
--hash=sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892 \
--hash=sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734 \
--hash=sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531 \
--hash=sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0 \
--hash=sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d \
--hash=sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58 \
--hash=sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b \
--hash=sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1 \
--hash=sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8 \
--hash=sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea \
--hash=sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c \
--hash=sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c \
--hash=sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722 \
--hash=sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7 \
--hash=sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52 \
--hash=sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0 \
--hash=sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c \
--hash=sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615 \
--hash=sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c \
--hash=sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de \
--hash=sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4 \
--hash=sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0 \
--hash=sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2 \
--hash=sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b \
--hash=sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036 \
--hash=sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451 \
--hash=sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47 \
--hash=sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49 \
--hash=sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873 \
--hash=sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2 \
--hash=sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c \
--hash=sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7 \
--hash=sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773 \
--hash=sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767 \
--hash=sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29 \
--hash=sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292 \
--hash=sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8 \
--hash=sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5 \
--hash=sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786 \
--hash=sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e \
--hash=sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae \
--hash=sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6 \
--hash=sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84
rsa==4.9 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
--hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
s3transfer==0.6.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:3c0da2d074bf35d6870ef157158641178a4204a6e689e82546083e31e0311346 \
--hash=sha256:640bb492711f4c0c0905e1f62b6aaeb771881935ad27884852411f8e9cacbca9
schema==0.7.5 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197 \
--hash=sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c
shodan==1.31.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:c73275386ea02390e196c35c660706a28dd4d537c5a21eb387ab6236fac251f6
six==1.16.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
slack-sdk==3.26.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:d1600211eaa37c71a5f92daf4404074c3e6b3f5359a37c93c818b39d88ab4ca0 \
--hash=sha256:f80f0d15f0fce539b470447d2a07b03ecdad6b24f69c1edd05d464cf21253a06
tabulate==0.9.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \
--hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f
tldextract==3.4.4 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:581e7dbefc90e7bb857bb6f768d25c811a3c5f0892ed56a9a2999ddb7b1b70c2 \
--hash=sha256:5fe3210c577463545191d45ad522d3d5e78d55218ce97215e82004dcae1e1234
typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \
--hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4
uritemplate==4.1.1 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0 \
--hash=sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e
urllib3==1.26.18 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
--hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
xlsxwriter==3.1.0 ; python_version >= "3.9" and python_version < "3.12" \
--hash=sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122 \
--hash=sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de | python3.9 | 3a3bb44f | diff --git a/prowler/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed.py b/prowler/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed.py
--- a/prowler/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed.py
+++ b/prowler/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed.py
@@ -6,7 +6,7 @@ class guardduty_centrally_managed(Check):
def execute(self):
findings = []
for detector in guardduty_client.detectors:
- if detector.id:
+ if detector.id and detector.enabled_in_account:
report = Check_Report_AWS(self.metadata())
report.region = detector.region
report.resource_id = detector.id
diff --git a/prowler/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings.py b/prowler/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings.py
--- a/prowler/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings.py
+++ b/prowler/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings.py
@@ -6,7 +6,7 @@ class guardduty_no_high_severity_findings(Check):
def execute(self):
findings = []
for detector in guardduty_client.detectors:
- if detector.id:
+ if detector.id and detector.enabled_in_account:
report = Check_Report_AWS(self.metadata())
report.region = detector.region
report.resource_id = detector.id
diff --git a/tests/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed_test.py b/tests/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed_test.py
--- a/tests/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed_test.py
+++ b/tests/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed_test.py
@@ -62,6 +62,31 @@ class Test_guardduty_centrally_managed:
assert result[0].region == AWS_REGION
assert result[0].resource_arn == DETECTOR_ARN
+ def test_not_enabled_account_detector(self):
+ guardduty_client = mock.MagicMock
+ guardduty_client.detectors = []
+ guardduty_client.detectors.append(
+ Detector(
+ id=AWS_ACCOUNT_NUMBER,
+ region=AWS_REGION,
+ arn=DETECTOR_ARN,
+ enabled_in_account=False,
+ )
+ )
+
+ with mock.patch(
+ "prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty",
+ guardduty_client,
+ ):
+ # Test Check
+ from prowler.providers.aws.services.guardduty.guardduty_centrally_managed.guardduty_centrally_managed import (
+ guardduty_centrally_managed,
+ )
+
+ check = guardduty_centrally_managed()
+ result = check.execute()
+ assert len(result) == 0
+
def test_detector_centralized_managed(self):
guardduty_client = mock.MagicMock
guardduty_client.detectors = []
diff --git a/tests/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings_test.py b/tests/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings_test.py
--- a/tests/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings_test.py
+++ b/tests/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings_test.py
@@ -58,6 +58,29 @@ class Test_guardduty_no_high_severity_findings:
assert result[0].resource_arn == DETECTOR_ARN
assert result[0].region == AWS_REGION
+ def test_not_enabled_account_detector(self):
+ guardduty_client = mock.MagicMock
+ guardduty_client.detectors = []
+ guardduty_client.detectors.append(
+ Detector(
+ id=AWS_ACCOUNT_NUMBER,
+ arn=DETECTOR_ARN,
+ region=AWS_REGION,
+ enabled_in_account=False,
+ )
+ )
+ with mock.patch(
+ "prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty",
+ guardduty_client,
+ ):
+ from prowler.providers.aws.services.guardduty.guardduty_no_high_severity_findings.guardduty_no_high_severity_findings import (
+ guardduty_no_high_severity_findings,
+ )
+
+ check = guardduty_no_high_severity_findings()
+ result = check.execute()
+ assert len(result) == 0
+
def test_high_findings(self):
guardduty_client = mock.MagicMock
guardduty_client.detectors = []
| [
{
"content": "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom prowler.providers.aws.services.guardduty.guardduty_client import guardduty_client\n\n\nclass guardduty_centrally_managed(Check):\n def execute(self):\n findings = []\n for detector in guardduty_client.detectors:\n if detector.id:\n report = Check_Report_AWS(self.metadata())\n report.region = detector.region\n report.resource_id = detector.id\n report.resource_arn = detector.arn\n report.resource_tags = detector.tags\n report.status = \"FAIL\"\n report.status_extended = (\n f\"GuardDuty detector {detector.id} is not centrally managed.\"\n )\n if detector.administrator_account:\n report.status = \"PASS\"\n report.status_extended = f\"GuardDuty detector {detector.id} is centrally managed by account {detector.administrator_account}.\"\n elif detector.member_accounts:\n report.status = \"PASS\"\n report.status_extended = f\"GuardDuty detector {detector.id} is administrator account with {len(detector.member_accounts)} member accounts.\"\n\n findings.append(report)\n\n return findings\n",
"path": "prowler/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed.py"
},
{
"content": "from prowler.lib.check.models import Check, Check_Report_AWS\nfrom prowler.providers.aws.services.guardduty.guardduty_client import guardduty_client\n\n\nclass guardduty_no_high_severity_findings(Check):\n def execute(self):\n findings = []\n for detector in guardduty_client.detectors:\n if detector.id:\n report = Check_Report_AWS(self.metadata())\n report.region = detector.region\n report.resource_id = detector.id\n report.resource_arn = detector.arn\n report.resource_tags = detector.tags\n report.status = \"PASS\"\n report.status_extended = f\"GuardDuty detector {detector.id} does not have high severity findings.\"\n if len(detector.findings) > 0:\n report.status = \"FAIL\"\n report.status_extended = f\"GuardDuty detector {detector.id} has {str(len(detector.findings))} high severity findings.\"\n\n findings.append(report)\n\n return findings\n",
"path": "prowler/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings.py"
},
{
"content": "from unittest import mock\nfrom uuid import uuid4\n\nfrom prowler.providers.aws.services.guardduty.guardduty_service import Detector\n\nAWS_REGION = \"eu-west-1\"\nAWS_ACCOUNT_NUMBER = \"123456789012\"\nAWS_ACCOUNT_NUMBER_ADMIN = \"123456789013\"\nDETECTOR_ID = str(uuid4())\nDETECTOR_ARN = (\n f\"arn:aws:guardduty:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:detector/{DETECTOR_ID}\"\n)\n\n\nclass Test_guardduty_centrally_managed:\n def test_no_detectors(self):\n guardduty_client = mock.MagicMock\n guardduty_client.detectors = []\n with mock.patch(\n \"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty\",\n guardduty_client,\n ):\n from prowler.providers.aws.services.guardduty.guardduty_no_high_severity_findings.guardduty_no_high_severity_findings import (\n guardduty_no_high_severity_findings,\n )\n\n check = guardduty_no_high_severity_findings()\n result = check.execute()\n assert len(result) == 0\n\n def test_detector_no_centralized_managed(self):\n guardduty_client = mock.MagicMock\n guardduty_client.detectors = []\n guardduty_client.detectors.append(\n Detector(\n id=DETECTOR_ID,\n region=AWS_REGION,\n arn=DETECTOR_ARN,\n status=False,\n findings=[str(uuid4())],\n )\n )\n\n with mock.patch(\n \"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty\",\n guardduty_client,\n ):\n # Test Check\n from prowler.providers.aws.services.guardduty.guardduty_centrally_managed.guardduty_centrally_managed import (\n guardduty_centrally_managed,\n )\n\n check = guardduty_centrally_managed()\n result = check.execute()\n assert len(result) == 1\n assert result[0].status == \"FAIL\"\n assert (\n result[0].status_extended\n == f\"GuardDuty detector {DETECTOR_ID} is not centrally managed.\"\n )\n assert result[0].resource_id == DETECTOR_ID\n assert result[0].region == AWS_REGION\n assert result[0].resource_arn == DETECTOR_ARN\n\n def test_detector_centralized_managed(self):\n guardduty_client = mock.MagicMock\n guardduty_client.detectors = []\n guardduty_client.detectors.append(\n Detector(\n id=DETECTOR_ID,\n region=AWS_REGION,\n arn=DETECTOR_ARN,\n status=False,\n findings=[str(uuid4())],\n administrator_account=AWS_ACCOUNT_NUMBER_ADMIN,\n )\n )\n\n with mock.patch(\n \"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty\",\n guardduty_client,\n ):\n # Test Check\n from prowler.providers.aws.services.guardduty.guardduty_centrally_managed.guardduty_centrally_managed import (\n guardduty_centrally_managed,\n )\n\n check = guardduty_centrally_managed()\n result = check.execute()\n assert len(result) == 1\n assert result[0].status == \"PASS\"\n assert (\n result[0].status_extended\n == f\"GuardDuty detector {DETECTOR_ID} is centrally managed by account {AWS_ACCOUNT_NUMBER_ADMIN}.\"\n )\n assert result[0].resource_id == DETECTOR_ID\n assert result[0].region == AWS_REGION\n assert result[0].resource_arn == DETECTOR_ARN\n\n def test_detector_administrator(self):\n guardduty_client = mock.MagicMock\n guardduty_client.detectors = []\n guardduty_client.detectors.append(\n Detector(\n id=DETECTOR_ID,\n region=AWS_REGION,\n arn=DETECTOR_ARN,\n status=False,\n findings=[str(uuid4())],\n member_accounts=[AWS_ACCOUNT_NUMBER_ADMIN],\n )\n )\n\n with mock.patch(\n \"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty\",\n guardduty_client,\n ):\n # Test Check\n from prowler.providers.aws.services.guardduty.guardduty_centrally_managed.guardduty_centrally_managed import (\n guardduty_centrally_managed,\n )\n\n check = guardduty_centrally_managed()\n 
result = check.execute()\n assert len(result) == 1\n assert result[0].status == \"PASS\"\n assert (\n result[0].status_extended\n == f\"GuardDuty detector {DETECTOR_ID} is administrator account with 1 member accounts.\"\n )\n assert result[0].resource_id == DETECTOR_ID\n assert result[0].region == AWS_REGION\n assert result[0].resource_arn == DETECTOR_ARN\n",
"path": "tests/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed_test.py"
},
{
"content": "from re import search\nfrom unittest import mock\nfrom uuid import uuid4\n\nfrom prowler.providers.aws.services.guardduty.guardduty_service import Detector\n\nAWS_REGION = \"eu-west-1\"\nAWS_ACCOUNT_NUMBER = \"123456789012\"\n\nDETECTOR_ID = str(uuid4())\nDETECTOR_ARN = (\n f\"arn:aws:guardduty:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:detector/{DETECTOR_ID}\"\n)\n\n\nclass Test_guardduty_no_high_severity_findings:\n def test_no_detectors(self):\n guardduty_client = mock.MagicMock\n guardduty_client.detectors = []\n with mock.patch(\n \"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty\",\n guardduty_client,\n ):\n from prowler.providers.aws.services.guardduty.guardduty_no_high_severity_findings.guardduty_no_high_severity_findings import (\n guardduty_no_high_severity_findings,\n )\n\n check = guardduty_no_high_severity_findings()\n result = check.execute()\n assert len(result) == 0\n\n def test_no_high_findings(self):\n guardduty_client = mock.MagicMock\n guardduty_client.detectors = []\n guardduty_client.detectors.append(\n Detector(\n id=DETECTOR_ID,\n arn=DETECTOR_ARN,\n region=AWS_REGION,\n )\n )\n with mock.patch(\n \"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty\",\n guardduty_client,\n ):\n from prowler.providers.aws.services.guardduty.guardduty_no_high_severity_findings.guardduty_no_high_severity_findings import (\n guardduty_no_high_severity_findings,\n )\n\n check = guardduty_no_high_severity_findings()\n result = check.execute()\n assert len(result) == 1\n assert result[0].status == \"PASS\"\n assert search(\n \"does not have high severity findings.\", result[0].status_extended\n )\n assert result[0].resource_id == DETECTOR_ID\n assert result[0].resource_arn == DETECTOR_ARN\n assert result[0].region == AWS_REGION\n\n def test_high_findings(self):\n guardduty_client = mock.MagicMock\n guardduty_client.detectors = []\n guardduty_client.detectors.append(\n Detector(\n id=DETECTOR_ID,\n region=AWS_REGION,\n arn=DETECTOR_ARN,\n status=False,\n findings=[str(uuid4())],\n )\n )\n with mock.patch(\n \"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty\",\n guardduty_client,\n ):\n from prowler.providers.aws.services.guardduty.guardduty_no_high_severity_findings.guardduty_no_high_severity_findings import (\n guardduty_no_high_severity_findings,\n )\n\n check = guardduty_no_high_severity_findings()\n result = check.execute()\n assert len(result) == 1\n assert result[0].status == \"FAIL\"\n assert search(\"has 1 high severity findings\", result[0].status_extended)\n assert result[0].resource_id == DETECTOR_ID\n assert result[0].resource_arn == DETECTOR_ARN\n assert result[0].region == AWS_REGION\n",
"path": "tests/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings_test.py"
}
] | 11_9 | python | import unittest
import sys
from unittest import mock
from uuid import uuid4
class Test_guardduty_centrally_managed(unittest.TestCase):
def test_not_enabled_account_detector(self):
from prowler.providers.aws.services.guardduty.guardduty_service import Detector
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_WEST_1,
)
AWS_ACCOUNT_NUMBER_ADMIN = "123456789013"
DETECTOR_ID = str(uuid4())
DETECTOR_ARN = f"arn:aws:guardduty:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:detector/{DETECTOR_ID}"
guardduty_client = mock.MagicMock
guardduty_client.detectors = []
guardduty_client.detectors.append(
Detector(
id=AWS_ACCOUNT_NUMBER,
region=AWS_REGION_EU_WEST_1,
arn=DETECTOR_ARN,
enabled_in_account=False,
)
)
with mock.patch(
"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty",
guardduty_client,
):
# Test Check
from prowler.providers.aws.services.guardduty.guardduty_centrally_managed.guardduty_centrally_managed import (
guardduty_centrally_managed,
)
check = guardduty_centrally_managed()
result = check.execute()
assert len(result) == 0
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(Test_guardduty_centrally_managed))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
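A quick aside on the guard the prowler patch in the record above introduces: both GuardDuty checks previously reported on every detector returned by the API, including detectors that exist in the account but are not enabled there, and the fix is to filter those out with `detector.id and detector.enabled_in_account`. Below is a minimal, self-contained sketch of that filtering logic using a simplified stand-in for prowler's `Detector` model (the real model lives in `guardduty_service.py` and carries more fields); all names and values here are illustrative assumptions, not prowler's actual API.

from dataclasses import dataclass, field
from typing import List

@dataclass
class Detector:
    # Simplified stand-in for prowler's Detector model (illustrative only).
    id: str
    region: str
    arn: str
    enabled_in_account: bool = True
    findings: List[str] = field(default_factory=list)

def reportable(detectors: List[Detector]) -> List[Detector]:
    # Mirror of the patched guard: skip detectors that appear in the API
    # response but are not enabled in the audited account, so no PASS/FAIL
    # report is emitted for them (hence the new tests assert len(result) == 0).
    return [d for d in detectors if d.id and d.enabled_in_account]

enabled = Detector(id="abc123", region="eu-west-1",
                   arn="arn:aws:guardduty:eu-west-1:123456789012:detector/abc123")
disabled = Detector(id="123456789012", region="eu-west-1",
                    arn="arn:example", enabled_in_account=False)
assert [d.id for d in reportable([enabled, disabled])] == ["abc123"]

The design choice mirrored here is that a disabled detector yields no report at all, rather than a FAIL: reporting on a detector the account does not actually use would be noise, not a finding.
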
https://github.com/teamqurrent/aider | Improve the resilience of file reading in the `__init__` method of a class in `io.py` by strengthening its error handling. Locate the section of `__init__` where files are opened and read, and update the exception handler that currently catches FileNotFoundError so it also catches UnicodeDecodeError. With this change, the method gracefully skips files that are missing or that cannot be decoded with the configured encoding, instead of raising. | efb3f03 | aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2023.5.7
charset-normalizer==3.1.0
frozenlist==1.3.3
gitdb==4.0.10
GitPython==3.1.31
idna==3.4
markdown-it-py==2.2.0
mdurl==0.1.2
multidict==6.0.4
openai==0.27.6
prompt-toolkit==3.0.38
Pygments==2.15.1
requests==2.30.0
rich==13.3.5
smmap==5.0.0
tqdm==4.65.0
urllib3==2.0.2
wcwidth==0.2.6
yarl==1.9.2
pytest==7.3.1
tiktoken==0.4.0
configargparse
PyYAML
backoff==2.2.1
networkx==3.1
diskcache==5.6.1
numpy==1.26.1
scipy==1.11.3
jsonschema==4.17.3
sounddevice==0.4.6
soundfile==0.12.1
pathspec==0.11.2
grep-ast==0.2.4
| python3.9 | f3d3815 | diff --git a/aider/io.py b/aider/io.py
--- a/aider/io.py
+++ b/aider/io.py
@@ -44,7 +44,7 @@ class AutoCompleter(Completer):
try:
with open(fname, "r", encoding=self.encoding) as f:
content = f.read()
- except FileNotFoundError:
+ except (FileNotFoundError, UnicodeDecodeError):
continue
try:
lexer = guess_lexer_for_filename(fname, content)
diff --git a/tests/test_commands.py b/tests/test_commands.py
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -468,3 +468,20 @@ class TestCommands(TestCase):
del coder
del commands
del repo
+
+ def test_cmd_add_unicode_error(self):
+ # Initialize the Commands and InputOutput objects
+ io = InputOutput(pretty=False, yes=True)
+ from aider.coders import Coder
+
+ coder = Coder.create(models.GPT35, None, io)
+ commands = Commands(io, coder)
+
+ fname = "file.txt"
+ encoding = "utf-16"
+ some_content_which_will_error_if_read_with_encoding_utf8 = "ÅÍÎÏ".encode(encoding)
+ with open(fname, "wb") as f:
+ f.write(some_content_which_will_error_if_read_with_encoding_utf8)
+
+ commands.cmd_add("file.txt")
+ self.assertEqual(coder.abs_fnames, set())
diff --git a/tests/test_io.py b/tests/test_io.py
--- a/tests/test_io.py
+++ b/tests/test_io.py
@@ -1,8 +1,10 @@
import os
import unittest
+from pathlib import Path
from unittest.mock import patch
from aider.io import AutoCompleter, InputOutput
+from tests.utils import ChdirTemporaryDirectory
class TestInputOutput(unittest.TestCase):
@@ -19,6 +21,28 @@ class TestInputOutput(unittest.TestCase):
autocompleter = AutoCompleter(root, rel_fnames, addable_rel_fnames, commands, "utf-8")
self.assertEqual(autocompleter.words, set(rel_fnames))
+ def test_autocompleter_with_unicode_file(self):
+ with ChdirTemporaryDirectory():
+ root = ""
+ fname = "file.py"
+ rel_fnames = [fname]
+ addable_rel_fnames = []
+ commands = None
+ autocompleter = AutoCompleter(root, rel_fnames, addable_rel_fnames, commands, "utf-8")
+ self.assertEqual(autocompleter.words, set(rel_fnames))
+
+ Path(fname).write_text("def hello(): pass\n")
+ autocompleter = AutoCompleter(root, rel_fnames, addable_rel_fnames, commands, "utf-8")
+ self.assertEqual(autocompleter.words, set(rel_fnames + ["hello"]))
+
+ encoding = "utf-16"
+ some_content_which_will_error_if_read_with_encoding_utf8 = "ÅÍÎÏ".encode(encoding)
+ with open(fname, "wb") as f:
+ f.write(some_content_which_will_error_if_read_with_encoding_utf8)
+
+ autocompleter = AutoCompleter(root, rel_fnames, addable_rel_fnames, commands, "utf-8")
+ self.assertEqual(autocompleter.words, set(rel_fnames))
+
if __name__ == "__main__":
unittest.main()
| [
{
"content": "import os\nfrom collections import defaultdict\nfrom datetime import datetime\nfrom pathlib import Path\n\nfrom prompt_toolkit.completion import Completer, Completion\nfrom prompt_toolkit.history import FileHistory\nfrom prompt_toolkit.key_binding import KeyBindings\nfrom prompt_toolkit.lexers import PygmentsLexer\nfrom prompt_toolkit.shortcuts import CompleteStyle, PromptSession, prompt\nfrom prompt_toolkit.styles import Style\nfrom pygments.lexers import MarkdownLexer, guess_lexer_for_filename\nfrom pygments.token import Token\nfrom pygments.util import ClassNotFound\nfrom rich.console import Console\nfrom rich.text import Text\n\nfrom .dump import dump # noqa: F401\n\n\nclass AutoCompleter(Completer):\n def __init__(self, root, rel_fnames, addable_rel_fnames, commands, encoding):\n self.commands = commands\n self.addable_rel_fnames = addable_rel_fnames\n self.rel_fnames = rel_fnames\n self.encoding = encoding\n\n fname_to_rel_fnames = defaultdict(list)\n for rel_fname in addable_rel_fnames:\n fname = os.path.basename(rel_fname)\n if fname != rel_fname:\n fname_to_rel_fnames[fname].append(rel_fname)\n self.fname_to_rel_fnames = fname_to_rel_fnames\n\n self.words = set()\n\n for rel_fname in addable_rel_fnames:\n self.words.add(rel_fname)\n\n for rel_fname in rel_fnames:\n self.words.add(rel_fname)\n\n fname = Path(root) / rel_fname\n try:\n with open(fname, \"r\", encoding=self.encoding) as f:\n content = f.read()\n except FileNotFoundError:\n continue\n try:\n lexer = guess_lexer_for_filename(fname, content)\n except ClassNotFound:\n continue\n tokens = list(lexer.get_tokens(content))\n self.words.update(token[1] for token in tokens if token[0] in Token.Name)\n\n def get_completions(self, document, complete_event):\n text = document.text_before_cursor\n words = text.split()\n if not words:\n return\n\n if text[0] == \"/\":\n if len(words) == 1 and not text[-1].isspace():\n candidates = self.commands.get_commands()\n candidates = [(cmd, cmd) for cmd in candidates]\n else:\n for completion in self.commands.get_command_completions(words[0][1:], words[-1]):\n yield completion\n return\n else:\n candidates = self.words\n candidates.update(set(self.fname_to_rel_fnames))\n candidates = [(word, f\"`{word}`\") for word in candidates]\n\n last_word = words[-1]\n for word_match, word_insert in candidates:\n if word_match.lower().startswith(last_word.lower()):\n rel_fnames = self.fname_to_rel_fnames.get(word_match, [])\n if rel_fnames:\n for rel_fname in rel_fnames:\n yield Completion(\n f\"`{rel_fname}`\", start_position=-len(last_word), display=rel_fname\n )\n else:\n yield Completion(\n word_insert, start_position=-len(last_word), display=word_match\n )\n\n\nclass InputOutput:\n num_error_outputs = 0\n num_user_asks = 0\n\n def __init__(\n self,\n pretty=True,\n yes=False,\n input_history_file=None,\n chat_history_file=None,\n input=None,\n output=None,\n user_input_color=\"blue\",\n tool_output_color=None,\n tool_error_color=\"red\",\n encoding=\"utf-8\",\n dry_run=False,\n ):\n no_color = os.environ.get(\"NO_COLOR\")\n if no_color is not None and no_color != \"\":\n pretty = False\n\n self.user_input_color = user_input_color if pretty else None\n self.tool_output_color = tool_output_color if pretty else None\n self.tool_error_color = tool_error_color if pretty else None\n\n self.input = input\n self.output = output\n\n self.pretty = pretty\n if self.output:\n self.pretty = False\n\n self.yes = yes\n\n self.input_history_file = input_history_file\n if chat_history_file is not 
None:\n self.chat_history_file = Path(chat_history_file)\n else:\n self.chat_history_file = None\n\n self.encoding = encoding\n self.dry_run = dry_run\n\n if pretty:\n self.console = Console()\n else:\n self.console = Console(force_terminal=False, no_color=True)\n\n current_time = datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\n self.append_chat_history(f\"\\n# aider chat started at {current_time}\\n\\n\")\n\n def read_text(self, filename):\n try:\n with open(str(filename), \"r\", encoding=self.encoding) as f:\n return f.read()\n except FileNotFoundError:\n self.tool_error(f\"{filename}: file not found error\")\n return\n except UnicodeError as e:\n self.tool_error(f\"{filename}: {e}\")\n self.tool_error(\"Use --encoding to set the unicode encoding.\")\n return\n\n def write_text(self, filename, content):\n if self.dry_run:\n return\n with open(str(filename), \"w\", encoding=self.encoding) as f:\n f.write(content)\n\n def get_input(self, root, rel_fnames, addable_rel_fnames, commands):\n if self.pretty:\n style = dict(style=self.user_input_color) if self.user_input_color else dict()\n self.console.rule(**style)\n else:\n print()\n\n rel_fnames = list(rel_fnames)\n show = \" \".join(rel_fnames)\n if len(show) > 10:\n show += \"\\n\"\n show += \"> \"\n\n inp = \"\"\n multiline_input = False\n\n if self.user_input_color:\n style = Style.from_dict(\n {\n \"\": self.user_input_color,\n \"pygments.literal.string\": f\"bold italic {self.user_input_color}\",\n }\n )\n else:\n style = None\n\n while True:\n completer_instance = AutoCompleter(\n root, rel_fnames, addable_rel_fnames, commands, self.encoding\n )\n if multiline_input:\n show = \". \"\n\n session_kwargs = {\n \"message\": show,\n \"completer\": completer_instance,\n \"reserve_space_for_menu\": 4,\n \"complete_style\": CompleteStyle.MULTI_COLUMN,\n \"input\": self.input,\n \"output\": self.output,\n \"lexer\": PygmentsLexer(MarkdownLexer),\n }\n if style:\n session_kwargs[\"style\"] = style\n\n if self.input_history_file is not None:\n session_kwargs[\"history\"] = FileHistory(self.input_history_file)\n\n kb = KeyBindings()\n\n @kb.add(\"escape\", \"c-m\", eager=True)\n def _(event):\n event.current_buffer.insert_text(\"\\n\")\n\n session = PromptSession(key_bindings=kb, **session_kwargs)\n line = session.prompt()\n\n if line and line[0] == \"{\" and not multiline_input:\n multiline_input = True\n inp += line[1:] + \"\\n\"\n continue\n elif line and line[-1] == \"}\" and multiline_input:\n inp += line[:-1] + \"\\n\"\n break\n elif multiline_input:\n inp += line + \"\\n\"\n else:\n inp = line\n break\n\n print()\n self.user_input(inp)\n return inp\n\n def add_to_input_history(self, inp):\n if not self.input_history_file:\n return\n FileHistory(self.input_history_file).append_string(inp)\n\n def get_input_history(self):\n if not self.input_history_file:\n return []\n\n fh = FileHistory(self.input_history_file)\n return fh.load_history_strings()\n\n def user_input(self, inp, log_only=True):\n if not log_only:\n style = dict(style=self.user_input_color) if self.user_input_color else dict()\n self.console.print(inp, **style)\n\n prefix = \"####\"\n if inp:\n hist = inp.splitlines()\n else:\n hist = [\"<blank>\"]\n\n hist = f\" \\n{prefix} \".join(hist)\n\n hist = f\"\"\"\n{prefix} {hist}\"\"\"\n self.append_chat_history(hist, linebreak=True)\n\n # OUTPUT\n\n def ai_output(self, content):\n hist = \"\\n\" + content.strip() + \"\\n\\n\"\n self.append_chat_history(hist)\n\n def confirm_ask(self, question, default=\"y\"):\n self.num_user_asks += 
1\n\n if self.yes is True:\n res = \"yes\"\n elif self.yes is False:\n res = \"no\"\n else:\n res = prompt(question + \" \", default=default)\n\n hist = f\"{question.strip()} {res.strip()}\"\n self.append_chat_history(hist, linebreak=True, blockquote=True)\n if self.yes in (True, False):\n self.tool_output(hist)\n\n if not res or not res.strip():\n return\n return res.strip().lower().startswith(\"y\")\n\n def prompt_ask(self, question, default=None):\n self.num_user_asks += 1\n\n if self.yes is True:\n res = \"yes\"\n elif self.yes is False:\n res = \"no\"\n else:\n res = prompt(question + \" \", default=default)\n\n hist = f\"{question.strip()} {res.strip()}\"\n self.append_chat_history(hist, linebreak=True, blockquote=True)\n if self.yes in (True, False):\n self.tool_output(hist)\n\n return res\n\n def tool_error(self, message):\n self.num_error_outputs += 1\n\n if message.strip():\n hist = f\"{message.strip()}\"\n self.append_chat_history(hist, linebreak=True, blockquote=True)\n\n message = Text(message)\n style = dict(style=self.tool_error_color) if self.tool_error_color else dict()\n self.console.print(message, **style)\n\n def tool_output(self, *messages, log_only=False):\n if messages:\n hist = \" \".join(messages)\n hist = f\"{hist.strip()}\"\n self.append_chat_history(hist, linebreak=True, blockquote=True)\n\n if not log_only:\n messages = list(map(Text, messages))\n style = dict(style=self.tool_output_color) if self.tool_output_color else dict()\n self.console.print(*messages, **style)\n\n def append_chat_history(self, text, linebreak=False, blockquote=False):\n if blockquote:\n text = text.strip()\n text = \"> \" + text\n if linebreak:\n text = text.rstrip()\n text = text + \" \\n\"\n if not text.endswith(\"\\n\"):\n text += \"\\n\"\n if self.chat_history_file is not None:\n with self.chat_history_file.open(\"a\", encoding=self.encoding) as f:\n f.write(text)\n",
"path": "aider/io.py"
},
{
"content": "import codecs\nimport os\nimport shutil\nimport sys\nimport tempfile\nfrom io import StringIO\nfrom pathlib import Path\nfrom unittest import TestCase\n\nimport git\n\nfrom aider import models\nfrom aider.coders import Coder\nfrom aider.commands import Commands\nfrom aider.dump import dump # noqa: F401\nfrom aider.io import InputOutput\nfrom tests.utils import ChdirTemporaryDirectory, GitTemporaryDirectory, make_repo\n\n\nclass TestCommands(TestCase):\n def setUp(self):\n self.original_cwd = os.getcwd()\n self.tempdir = tempfile.mkdtemp()\n os.chdir(self.tempdir)\n\n def tearDown(self):\n os.chdir(self.original_cwd)\n shutil.rmtree(self.tempdir, ignore_errors=True)\n\n def test_cmd_add(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with 'foo.txt' and 'bar.txt' as a single string\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Check if both files have been created in the temporary directory\n self.assertTrue(os.path.exists(\"foo.txt\"))\n self.assertTrue(os.path.exists(\"bar.txt\"))\n\n def test_cmd_add_bad_glob(self):\n # https://github.com/paul-gauthier/aider/issues/293\n\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"**.txt\")\n\n def test_cmd_add_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create some test files\n with open(\"test1.py\", \"w\") as f:\n f.write(\"print('test1')\")\n with open(\"test2.py\", \"w\") as f:\n f.write(\"print('test2')\")\n with open(\"test.txt\", \"w\") as f:\n f.write(\"test\")\n\n # Call the cmd_add method with a glob pattern\n commands.cmd_add(\"*.py\")\n\n # Check if the Python files have been added to the chat session\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n # Check if the text file has not been added to the chat session\n self.assertNotIn(str(Path(\"test.txt\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_no_match(self):\n # yes=False means we will *not* create the file when it is not found\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with a non-existent file pattern\n commands.cmd_add(\"*.nonexistent\")\n\n # Check if no files have been added to the chat session\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_no_match_but_make_it(self):\n # yes=True means we *will* create the file when it is not found\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n fname = Path(\"[abc].nonexistent\")\n\n # Call the cmd_add method with a non-existent file pattern\n commands.cmd_add(str(fname))\n\n # Check if no files have been added to the chat session\n self.assertEqual(len(coder.abs_fnames), 1)\n self.assertTrue(fname.exists())\n\n def test_cmd_add_drop_directory(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, 
yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a directory and add files to it using pathlib\n Path(\"test_dir\").mkdir()\n Path(\"test_dir/another_dir\").mkdir()\n Path(\"test_dir/test_file1.txt\").write_text(\"Test file 1\")\n Path(\"test_dir/test_file2.txt\").write_text(\"Test file 2\")\n Path(\"test_dir/another_dir/test_file.txt\").write_text(\"Test file 3\")\n\n # Call the cmd_add method with a directory\n commands.cmd_add(\"test_dir test_dir/test_file2.txt\")\n\n # Check if the files have been added to the chat session\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames)\n\n commands.cmd_drop(\"test_dir/another_dir\")\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertNotIn(\n str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames\n )\n\n # Issue #139 /add problems when cwd != git_root\n\n # remember the proper abs path to this file\n abs_fname = str(Path(\"test_dir/another_dir/test_file.txt\").resolve())\n\n # chdir to someplace other than git_root\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add it via it's git_root referenced name\n commands.cmd_add(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertIn(abs_fname, coder.abs_fnames)\n\n # drop it via it's git_root referenced name\n commands.cmd_drop(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertNotIn(abs_fname, coder.abs_fnames)\n\n def test_cmd_drop_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n subdir = Path(\"subdir\")\n subdir.mkdir()\n (subdir / \"subtest1.py\").touch()\n (subdir / \"subtest2.py\").touch()\n\n Path(\"test1.py\").touch()\n Path(\"test2.py\").touch()\n\n # Add some files to the chat session\n commands.cmd_add(\"*.py\")\n\n self.assertEqual(len(coder.abs_fnames), 2)\n\n # Call the cmd_drop method with a glob pattern\n commands.cmd_drop(\"*2.py\")\n\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertNotIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_bad_encoding(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a new file foo.bad which will fail to decode as utf-8\n with codecs.open(\"foo.bad\", \"w\", encoding=\"iso-8859-15\") as f:\n f.write(\"ÆØÅ\") # Characters not present in utf-8\n\n commands.cmd_add(\"foo.bad\")\n\n self.assertEqual(coder.abs_fnames, set())\n\n def test_cmd_git(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n with GitTemporaryDirectory() as tempdir:\n # Create a file in the temporary directory\n with open(f\"{tempdir}/test.txt\", \"w\") as f:\n f.write(\"test\")\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, 
coder)\n\n # Run the cmd_git method with the arguments \"commit -a -m msg\"\n commands.cmd_git(\"add test.txt\")\n commands.cmd_git(\"commit -a -m msg\")\n\n # Check if the file has been committed to the repository\n repo = git.Repo(tempdir)\n files_in_repo = repo.git.ls_files()\n self.assertIn(\"test.txt\", files_in_repo)\n\n def test_cmd_tokens(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Redirect the standard output to an instance of io.StringIO\n stdout = StringIO()\n sys.stdout = stdout\n\n commands.cmd_tokens(\"\")\n\n # Reset the standard output\n sys.stdout = sys.__stdout__\n\n # Get the console output\n console_output = stdout.getvalue()\n\n self.assertIn(\"foo.txt\", console_output)\n self.assertIn(\"bar.txt\", console_output)\n\n def test_cmd_add_from_subdir(self):\n repo = git.Repo.init()\n repo.config_writer().set_value(\"user\", \"name\", \"Test User\").release()\n repo.config_writer().set_value(\"user\", \"email\", \"testuser@example.com\").release()\n\n # Create three empty files and add them to the git repository\n filenames = [\"one.py\", Path(\"subdir\") / \"two.py\", Path(\"anotherdir\") / \"three.py\"]\n for filename in filenames:\n file_path = Path(filename)\n file_path.parent.mkdir(parents=True, exist_ok=True)\n file_path.touch()\n repo.git.add(str(file_path))\n repo.git.commit(\"-m\", \"added\")\n\n filenames = [str(Path(fn).resolve()) for fn in filenames]\n\n ###\n\n os.chdir(\"subdir\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # this should get added\n commands.cmd_add(str(Path(\"anotherdir\") / \"three.py\"))\n\n # this should add one.py\n commands.cmd_add(\"*.py\")\n\n self.assertIn(filenames[0], coder.abs_fnames)\n self.assertNotIn(filenames[1], coder.abs_fnames)\n self.assertIn(filenames[2], coder.abs_fnames)\n\n def test_cmd_add_from_subdir_again(self):\n with GitTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add a file that is in the side_dir\n with open(\"temp.txt\", \"w\"):\n pass\n\n # this was blowing up with GitCommandError, per:\n # https://github.com/paul-gauthier/aider/issues/201\n commands.cmd_add(\"temp.txt\")\n\n def test_cmd_commit(self):\n with GitTemporaryDirectory():\n fname = \"test.txt\"\n with open(fname, \"w\") as f:\n f.write(\"test\")\n repo = git.Repo()\n repo.git.add(fname)\n repo.git.commit(\"-m\", \"initial\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n self.assertFalse(repo.is_dirty())\n with open(fname, \"w\") as f:\n f.write(\"new\")\n self.assertTrue(repo.is_dirty())\n\n commit_message = \"Test commit message\"\n commands.cmd_commit(commit_message)\n self.assertFalse(repo.is_dirty())\n\n def test_cmd_add_from_outside_root(self):\n with ChdirTemporaryDirectory() as tmp_dname:\n root = Path(\"root\")\n root.mkdir()\n os.chdir(str(root))\n\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n outside_file = Path(tmp_dname) / \"outside.txt\"\n outside_file.touch()\n\n # This should 
not be allowed!\n # https://github.com/paul-gauthier/aider/issues/178\n commands.cmd_add(\"../outside.txt\")\n\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_from_outside_git(self):\n with ChdirTemporaryDirectory() as tmp_dname:\n root = Path(\"root\")\n root.mkdir()\n os.chdir(str(root))\n\n make_repo()\n\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n outside_file = Path(tmp_dname) / \"outside.txt\"\n outside_file.touch()\n\n # This should not be allowed!\n # It was blowing up with GitCommandError, per:\n # https://github.com/paul-gauthier/aider/issues/178\n commands.cmd_add(\"../outside.txt\")\n\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_filename_with_special_chars(self):\n with ChdirTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n fname = Path(\"with[brackets].txt\")\n fname.touch()\n\n commands.cmd_add(str(fname))\n\n self.assertIn(str(fname.resolve()), coder.abs_fnames)\n\n def test_cmd_add_abs_filename(self):\n with ChdirTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n fname = Path(\"file.txt\")\n fname.touch()\n\n commands.cmd_add(str(fname.resolve()))\n\n self.assertIn(str(fname.resolve()), coder.abs_fnames)\n\n def test_cmd_add_quoted_filename(self):\n with ChdirTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n fname = Path(\"file with spaces.txt\")\n fname.touch()\n\n commands.cmd_add(f'\"{fname}\"')\n\n self.assertIn(str(fname.resolve()), coder.abs_fnames)\n\n def test_cmd_add_no_autocommit(self):\n with GitTemporaryDirectory():\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io, auto_commits=False)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"foo.txt\")\n\n # Check if both files have been created in the temporary directory\n self.assertTrue(os.path.exists(\"foo.txt\"))\n\n repo = git.Repo()\n\n # Assert that foo.txt has been `git add` but not `git commit`\n added_files = repo.git.diff(\"--cached\", \"--name-only\").split()\n self.assertIn(\"foo.txt\", added_files)\n\n def test_cmd_add_existing_with_dirty_repo(self):\n with GitTemporaryDirectory():\n repo = git.Repo()\n\n files = [\"one.txt\", \"two.txt\"]\n for fname in files:\n Path(fname).touch()\n repo.git.add(fname)\n repo.git.commit(\"-m\", \"initial\")\n\n commit = repo.head.commit.hexsha\n\n # leave a dirty `git rm`\n repo.git.rm(\"one.txt\")\n\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # There's no reason this /add should trigger a commit\n commands.cmd_add(\"two.txt\")\n\n self.assertEqual(commit, repo.head.commit.hexsha)\n\n # Windows is throwing:\n # PermissionError: [WinError 32] The process cannot access\n # the file because it is being used by another process\n\n repo.git.commit(\"-m\", \"cleanup\")\n\n del coder\n del commands\n del repo\n",
"path": "tests/test_commands.py"
},
{
"content": "import os\nimport unittest\nfrom unittest.mock import patch\n\nfrom aider.io import AutoCompleter, InputOutput\n\n\nclass TestInputOutput(unittest.TestCase):\n def test_no_color_environment_variable(self):\n with patch.dict(os.environ, {\"NO_COLOR\": \"1\"}):\n io = InputOutput()\n self.assertFalse(io.pretty)\n\n def test_autocompleter_with_non_existent_file(self):\n root = \"\"\n rel_fnames = [\"non_existent_file.txt\"]\n addable_rel_fnames = []\n commands = None\n autocompleter = AutoCompleter(root, rel_fnames, addable_rel_fnames, commands, \"utf-8\")\n self.assertEqual(autocompleter.words, set(rel_fnames))\n\n\nif __name__ == \"__main__\":\n unittest.main()\n",
"path": "tests/test_io.py"
}
] | 12_0 | python | import unittest
import sys
from unittest.mock import patch

class TestInputOutput(unittest.TestCase):
    def test_autocompleter_with_unicode_file(self):
        from tests.utils import ChdirTemporaryDirectory
        from pathlib import Path
        from aider.io import AutoCompleter
        with ChdirTemporaryDirectory():
            root = ""
            fname = "file.py"
            rel_fnames = [fname]
            addable_rel_fnames = []
            commands = None
            autocompleter = AutoCompleter(root, rel_fnames, addable_rel_fnames, commands, "utf-8")
            self.assertEqual(autocompleter.words, set(rel_fnames))
            Path(fname).write_text("def hello(): pass\n")
            autocompleter = AutoCompleter(root, rel_fnames, addable_rel_fnames, commands, "utf-8")
            self.assertEqual(autocompleter.words, set(rel_fnames + ["hello"]))
            encoding = "utf-16"
            some_content_which_will_error_if_read_with_encoding_utf8 = "ÅÍÎÏ".encode(encoding)
            with open(fname, "wb") as f:
                f.write(some_content_which_will_error_if_read_with_encoding_utf8)
            autocompleter = AutoCompleter(root, rel_fnames, addable_rel_fnames, commands, "utf-8")
            self.assertEqual(autocompleter.words, set(rel_fnames))

def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestInputOutput))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)

if __name__ == '__main__':
    main()
|
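For readers skimming record 12_0: the test above checks that aider's AutoCompleter falls back to filename-only completions when a file cannot be decoded with the configured encoding. Below is a minimal standalone sketch of that pattern — a hypothetical illustration, not aider's actual implementation; the function name and the regex tokenizer are assumptions:

```python
import re
from pathlib import Path


def collect_completion_words(rel_fnames, encoding="utf-8"):
    # Filenames themselves are always offered as completions.
    words = set(rel_fnames)
    for fname in rel_fnames:
        try:
            content = Path(fname).read_text(encoding=encoding)
        except (FileNotFoundError, UnicodeDecodeError):
            # A missing or non-decodable file contributes no extra words,
            # but it must not crash the completer.
            continue
        # Crude identifier extraction; aider's real tokenizer differs.
        words.update(re.findall(r"[A-Za-z_]\w*", content))
    return words
```
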
https://github.com/teamqurrent/aider | In the `cmd_add` method of the `Commands` class in `commands.py`, you need to adjust how the method determines whether a file is already tracked in the Git repository. Start by computing the relative path of each file being processed, using `self.coder.get_rel_fname(matched_file)`, and storing the result in a variable named `rel_path`. Then update the conditional check that decides whether a file should be added to Git: replace the use of `matched_file` with `rel_path` when checking against `git_files`. | 2609ec1 | aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2023.5.7
charset-normalizer==3.1.0
frozenlist==1.3.3
gitdb==4.0.10
GitPython==3.1.31
idna==3.4
markdown-it-py==2.2.0
mdurl==0.1.2
multidict==6.0.4
openai==0.27.6
prompt-toolkit==3.0.38
Pygments==2.15.1
requests==2.30.0
rich==13.3.5
smmap==5.0.0
tqdm==4.65.0
urllib3==2.0.2
wcwidth==0.2.6
yarl==1.9.2
pytest==7.3.1
tiktoken==0.4.0
configargparse
PyYAML
backoff==2.2.1
networkx==3.1
diskcache==5.6.1
numpy==1.26.1
scipy==1.11.3
jsonschema==4.17.3
sounddevice==0.4.6
soundfile==0.12.1
pathspec==0.11.2
grep-ast==0.2.4
| python3.9 | d720bfe | diff --git a/aider/commands.py b/aider/commands.py
--- a/aider/commands.py
+++ b/aider/commands.py
@@ -305,6 +305,7 @@ class Commands:
 
         for matched_file in all_matched_files:
             abs_file_path = self.coder.abs_root_path(matched_file)
+            rel_path = self.coder.get_rel_fname(matched_file)
 
             if not abs_file_path.startswith(self.coder.root):
                 self.io.tool_error(
@@ -312,7 +313,7 @@ class Commands:
                 )
                 continue
 
-            if self.coder.repo and matched_file not in git_files:
+            if self.coder.repo and rel_path not in git_files:
                 try:
                     self.coder.repo.repo.git.add(abs_file_path)
                     git_added.append(matched_file)
diff --git a/tests/test_commands.py b/tests/test_commands.py
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -432,3 +432,29 @@ class TestCommands(TestCase):
         # Assert that foo.txt has been `git add` but not `git commit`
         added_files = repo.git.diff("--cached", "--name-only").split()
         self.assertIn("foo.txt", added_files)
+
+    def test_cmd_add_existing_with_dirty_repo(self):
+        with GitTemporaryDirectory():
+            repo = git.Repo()
+
+            files = ["one.txt", "two.txt"]
+            for fname in files:
+                Path(fname).touch()
+                repo.git.add(fname)
+            repo.git.commit("-m", "initial")
+
+            commit = repo.head.commit.hexsha
+
+            # leave a dirty `git rm`
+            repo.git.rm("one.txt")
+
+            io = InputOutput(pretty=False, yes=True)
+            from aider.coders import Coder
+
+            coder = Coder.create(models.GPT35, None, io)
+            commands = Commands(io, coder)
+
+            # There's no reason this /add should trigger a commit
+            commands.cmd_add("two.txt")
+
+            self.assertEqual(commit, repo.head.commit.hexsha)
| [
{
"content": "import json\nimport re\nimport subprocess\nimport sys\nfrom pathlib import Path\n\nimport git\nfrom prompt_toolkit.completion import Completion\n\nfrom aider import prompts, voice\n\nfrom .dump import dump # noqa: F401\n\n\nclass Commands:\n voice = None\n\n def __init__(self, io, coder, voice_language=None):\n self.io = io\n self.coder = coder\n\n if voice_language == \"auto\":\n voice_language = None\n\n self.voice_language = voice_language\n self.tokenizer = coder.main_model.tokenizer\n\n def is_command(self, inp):\n if inp[0] == \"/\":\n return True\n\n def get_commands(self):\n commands = []\n for attr in dir(self):\n if attr.startswith(\"cmd_\"):\n commands.append(\"/\" + attr[4:])\n\n return commands\n\n def get_command_completions(self, cmd_name, partial):\n cmd_completions_method_name = f\"completions_{cmd_name}\"\n cmd_completions_method = getattr(self, cmd_completions_method_name, None)\n if cmd_completions_method:\n for completion in cmd_completions_method(partial):\n yield completion\n\n def do_run(self, cmd_name, args):\n cmd_method_name = f\"cmd_{cmd_name}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n return cmd_method(args)\n else:\n self.io.tool_output(f\"Error: Command {cmd_name} not found.\")\n\n def matching_commands(self, inp):\n words = inp.strip().split()\n if not words:\n return\n\n first_word = words[0]\n rest_inp = inp[len(words[0]) :]\n\n all_commands = self.get_commands()\n matching_commands = [cmd for cmd in all_commands if cmd.startswith(first_word)]\n return matching_commands, first_word, rest_inp\n\n def run(self, inp):\n res = self.matching_commands(inp)\n if res is None:\n return\n matching_commands, first_word, rest_inp = res\n if len(matching_commands) == 1:\n return self.do_run(matching_commands[0][1:], rest_inp)\n elif len(matching_commands) > 1:\n self.io.tool_error(f\"Ambiguous command: {', '.join(matching_commands)}\")\n else:\n self.io.tool_error(f\"Invalid command: {first_word}\")\n\n # any method called cmd_xxx becomes a command automatically.\n # each one must take an args param.\n\n def cmd_commit(self, args):\n \"Commit edits to the repo made outside the chat (commit message optional)\"\n\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.repo.is_dirty():\n self.io.tool_error(\"No more changes to commit.\")\n return\n\n commit_message = args.strip()\n self.coder.repo.commit(message=commit_message)\n\n def cmd_clear(self, args):\n \"Clear the chat history\"\n\n self.coder.done_messages = []\n self.coder.cur_messages = []\n\n def cmd_tokens(self, args):\n \"Report on the number of tokens used by the current chat context\"\n\n res = []\n\n # system messages\n msgs = [\n dict(role=\"system\", content=self.coder.gpt_prompts.main_system),\n dict(role=\"system\", content=self.coder.gpt_prompts.system_reminder),\n ]\n tokens = len(self.tokenizer.encode(json.dumps(msgs)))\n res.append((tokens, \"system messages\", \"\"))\n\n # chat history\n msgs = self.coder.done_messages + self.coder.cur_messages\n if msgs:\n msgs = [dict(role=\"dummy\", content=msg) for msg in msgs]\n msgs = json.dumps(msgs)\n tokens = len(self.tokenizer.encode(msgs))\n res.append((tokens, \"chat history\", \"use /clear to clear\"))\n\n # repo map\n other_files = set(self.coder.get_all_abs_files()) - set(self.coder.abs_fnames)\n if self.coder.repo_map:\n repo_content = self.coder.repo_map.get_repo_map(self.coder.abs_fnames, other_files)\n if repo_content:\n tokens = 
len(self.tokenizer.encode(repo_content))\n res.append((tokens, \"repository map\", \"use --map-tokens to resize\"))\n\n # files\n for fname in self.coder.abs_fnames:\n relative_fname = self.coder.get_rel_fname(fname)\n content = self.io.read_text(fname)\n # approximate\n content = f\"{relative_fname}\\n```\\n\" + content + \"```\\n\"\n tokens = len(self.tokenizer.encode(content))\n res.append((tokens, f\"{relative_fname}\", \"use /drop to drop from chat\"))\n\n self.io.tool_output(\"Approximate context window usage, in tokens:\")\n self.io.tool_output()\n\n width = 8\n cost_width = 7\n\n def fmt(v):\n return format(int(v), \",\").rjust(width)\n\n col_width = max(len(row[1]) for row in res)\n\n cost_pad = \" \" * cost_width\n total = 0\n total_cost = 0.0\n for tk, msg, tip in res:\n total += tk\n cost = tk * (self.coder.main_model.prompt_price / 1000)\n total_cost += cost\n msg = msg.ljust(col_width)\n self.io.tool_output(f\"${cost:5.2f} {fmt(tk)} {msg} {tip}\")\n\n self.io.tool_output(\"=\" * (width + cost_width + 1))\n self.io.tool_output(f\"${total_cost:5.2f} {fmt(total)} tokens total\")\n\n limit = self.coder.main_model.max_context_tokens\n remaining = limit - total\n if remaining > 1024:\n self.io.tool_output(f\"{cost_pad}{fmt(remaining)} tokens remaining in context window\")\n elif remaining > 0:\n self.io.tool_error(\n f\"{cost_pad}{fmt(remaining)} tokens remaining in context window (use /drop or\"\n \" /clear to make space)\"\n )\n else:\n self.io.tool_error(f\"{cost_pad}{fmt(remaining)} tokens remaining, window exhausted!\")\n self.io.tool_output(f\"{cost_pad}{fmt(limit)} tokens max context window size\")\n\n def cmd_undo(self, args):\n \"Undo the last git commit if it was done by aider\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if self.coder.repo.is_dirty():\n self.io.tool_error(\n \"The repository has uncommitted changes. Please commit or stash them before\"\n \" undoing.\"\n )\n return\n\n local_head = self.coder.repo.repo.git.rev_parse(\"HEAD\")\n current_branch = self.coder.repo.repo.active_branch.name\n try:\n remote_head = self.coder.repo.repo.git.rev_parse(f\"origin/{current_branch}\")\n has_origin = True\n except git.exc.GitCommandError:\n has_origin = False\n\n if has_origin:\n if local_head == remote_head:\n self.io.tool_error(\n \"The last commit has already been pushed to the origin. 
Undoing is not\"\n \" possible.\"\n )\n return\n\n last_commit = self.coder.repo.repo.head.commit\n if (\n not last_commit.message.startswith(\"aider:\")\n or last_commit.hexsha[:7] != self.coder.last_aider_commit_hash\n ):\n self.io.tool_error(\"The last commit was not made by aider in this chat session.\")\n self.io.tool_error(\n \"You could try `/git reset --hard HEAD^` but be aware that this is a destructive\"\n \" command!\"\n )\n return\n self.coder.repo.repo.git.reset(\"--hard\", \"HEAD~1\")\n self.io.tool_output(\n f\"{last_commit.message.strip()}\\n\"\n f\"The above commit {self.coder.last_aider_commit_hash} \"\n \"was reset and removed from git.\\n\"\n )\n\n if self.coder.main_model.send_undo_reply:\n return prompts.undo_command_reply\n\n def cmd_diff(self, args):\n \"Display the diff of the last aider commit\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.last_aider_commit_hash:\n self.io.tool_error(\"No previous aider commit found.\")\n self.io.tool_error(\"You could try `/git diff` or `/git diff HEAD^`.\")\n return\n\n commits = f\"{self.coder.last_aider_commit_hash}~1\"\n diff = self.coder.repo.diff_commits(\n self.coder.pretty,\n commits,\n self.coder.last_aider_commit_hash,\n )\n\n # don't use io.tool_output() because we don't want to log or further colorize\n print(diff)\n\n def completions_add(self, partial):\n files = set(self.coder.get_all_relative_files())\n files = files - set(self.coder.get_inchat_relative_files())\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def glob_filtered_to_repo(self, pattern):\n try:\n raw_matched_files = list(Path(self.coder.root).glob(pattern))\n except ValueError as err:\n self.io.tool_error(f\"Error matching {pattern}: {err}\")\n raw_matched_files = []\n\n matched_files = []\n for fn in raw_matched_files:\n matched_files += expand_subdir(fn)\n\n matched_files = [str(Path(fn).relative_to(self.coder.root)) for fn in matched_files]\n\n # if repo, filter against it\n if self.coder.repo:\n git_files = self.coder.repo.get_tracked_files()\n matched_files = [fn for fn in matched_files if str(fn) in git_files]\n\n res = list(map(str, matched_files))\n return res\n\n def cmd_add(self, args):\n \"Add matching files to the chat session using glob patterns\"\n\n added_fnames = []\n git_added = []\n git_files = self.coder.repo.get_tracked_files() if self.coder.repo else []\n\n all_matched_files = set()\n\n filenames = parse_quoted_filenames(args)\n for word in filenames:\n if Path(word).is_absolute():\n fname = Path(word)\n else:\n fname = Path(self.coder.root) / word\n\n if fname.exists() and fname.is_file():\n all_matched_files.add(str(fname))\n continue\n # an existing dir will fall through and get recursed by glob\n\n matched_files = self.glob_filtered_to_repo(word)\n if matched_files:\n all_matched_files.update(matched_files)\n continue\n\n if self.io.confirm_ask(f\"No files matched '{word}'. 
Do you want to create {fname}?\"):\n fname.touch()\n all_matched_files.add(str(fname))\n\n for matched_file in all_matched_files:\n abs_file_path = self.coder.abs_root_path(matched_file)\n\n if not abs_file_path.startswith(self.coder.root):\n self.io.tool_error(\n f\"Can not add {abs_file_path}, which is not within {self.coder.root}\"\n )\n continue\n\n if self.coder.repo and matched_file not in git_files:\n try:\n self.coder.repo.repo.git.add(abs_file_path)\n git_added.append(matched_file)\n except git.exc.GitCommandError as e:\n self.io.tool_error(f\"Unable to add {matched_file}: {str(e)}\")\n continue\n\n if abs_file_path in self.coder.abs_fnames:\n self.io.tool_error(f\"{matched_file} is already in the chat\")\n else:\n content = self.io.read_text(abs_file_path)\n if content is None:\n self.io.tool_error(f\"Unable to read {matched_file}\")\n else:\n self.coder.abs_fnames.add(abs_file_path)\n self.io.tool_output(f\"Added {matched_file} to the chat\")\n added_fnames.append(matched_file)\n\n if self.coder.repo and git_added and self.coder.auto_commits:\n git_added = \" \".join(git_added)\n commit_message = f\"aider: Added {git_added}\"\n self.coder.repo.commit(message=commit_message)\n\n if not added_fnames:\n return\n\n # only reply if there's been some chatting since the last edit\n if not self.coder.cur_messages:\n return\n\n reply = prompts.added_files.format(fnames=\", \".join(added_fnames))\n return reply\n\n def completions_drop(self, partial):\n files = self.coder.get_inchat_relative_files()\n\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def cmd_drop(self, args):\n \"Remove matching files from the chat session\"\n\n if not args.strip():\n self.io.tool_output(\"Dropping all files from the chat session.\")\n self.coder.abs_fnames = set()\n\n filenames = parse_quoted_filenames(args)\n for word in filenames:\n matched_files = self.glob_filtered_to_repo(word)\n\n if not matched_files:\n self.io.tool_error(f\"No files matched '{word}'\")\n\n for matched_file in matched_files:\n abs_fname = self.coder.abs_root_path(matched_file)\n if abs_fname in self.coder.abs_fnames:\n self.coder.abs_fnames.remove(abs_fname)\n self.io.tool_output(f\"Removed {matched_file} from the chat\")\n\n def cmd_git(self, args):\n \"Run a git command\"\n combined_output = None\n try:\n args = \"git \" + args\n env = dict(GIT_EDITOR=\"true\", **subprocess.os.environ)\n result = subprocess.run(\n args,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT,\n text=True,\n env=env,\n shell=True,\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running git command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n def cmd_run(self, args):\n \"Run a shell command and optionally add the output to the chat\"\n combined_output = None\n try:\n result = subprocess.run(\n args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, shell=True\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n if self.io.confirm_ask(\"Add the output to the chat?\", default=\"y\"):\n for line in combined_output.splitlines():\n self.io.tool_output(line, log_only=True)\n\n msg = prompts.run_output.format(\n command=args,\n output=combined_output,\n )\n return msg\n\n def cmd_exit(self, args):\n \"Exit the application\"\n 
sys.exit()\n\n def cmd_ls(self, args):\n \"List all known files and those included in the chat session\"\n\n files = self.coder.get_all_relative_files()\n\n other_files = []\n chat_files = []\n for file in files:\n abs_file_path = self.coder.abs_root_path(file)\n if abs_file_path in self.coder.abs_fnames:\n chat_files.append(file)\n else:\n other_files.append(file)\n\n if not chat_files and not other_files:\n self.io.tool_output(\"\\nNo files in chat or git repo.\")\n return\n\n if chat_files:\n self.io.tool_output(\"Files in chat:\\n\")\n for file in chat_files:\n self.io.tool_output(f\" {file}\")\n\n if other_files:\n self.io.tool_output(\"\\nRepo files not in the chat:\\n\")\n for file in other_files:\n self.io.tool_output(f\" {file}\")\n\n def cmd_help(self, args):\n \"Show help about all commands\"\n commands = sorted(self.get_commands())\n for cmd in commands:\n cmd_method_name = f\"cmd_{cmd[1:]}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n description = cmd_method.__doc__\n self.io.tool_output(f\"{cmd} {description}\")\n else:\n self.io.tool_output(f\"{cmd} No description available.\")\n\n def cmd_voice(self, args):\n \"Record and transcribe voice input\"\n\n if not self.voice:\n try:\n self.voice = voice.Voice()\n except voice.SoundDeviceError:\n self.io.tool_error(\n \"Unable to import `sounddevice` and/or `soundfile`, is portaudio installed?\"\n )\n return\n\n history_iter = self.io.get_input_history()\n\n history = []\n size = 0\n for line in history_iter:\n if line.startswith(\"/\"):\n continue\n if line in history:\n continue\n if size + len(line) > 1024:\n break\n size += len(line)\n history.append(line)\n\n history.reverse()\n history = \"\\n\".join(history)\n\n text = self.voice.record_and_transcribe(history, language=self.voice_language)\n if text:\n self.io.add_to_input_history(text)\n print()\n self.io.user_input(text, log_only=False)\n print()\n\n return text\n\n\ndef expand_subdir(file_path):\n file_path = Path(file_path)\n if file_path.is_file():\n yield file_path\n return\n\n if file_path.is_dir():\n for file in file_path.rglob(\"*\"):\n if file.is_file():\n yield str(file)\n\n\ndef parse_quoted_filenames(args):\n filenames = re.findall(r\"\\\"(.+?)\\\"|(\\S+)\", args)\n filenames = [name for sublist in filenames for name in sublist if name]\n return filenames\n",
"path": "aider/commands.py"
},
{
"content": "import codecs\nimport os\nimport shutil\nimport sys\nimport tempfile\nfrom io import StringIO\nfrom pathlib import Path\nfrom unittest import TestCase\n\nimport git\n\nfrom aider import models\nfrom aider.coders import Coder\nfrom aider.commands import Commands\nfrom aider.dump import dump # noqa: F401\nfrom aider.io import InputOutput\nfrom tests.utils import ChdirTemporaryDirectory, GitTemporaryDirectory, make_repo\n\n\nclass TestCommands(TestCase):\n def setUp(self):\n self.original_cwd = os.getcwd()\n self.tempdir = tempfile.mkdtemp()\n os.chdir(self.tempdir)\n\n def tearDown(self):\n os.chdir(self.original_cwd)\n shutil.rmtree(self.tempdir, ignore_errors=True)\n\n def test_cmd_add(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with 'foo.txt' and 'bar.txt' as a single string\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Check if both files have been created in the temporary directory\n self.assertTrue(os.path.exists(\"foo.txt\"))\n self.assertTrue(os.path.exists(\"bar.txt\"))\n\n def test_cmd_add_bad_glob(self):\n # https://github.com/paul-gauthier/aider/issues/293\n\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"**.txt\")\n\n def test_cmd_add_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create some test files\n with open(\"test1.py\", \"w\") as f:\n f.write(\"print('test1')\")\n with open(\"test2.py\", \"w\") as f:\n f.write(\"print('test2')\")\n with open(\"test.txt\", \"w\") as f:\n f.write(\"test\")\n\n # Call the cmd_add method with a glob pattern\n commands.cmd_add(\"*.py\")\n\n # Check if the Python files have been added to the chat session\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n # Check if the text file has not been added to the chat session\n self.assertNotIn(str(Path(\"test.txt\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_no_match(self):\n # yes=False means we will *not* create the file when it is not found\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with a non-existent file pattern\n commands.cmd_add(\"*.nonexistent\")\n\n # Check if no files have been added to the chat session\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_no_match_but_make_it(self):\n # yes=True means we *will* create the file when it is not found\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n fname = Path(\"[abc].nonexistent\")\n\n # Call the cmd_add method with a non-existent file pattern\n commands.cmd_add(str(fname))\n\n # Check if no files have been added to the chat session\n self.assertEqual(len(coder.abs_fnames), 1)\n self.assertTrue(fname.exists())\n\n def test_cmd_add_drop_directory(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, 
yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a directory and add files to it using pathlib\n Path(\"test_dir\").mkdir()\n Path(\"test_dir/another_dir\").mkdir()\n Path(\"test_dir/test_file1.txt\").write_text(\"Test file 1\")\n Path(\"test_dir/test_file2.txt\").write_text(\"Test file 2\")\n Path(\"test_dir/another_dir/test_file.txt\").write_text(\"Test file 3\")\n\n # Call the cmd_add method with a directory\n commands.cmd_add(\"test_dir test_dir/test_file2.txt\")\n\n # Check if the files have been added to the chat session\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames)\n\n commands.cmd_drop(\"test_dir/another_dir\")\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertNotIn(\n str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames\n )\n\n # Issue #139 /add problems when cwd != git_root\n\n # remember the proper abs path to this file\n abs_fname = str(Path(\"test_dir/another_dir/test_file.txt\").resolve())\n\n # chdir to someplace other than git_root\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add it via it's git_root referenced name\n commands.cmd_add(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertIn(abs_fname, coder.abs_fnames)\n\n # drop it via it's git_root referenced name\n commands.cmd_drop(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertNotIn(abs_fname, coder.abs_fnames)\n\n def test_cmd_drop_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n subdir = Path(\"subdir\")\n subdir.mkdir()\n (subdir / \"subtest1.py\").touch()\n (subdir / \"subtest2.py\").touch()\n\n Path(\"test1.py\").touch()\n Path(\"test2.py\").touch()\n\n # Add some files to the chat session\n commands.cmd_add(\"*.py\")\n\n self.assertEqual(len(coder.abs_fnames), 2)\n\n # Call the cmd_drop method with a glob pattern\n commands.cmd_drop(\"*2.py\")\n\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertNotIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_bad_encoding(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a new file foo.bad which will fail to decode as utf-8\n with codecs.open(\"foo.bad\", \"w\", encoding=\"iso-8859-15\") as f:\n f.write(\"ÆØÅ\") # Characters not present in utf-8\n\n commands.cmd_add(\"foo.bad\")\n\n self.assertEqual(coder.abs_fnames, set())\n\n def test_cmd_git(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n with GitTemporaryDirectory() as tempdir:\n # Create a file in the temporary directory\n with open(f\"{tempdir}/test.txt\", \"w\") as f:\n f.write(\"test\")\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, 
coder)\n\n # Run the cmd_git method with the arguments \"commit -a -m msg\"\n commands.cmd_git(\"add test.txt\")\n commands.cmd_git(\"commit -a -m msg\")\n\n # Check if the file has been committed to the repository\n repo = git.Repo(tempdir)\n files_in_repo = repo.git.ls_files()\n self.assertIn(\"test.txt\", files_in_repo)\n\n def test_cmd_tokens(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Redirect the standard output to an instance of io.StringIO\n stdout = StringIO()\n sys.stdout = stdout\n\n commands.cmd_tokens(\"\")\n\n # Reset the standard output\n sys.stdout = sys.__stdout__\n\n # Get the console output\n console_output = stdout.getvalue()\n\n self.assertIn(\"foo.txt\", console_output)\n self.assertIn(\"bar.txt\", console_output)\n\n def test_cmd_add_from_subdir(self):\n repo = git.Repo.init()\n repo.config_writer().set_value(\"user\", \"name\", \"Test User\").release()\n repo.config_writer().set_value(\"user\", \"email\", \"testuser@example.com\").release()\n\n # Create three empty files and add them to the git repository\n filenames = [\"one.py\", Path(\"subdir\") / \"two.py\", Path(\"anotherdir\") / \"three.py\"]\n for filename in filenames:\n file_path = Path(filename)\n file_path.parent.mkdir(parents=True, exist_ok=True)\n file_path.touch()\n repo.git.add(str(file_path))\n repo.git.commit(\"-m\", \"added\")\n\n filenames = [str(Path(fn).resolve()) for fn in filenames]\n\n ###\n\n os.chdir(\"subdir\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # this should get added\n commands.cmd_add(str(Path(\"anotherdir\") / \"three.py\"))\n\n # this should add one.py\n commands.cmd_add(\"*.py\")\n\n self.assertIn(filenames[0], coder.abs_fnames)\n self.assertNotIn(filenames[1], coder.abs_fnames)\n self.assertIn(filenames[2], coder.abs_fnames)\n\n def test_cmd_add_from_subdir_again(self):\n with GitTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add a file that is in the side_dir\n with open(\"temp.txt\", \"w\"):\n pass\n\n # this was blowing up with GitCommandError, per:\n # https://github.com/paul-gauthier/aider/issues/201\n commands.cmd_add(\"temp.txt\")\n\n def test_cmd_commit(self):\n with GitTemporaryDirectory():\n fname = \"test.txt\"\n with open(fname, \"w\") as f:\n f.write(\"test\")\n repo = git.Repo()\n repo.git.add(fname)\n repo.git.commit(\"-m\", \"initial\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n self.assertFalse(repo.is_dirty())\n with open(fname, \"w\") as f:\n f.write(\"new\")\n self.assertTrue(repo.is_dirty())\n\n commit_message = \"Test commit message\"\n commands.cmd_commit(commit_message)\n self.assertFalse(repo.is_dirty())\n\n def test_cmd_add_from_outside_root(self):\n with ChdirTemporaryDirectory() as tmp_dname:\n root = Path(\"root\")\n root.mkdir()\n os.chdir(str(root))\n\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n outside_file = Path(tmp_dname) / \"outside.txt\"\n outside_file.touch()\n\n # This should 
not be allowed!\n # https://github.com/paul-gauthier/aider/issues/178\n commands.cmd_add(\"../outside.txt\")\n\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_from_outside_git(self):\n with ChdirTemporaryDirectory() as tmp_dname:\n root = Path(\"root\")\n root.mkdir()\n os.chdir(str(root))\n\n make_repo()\n\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n outside_file = Path(tmp_dname) / \"outside.txt\"\n outside_file.touch()\n\n # This should not be allowed!\n # It was blowing up with GitCommandError, per:\n # https://github.com/paul-gauthier/aider/issues/178\n commands.cmd_add(\"../outside.txt\")\n\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_filename_with_special_chars(self):\n with ChdirTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n fname = Path(\"with[brackets].txt\")\n fname.touch()\n\n commands.cmd_add(str(fname))\n\n self.assertIn(str(fname.resolve()), coder.abs_fnames)\n\n def test_cmd_add_abs_filename(self):\n with ChdirTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n fname = Path(\"file.txt\")\n fname.touch()\n\n commands.cmd_add(str(fname.resolve()))\n\n self.assertIn(str(fname.resolve()), coder.abs_fnames)\n\n def test_cmd_add_quoted_filename(self):\n with ChdirTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n fname = Path(\"file with spaces.txt\")\n fname.touch()\n\n commands.cmd_add(f'\"{fname}\"')\n\n self.assertIn(str(fname.resolve()), coder.abs_fnames)\n\n def test_cmd_add_no_autocommit(self):\n with GitTemporaryDirectory():\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io, auto_commits=False)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"foo.txt\")\n\n # Check if both files have been created in the temporary directory\n self.assertTrue(os.path.exists(\"foo.txt\"))\n\n repo = git.Repo()\n\n # Assert that foo.txt has been `git add` but not `git commit`\n added_files = repo.git.diff(\"--cached\", \"--name-only\").split()\n self.assertIn(\"foo.txt\", added_files)\n",
"path": "tests/test_commands.py"
}
] | 12_1 | python | import os
import unittest
import shutil
import sys
import tempfile
from pathlib import Path
from unittest import TestCase

class TestCommands(TestCase):
    def setUp(self):
        self.original_cwd = os.getcwd()
        self.tempdir = tempfile.mkdtemp()
        os.chdir(self.tempdir)

    def tearDown(self):
        os.chdir(self.original_cwd)
        shutil.rmtree(self.tempdir, ignore_errors=True)

    def test_cmd_add_existing_with_dirty_repo(self):
        import git
        from aider import models
        from aider.coders import Coder
        from aider.commands import Commands
        from aider.io import InputOutput
        from tests.utils import GitTemporaryDirectory
        with GitTemporaryDirectory():
            repo = git.Repo()
            files = ["one.txt", "two.txt"]
            for fname in files:
                Path(fname).touch()
                repo.git.add(fname)
            repo.git.commit("-m", "initial")
            commit = repo.head.commit.hexsha
            # leave a dirty `git rm`
            repo.git.rm("one.txt")
            io = InputOutput(pretty=False, yes=True)
            from aider.coders import Coder
            coder = Coder.create(models.GPT35, None, io)
            commands = Commands(io, coder)
            # There's no reason this /add should trigger a commit
            commands.cmd_add("two.txt")
            self.assertEqual(commit, repo.head.commit.hexsha)
            # Windows is throwing:
            # PermissionError: [WinError 32] The process cannot access
            # the file because it is being used by another process
            repo.git.commit("-m", "cleanup")
            del coder
            del commands
            del repo

def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCommands))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)

if __name__ == '__main__':
    main()
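The essence of the record 12_1 fix, restated outside the dataset: `git ls-files`-style listings report paths relative to the repository root, so membership tests against them must also use root-relative paths. The sketch below is a hypothetical standalone illustration — the names mirror the patch above, but this is not aider code:

```python
import os


def needs_git_add(matched_file: str, root: str, git_files: set) -> bool:
    # git tracks files by their root-relative path, so convert before testing
    # membership; comparing the raw matched_file against git_files can wrongly
    # report an already-tracked file as untracked.
    rel_path = os.path.relpath(matched_file, root)
    return rel_path not in git_files


# Example: a tracked file should not be re-added (and must not trigger a commit).
assert not needs_git_add("/repo/two.txt", "/repo", {"one.txt", "two.txt"})
```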
https://github.com/teamqurrent/aider | Your goal is to update the `dname` path from 'tmp.benchmarks/refactor-benchmark-pylint' to 'tmp.benchmarks/refactor-benchmark-spyder' in the `process(entry)` function of the `refactor_tools.py` file. | d9a301c | aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2023.5.7
charset-normalizer==3.1.0
frozenlist==1.3.3
gitdb==4.0.10
GitPython==3.1.31
idna==3.4
markdown-it-py==2.2.0
mdurl==0.1.2
multidict==6.0.4
openai==0.27.6
prompt-toolkit==3.0.38
Pygments==2.15.1
requests==2.30.0
rich==13.3.5
smmap==5.0.0
tqdm==4.65.0
urllib3==2.0.2
wcwidth==0.2.6
yarl==1.9.2
pytest==7.3.1
tiktoken==0.4.0
configargparse
PyYAML
backoff==2.2.1
networkx==3.1
diskcache==5.6.1
numpy==1.26.1
scipy==1.11.3
jsonschema==4.17.3
sounddevice==0.4.6
soundfile==0.12.1
pathspec==0.11.2
grep-ast==0.2.4
| python3.9 | ef2a1f3 | diff --git a/aider/coders/editblock_prompts.py b/aider/coders/editblock_prompts.py
--- a/aider/coders/editblock_prompts.py
+++ b/aider/coders/editblock_prompts.py
@@ -174,6 +174,8 @@ Include *ALL* the code being searched and replaced!
 
 Only *SEARCH/REPLACE* files that are *read-write*.
 
+To move code within a file, use 2 *SEARCH/REPLACE* blocks: 1 to delete it from its current location, 1 to insert it in the new location.
+
 If you want to put code in a new file, use a *SEARCH/REPLACE block* with:
 - A new file path, including dir name if needed
 - An empty `SEARCH` section
diff --git a/aider/coders/udiff_prompts.py b/aider/coders/udiff_prompts.py
--- a/aider/coders/udiff_prompts.py
+++ b/aider/coders/udiff_prompts.py
@@ -96,6 +96,8 @@ When editing a function, method, loop, etc use a hunk to replace the *entire* co
 Delete the entire existing version with `-` lines and then add a new, updated version with `+` lines.
 This will help you generate correct code and correct diffs.
 
+To move code within a file, use 2 hunks: 1 to delete it from its current location, 1 to insert it in the new location.
+
 To make a new file, show a diff from `--- /dev/null` to `+++ path/to/new/file.ext`.
 
 You are diligent and tireless!
diff --git a/benchmark/refactor_tools.py b/benchmark/refactor_tools.py
--- a/benchmark/refactor_tools.py
+++ b/benchmark/refactor_tools.py
@@ -157,7 +157,7 @@ def process(entry):
 
     print(f"{fname} {class_name} {method_name} {class_children} {method_children}")
 
-    dname = Path("tmp.benchmarks/refactor-benchmark-pylint")
+    dname = Path("tmp.benchmarks/refactor-benchmark-spyder")
     dname.mkdir(exist_ok=True)
 
     dname = dname / f"{fname.stem}_{class_name}_{method_name}"
| [
{
"content": "# flake8: noqa: E501\n\nfrom .base_prompts import CoderPrompts\n\n\nclass EditBlockPrompts(CoderPrompts):\n main_system = \"\"\"Act as an expert software developer.\nYou are diligent and tireless!\nYou NEVER leave comments describing code without implementing it!\nYou always COMPLETELY IMPLEMENT the needed code!\nAlways use best practices when coding.\nRespect and use existing conventions, libraries, etc that are already present in the code base.\n\nTake requests for changes to the supplied code.\nIf the request is ambiguous, ask questions.\n\nOnce you understand the request you MUST:\n1. List the files you need to modify. Only suggest changes to a *read-write* files. Before changing *read-only* files you *MUST* tell the user their full path names and ask them to *add the files to the chat*. End your reply and wait for their approval.\n2. Think step-by-step and explain the needed changes with a numbered list of short sentences.\n3. Describe each change with a *SEARCH/REPLACE block* per the examples below.\n\nAll changes to files must use this *SEARCH/REPLACE block* format.\n\n# Example conversation 1:\n\n## USER: These are the *read-write* files:\n\nmathweb/flask/app.py\n{fence[0]}python\nfrom flask import Flask\n\napp = Flask(__name__)\n\n@app.route('/div/<int:n>/<int:d>')\ndef divide(n, d):\n if d == 0:\n return 'Error: Division by zero'\n else:\n result = n / d\n return str(result)\n\ndef factorial(n):\n \"compute factorial\"\n\n if n == 0:\n return 1\n else:\n return n * factorial(n-1)\n\n@app.route('/factorial/<int:n>')\ndef get_factorial(n):\n return str(factorial(n))\n\nif __name__ == '__main__':\n print(\"Starting...\")\n app.run()\n{fence[1]}\n\n## ASSISTANT: Ok.\n\n## USER: Change get_factorial() to use math.factorial\n\n## ASSISTANT:\n\nDo make this change we need to modify `mathweb/flask/app.py` to:\n\n1. Import the math package.\n2. Remove the existing factorial() function.\n3. Update get_factorial() to call math.factorial instead.\n\nHere are the *SEARCH/REPLACE* blocks:\n\nmathweb/flask/app.py\n{fence[0]}python\n<<<<<<< SEARCH\nfrom flask import Flask\n=======\nimport math\nfrom flask import Flask\n>>>>>>> REPLACE\n{fence[1]}\n\nmathweb/flask/app.py\n{fence[0]}python\n<<<<<<< SEARCH\ndef factorial(n):\n \"compute factorial\"\n\n if n == 0:\n return 1\n else:\n return n * factorial(n-1)\n\n=======\n>>>>>>> REPLACE\n{fence[1]}\n\nmathweb/flask/app.py\n{fence[0]}python\n<<<<<<< SEARCH\n return str(factorial(n))\n=======\n return str(math.factorial(n))\n>>>>>>> REPLACE\n{fence[1]}\n\n# Example conversation 2:\n\n## USER: These are the *read-write* files:\n\nmain.py\n{fence[0]}python\ndef hello():\n \"print a greeting\"\n\n print(\"hello\")\n\nif __name__ == '__main__':\n hello()\n{fence[1]}\n\n## ASSISTANT: Ok.\n\n## USER: Refactor hello() into its own file.\n\n## ASSISTANT:\n\nDo make this change we need to modify `main.py` and make a new file `hello.py`:\n\n1. Make a new hello.py file with hello() in it.\n2. Remove hello() from main.py and replace it with an import.\n\nHere are the *SEARCH/REPLACE* blocks:\n\nhello.py\n{fence[0]}python\n<<<<<<< SEARCH\n=======\ndef hello():\n \"print a greeting\"\n\n print(\"hello\")\n>>>>>>> REPLACE\n{fence[1]}\n\nmain.py\n{fence[0]}python\n<<<<<<< SEARCH\ndef hello():\n \"print a greeting\"\n\n print(\"hello\")\n=======\nfrom hello import hello\n>>>>>>> REPLACE\n{fence[1]}\n\n# Rules\n\"\"\"\n\n system_reminder = \"\"\"Every *SEARCH/REPLACE block* must use this format:\n1. The file path alone on a line, eg: main.py\n2. 
The opening fence and code language, eg: {fence[0]}python\n3. The start of search block: <<<<<<< SEARCH\n4. A contiguous chunk of lines to search for in the existing source code\n5. The dividing line: =======\n6. The lines to replace into the source code\n7. The end of the replace block: >>>>>>> REPLACE\n8. The closing fence: {fence[1]}\n\nEvery *SEARCH* section must *EXACTLY MATCH* the existing source code, character for character, including all comments, docstrings, etc.\n\nInclude *ALL* the code being searched and replaced!\n\nOnly *SEARCH/REPLACE* files that are *read-write*.\n\nIf you want to put code in a new file, use a *SEARCH/REPLACE block* with:\n- A new file path, including dir name if needed\n- An empty `SEARCH` section\n- The new file's contents in the `REPLACE` section\n\nYou are diligent and tireless!\nYou NEVER leave comments describing code without implementing it!\nYou always COMPLETELY IMPLEMENT the needed code!\n\"\"\"\n\n files_content_prefix = \"These are the *read-write* files:\\n\"\n\n files_no_full_files = \"I am not sharing any *read-write* files yet.\"\n\n repo_content_prefix = \"\"\"Below here are summaries of files present in the user's git repository.\nDo not propose changes to these files, they are *read-only*.\nTo make a file *read-write*, ask the user to *add it to the chat*.\n\"\"\"\n",
"path": "aider/coders/editblock_prompts.py"
},
{
"content": "# flake8: noqa: E501\n\nfrom .base_prompts import CoderPrompts\n\n\nclass UnifiedDiffPrompts(CoderPrompts):\n main_system = \"\"\"Act as an expert software developer.\nYou are diligent and tireless!\nYou NEVER leave comments describing code without implementing it!\nYou always COMPLETELY IMPLEMENT the needed code!\nAlways use best practices when coding.\nRespect and use existing conventions, libraries, etc that are already present in the code base.\n\nTake requests for changes to the supplied code.\nIf the request is ambiguous, ask questions.\n\nFor each file that needs to be changed, write out the changes similar to a unified diff like `diff -U0` would produce. For example:\n\n# Example conversation 1\n\n## USER: Replace is_prime with a call to sympy.\n\n## ASSISTANT: Ok, I will:\n\n1. Add an imports of sympy.\n2. Remove the is_prime() function.\n3. Replace the existing call to is_prime() with a call to sympy.isprime().\n\nHere are the diffs for those changes:\n\n```diff\n--- mathweb/flask/app.py\n+++ mathweb/flask/app.py\n@@ ... @@\n-class MathWeb:\n+import sympy\n+\n+class MathWeb:\n@@ ... @@\n-def is_prime(x):\n- if x < 2:\n- return False\n- for i in range(2, int(math.sqrt(x)) + 1):\n- if x % i == 0:\n- return False\n- return True\n@@ ... @@\n-@app.route('/prime/<int:n>')\n-def nth_prime(n):\n- count = 0\n- num = 1\n- while count < n:\n- num += 1\n- if is_prime(num):\n- count += 1\n- return str(num)\n+@app.route('/prime/<int:n>')\n+def nth_prime(n):\n+ count = 0\n+ num = 1\n+ while count < n:\n+ num += 1\n+ if sympy.isprime(num):\n+ count += 1\n+ return str(num)\n```\n\"\"\"\n\n system_reminder = \"\"\"# File editing rules:\n\nReturn edits similar to unified diffs that `diff -U0` would produce.\n\nMake sure you include the first 2 lines with the file paths.\nDon't include timestamps with the file paths.\n\nStart each hunk of changes with a `@@ ... 
@@` line.\nDon't include line numbers like `diff -U0` does.\nThe user's patch tool doesn't need them.\n\nThe user's patch tool needs CORRECT patches that apply cleanly against the current contents of the file!\nThink carefully and make sure you include and mark all lines that need to be removed or changed as `-` lines.\nMake sure you mark all new or modified lines with `+`.\nDon't leave out any lines or the diff patch won't apply correctly.\n\nIndentation matters in the diffs!\n\nStart a new hunk for each section of the file that needs changes.\n\nOnly output hunks that specify changes with `+` or `-` lines.\nSkip any hunks that are entirely unchanging ` ` lines.\n\nOutput hunks in whatever order makes the most sense.\nHunks don't need to be in any particular order.\n\nWhen editing a function, method, loop, etc use a hunk to replace the *entire* code block.\nDelete the entire existing version with `-` lines and then add a new, updated version with `+` lines.\nThis will help you generate correct code and correct diffs.\n\nTo make a new file, show a diff from `--- /dev/null` to `+++ path/to/new/file.ext`.\n\nYou are diligent and tireless!\nYou NEVER leave comments describing code without implementing it!\nYou always COMPLETELY IMPLEMENT the needed code!\n\"\"\"\n\n files_content_prefix = \"These are the *read-write* files:\\n\"\n\n files_no_full_files = \"I am not sharing any *read-write* files yet.\"\n\n repo_content_prefix = \"\"\"Below here are summaries of other files present in this git repository.\nDo not propose changes to these files, they are *read-only*.\nTo make a file *read-write*, ask the user to *add it to the chat*.\n\"\"\"\n",
"path": "aider/coders/udiff_prompts.py"
},
{
"content": "#!/usr/bin/env python\n\nimport ast\nimport os\nimport shutil\nimport sys\nfrom pathlib import Path\n\nfrom aider.dump import dump # noqa: F401\n\n\nclass ParentNodeTransformer(ast.NodeTransformer):\n \"\"\"\n This transformer sets the 'parent' attribute on each node.\n \"\"\"\n\n def generic_visit(self, node):\n for child in ast.iter_child_nodes(node):\n child.parent = node\n return super(ParentNodeTransformer, self).generic_visit(node)\n\n\ndef verify_full_func_at_top_level(tree, func, func_children):\n func_node = next(\n (\n item\n for item in ast.walk(tree)\n if isinstance(item, ast.FunctionDef) and item.name == func\n ),\n None,\n )\n assert func_node is not None, f\"Function {func} not found\"\n\n assert isinstance(\n func_node.parent, ast.Module\n ), f\"{func} is not a top level function, it has parent {func_node.parent}\"\n\n num_children = sum(1 for _ in ast.walk(func_node))\n pct_diff_children = abs(num_children - func_children) * 100 / func_children\n assert (\n pct_diff_children < 10\n ), f\"Old method had {func_children} children, new method has {num_children}\"\n\n\ndef verify_old_class_children(tree, old_class, old_class_children):\n node = next(\n (\n item\n for item in ast.walk(tree)\n if isinstance(item, ast.ClassDef) and item.name == old_class\n ),\n None,\n )\n assert node is not None, f\"Old class {old_class} not found\"\n\n num_children = sum(1 for _ in ast.walk(node))\n\n pct_diff_children = abs(num_children - old_class_children) * 100 / old_class_children\n assert (\n pct_diff_children < 10\n ), f\"Old class had {old_class_children} children, new class has {num_children}\"\n\n\ndef verify_refactor(fname, func, func_children, old_class, old_class_children):\n with open(fname, \"r\") as file:\n file_contents = file.read()\n tree = ast.parse(file_contents)\n ParentNodeTransformer().visit(tree) # Set parent attribute for all nodes\n\n verify_full_func_at_top_level(tree, func, func_children)\n\n verify_old_class_children(tree, old_class, old_class_children - func_children)\n\n\n############################\n\n\nclass SelfUsageChecker(ast.NodeVisitor):\n def __init__(self):\n self.non_self_methods = []\n self.parent_class_name = None\n self.num_class_children = 0\n\n def visit_FunctionDef(self, node):\n # Check if the first argument is 'self' and if it's not used\n if node.args.args and node.args.args[0].arg == \"self\":\n self_used = any(\n isinstance(expr, ast.Name) and expr.id == \"self\"\n for stmt in node.body\n for expr in ast.walk(stmt)\n )\n super_used = any(\n isinstance(expr, ast.Name) and expr.id == \"super\"\n for stmt in node.body\n for expr in ast.walk(stmt)\n )\n if not self_used and not super_used:\n # Calculate the number of child nodes in the function\n num_child_nodes = sum(1 for _ in ast.walk(node))\n res = (\n self.parent_class_name,\n node.name,\n self.num_class_children,\n num_child_nodes,\n )\n self.non_self_methods.append(res)\n self.generic_visit(node)\n\n def visit_ClassDef(self, node):\n self.parent_class_name = node.name\n self.num_class_children = sum(1 for _ in ast.walk(node))\n self.generic_visit(node)\n\n\ndef find_python_files(path):\n if os.path.isfile(path) and path.endswith(\".py\"):\n return [path]\n elif os.path.isdir(path):\n py_files = []\n for root, dirs, files in os.walk(path):\n for file in files:\n if file.endswith(\".py\"):\n full_path = os.path.join(root, file)\n py_files.append(full_path)\n return py_files\n else:\n return []\n\n\ndef find_non_self_methods(path):\n python_files = find_python_files(path)\n 
non_self_methods = []\n for filename in python_files:\n with open(filename, \"r\") as file:\n try:\n node = ast.parse(file.read(), filename=filename)\n except:\n pass\n checker = SelfUsageChecker()\n checker.visit(node)\n for method in checker.non_self_methods:\n non_self_methods.append([filename] + list(method))\n\n return non_self_methods\n\n\ndef process(entry):\n fname, class_name, method_name, class_children, method_children = entry\n if method_children > class_children / 2:\n return\n if method_children < 250:\n return\n\n fname = Path(fname)\n if \"test\" in fname.stem:\n return\n\n print(f\"{fname} {class_name} {method_name} {class_children} {method_children}\")\n\n dname = Path(\"tmp.benchmarks/refactor-benchmark-pylint\")\n dname.mkdir(exist_ok=True)\n\n dname = dname / f\"{fname.stem}_{class_name}_{method_name}\"\n dname.mkdir(exist_ok=True)\n\n shutil.copy(fname, dname / fname.name)\n\n docs_dname = dname / \".docs\"\n docs_dname.mkdir(exist_ok=True)\n\n ins_fname = docs_dname / \"instructions.md\"\n ins_fname.write_text(f\"\"\"# Refactor {class_name}.{method_name}\n\nRefactor the `{method_name}` method in the `{class_name}` class to be a stand alone, top level function.\nName the new function `{method_name}`, exactly the same name as the existing method.\nUpdate any existing `self.{method_name}` calls to work with the new `{method_name}` function.\n\"\"\") # noqa: E501\n\n test_fname = dname / f\"{fname.stem}_test.py\"\n test_fname.write_text(f\"\"\"\nimport unittest\nfrom benchmark.refactor_tools import verify_refactor\nfrom pathlib import Path\n\nclass TheTest(unittest.TestCase):\n def test_{method_name}(self):\n fname = Path(__file__).parent / \"{fname.name}\"\n method = \"{method_name}\"\n method_children = {method_children}\n\n class_name = \"{class_name}\"\n class_children = {class_children}\n\n verify_refactor(fname, method, method_children, class_name, class_children)\n\nif __name__ == \"__main__\":\n unittest.main()\n\"\"\")\n\n\ndef main(paths):\n for path in paths:\n methods = find_non_self_methods(path)\n # methods = sorted(methods, key=lambda x: x[4])\n\n for method in methods:\n process(method)\n\n\nif __name__ == \"__main__\":\n main(sys.argv[1:])\n",
"path": "benchmark/refactor_tools.py"
}
] | 12_2 | python | import unittest
import sys
from pathlib import Path
class TestRefactorTools(unittest.TestCase):
def test_directory_name_change(self):
file_path = Path('benchmark/refactor_tools.py')
with open(file_path, 'r') as file:
lines = file.readlines()
        # Check that line 160 contains the updated directory name
self.assertIn("tmp.benchmarks/refactor-benchmark-spyder", lines[159].strip())
# Check that the original directory name no longer exists in the file
self.assertNotIn("tmp.benchmarks/refactor-benchmark-pylint", "".join(lines))
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestRefactorTools))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
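A note on the verification approach in record 12_2: refactor_tools.py hinges on one AST trick. ParentNodeTransformer stamps every node with a parent attribute, which is what lets verify_full_func_at_top_level assert that a function's parent is the Module node itself. A minimal standalone sketch of that pattern (the sample source string is invented for illustration):

import ast

SOURCE = '''
def standalone():
    return 42

class Box:
    def method(self):
        return 1
'''

tree = ast.parse(SOURCE)

# Stamp each node with its parent, mirroring ParentNodeTransformer.
for node in ast.walk(tree):
    for child in ast.iter_child_nodes(node):
        child.parent = node

# A function is top level exactly when its parent is the Module node.
for node in ast.walk(tree):
    if isinstance(node, ast.FunctionDef):
        print(node.name, isinstance(node.parent, ast.Module))
# standalone True
# method False

The same parent pointers are what let verify_refactor reject a solution that merely renames the method but leaves it nested inside the class.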
https://github.com/teamqurrent/aider | In the openai.py model file, we need to avoid swamping the model with too much context. In the `__init__` function of the file, reduce the max_chat_history_tokens for the elif cases of 32 and 128 tokens to '2 * 1024'. Also apply the same reduction of max_chat_history_tokens to the gpt-3.5-turbo-1106 model. Doing so should prevent the model from getting overwhelmed by long chat histories in the future | 560759f | aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2023.5.7
charset-normalizer==3.1.0
frozenlist==1.3.3
gitdb==4.0.10
GitPython==3.1.31
idna==3.4
markdown-it-py==2.2.0
mdurl==0.1.2
multidict==6.0.4
openai==0.27.6
prompt-toolkit==3.0.38
Pygments==2.15.1
requests==2.30.0
rich==13.3.5
smmap==5.0.0
tqdm==4.65.0
urllib3==2.0.2
wcwidth==0.2.6
yarl==1.9.2
pytest==7.3.1
tiktoken==0.4.0
configargparse
PyYAML
backoff==2.2.1
networkx==3.1
diskcache==5.6.1
numpy==1.26.1
scipy==1.11.3
jsonschema==4.17.3
sounddevice==0.4.6
soundfile==0.12.1
pathspec==0.11.2
grep-ast==0.2.4
| python3.9 | 92f4100 | diff --git a/aider/models/openai.py b/aider/models/openai.py
--- a/aider/models/openai.py
+++ b/aider/models/openai.py
@@ -44,11 +44,11 @@ class OpenAIModel(Model):
elif tokens == 32:
self.prompt_price = 0.06
self.completion_price = 0.12
- self.max_chat_history_tokens = 3 * 1024
+ self.max_chat_history_tokens = 2 * 1024
elif tokens == 128:
self.prompt_price = 0.01
self.completion_price = 0.03
- self.max_chat_history_tokens = 4 * 1024
+ self.max_chat_history_tokens = 2 * 1024
return
@@ -60,7 +60,7 @@ class OpenAIModel(Model):
if self.name == "gpt-3.5-turbo-1106":
self.prompt_price = 0.001
self.completion_price = 0.002
- self.max_chat_history_tokens = 3 * 1024
+ self.max_chat_history_tokens = 2 * 1024
elif tokens == 4:
self.prompt_price = 0.0015
self.completion_price = 0.002
| [
{
"content": "import re\n\nimport tiktoken\n\nfrom .model import Model\n\nknown_tokens = {\n \"gpt-3.5-turbo\": 4,\n \"gpt-4\": 8,\n \"gpt-4-1106-preview\": 128,\n \"gpt-3.5-turbo-1106\": 16,\n}\n\n\nclass OpenAIModel(Model):\n def __init__(self, name):\n self.name = name\n\n tokens = None\n\n match = re.search(r\"-([0-9]+)k\", name)\n if match:\n tokens = int(match.group(1))\n else:\n for m, t in known_tokens.items():\n if name.startswith(m):\n tokens = t\n\n if tokens is None:\n raise ValueError(f\"Unknown context window size for model: {name}\")\n\n self.max_context_tokens = tokens * 1024\n self.tokenizer = tiktoken.encoding_for_model(name)\n\n if self.is_gpt4():\n self.edit_format = \"diff\"\n self.use_repo_map = True\n self.send_undo_reply = True\n\n if tokens == 8:\n self.prompt_price = 0.03\n self.completion_price = 0.06\n self.max_chat_history_tokens = 1024\n elif tokens == 32:\n self.prompt_price = 0.06\n self.completion_price = 0.12\n self.max_chat_history_tokens = 3 * 1024\n elif tokens == 128:\n self.prompt_price = 0.01\n self.completion_price = 0.03\n self.max_chat_history_tokens = 4 * 1024\n\n return\n\n if self.is_gpt35():\n self.edit_format = \"whole\"\n self.always_available = True\n self.send_undo_reply = False\n\n if self.name == \"gpt-3.5-turbo-1106\":\n self.prompt_price = 0.001\n self.completion_price = 0.002\n self.max_chat_history_tokens = 3 * 1024\n elif tokens == 4:\n self.prompt_price = 0.0015\n self.completion_price = 0.002\n self.max_chat_history_tokens = 1024\n elif tokens == 16:\n self.prompt_price = 0.003\n self.completion_price = 0.004\n self.max_chat_history_tokens = 2 * 1024\n\n return\n\n raise ValueError(f\"Unsupported model: {name}\")\n\n def is_gpt4(self):\n return self.name.startswith(\"gpt-4\")\n\n def is_gpt35(self):\n return self.name.startswith(\"gpt-3.5-turbo\")\n",
"path": "aider/models/openai.py"
}
] | 12_3 | python | import unittest
import sys
from pathlib import Path
class TestOpenAiModel(unittest.TestCase):
def test_max_chat_history_tokens_update(self):
file_path = Path('aider/models/openai.py')
with open(file_path, 'r') as file:
lines = file.readlines()
buffer = 1 # Number of lines to check before and after the expected line
expected_line_1 = 47
expected_line_2 = 51
expected_line_3 = 63
def check_line_in_range(expected_line, search_string):
start = max(0, expected_line - buffer)
end = min(expected_line + buffer, len(lines))
return any(search_string in lines[i].strip() for i in range(start, end))
# Check that the updates are within the buffer range of the expected lines (dirty workaround but works for this context)
self.assertTrue(check_line_in_range(expected_line_1, "self.max_chat_history_tokens = 2 * 1024"))
self.assertTrue(check_line_in_range(expected_line_2, "self.max_chat_history_tokens = 2 * 1024"))
self.assertTrue(check_line_in_range(expected_line_3, "self.max_chat_history_tokens = 2 * 1024"))
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestOpenAiModel))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
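A note on record 12_3: the patch caps max_chat_history_tokens at 2 * 1024 (2048) tokens for the 32k and 128k elif branches and for gpt-3.5-turbo-1106. A self-contained sketch of the post-patch budgets, reduced to a plain function so it runs without aider installed (this is not the real OpenAIModel class):

def max_chat_history_tokens(name: str, context_k: int) -> int:
    # Mirrors the post-patch branches in aider/models/openai.py.
    if name == "gpt-3.5-turbo-1106":
        return 2 * 1024  # reduced from 3 * 1024
    if context_k in (32, 128):
        return 2 * 1024  # reduced from 3 * 1024 and 4 * 1024
    if context_k == 16:
        return 2 * 1024
    return 1024  # the 4k gpt-3.5 and 8k gpt-4 cases

assert max_chat_history_tokens("gpt-4-32k", 32) == 2048
assert max_chat_history_tokens("gpt-4-1106-preview", 128) == 2048
assert max_chat_history_tokens("gpt-3.5-turbo-1106", 16) == 2048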
https://github.com/teamqurrent/aider | Your goal is to modify the `base_coder.py` file in the coders folder to show the repo map before the added files in the get_files_messages function. Find the relevant code block associated with the repo map and move it above the code for showing added files. | cab7460 | aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2023.5.7
charset-normalizer==3.1.0
frozenlist==1.3.3
gitdb==4.0.10
GitPython==3.1.31
idna==3.4
markdown-it-py==2.2.0
mdurl==0.1.2
multidict==6.0.4
openai==0.27.6
prompt-toolkit==3.0.38
Pygments==2.15.1
requests==2.30.0
rich==13.3.5
smmap==5.0.0
tqdm==4.65.0
urllib3==2.0.2
wcwidth==0.2.6
yarl==1.9.2
pytest==7.3.1
tiktoken==0.4.0
configargparse
PyYAML
backoff==2.2.1
networkx==3.1
diskcache==5.6.1
numpy==1.26.1
scipy==1.11.3
jsonschema==4.17.3
sounddevice==0.4.6
soundfile==0.12.1
pathspec==0.11.2
grep-ast==0.2.4
| python3.9 | 560759f | diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py
--- a/aider/coders/base_coder.py
+++ b/aider/coders/base_coder.py
@@ -311,6 +311,13 @@ class Coder:
def get_files_messages(self):
all_content = ""
+
+ repo_content = self.get_repo_map()
+ if repo_content:
+ if all_content:
+ all_content += "\n"
+ all_content += repo_content
+
if self.abs_fnames:
files_content = self.gpt_prompts.files_content_prefix
files_content += self.get_files_content()
@@ -319,12 +326,6 @@ class Coder:
all_content += files_content
- repo_content = self.get_repo_map()
- if repo_content:
- if all_content:
- all_content += "\n"
- all_content += repo_content
-
files_messages = [
dict(role="user", content=all_content),
dict(role="assistant", content="Ok."),
diff --git a/aider/coders/editblock_prompts.py b/aider/coders/editblock_prompts.py
--- a/aider/coders/editblock_prompts.py
+++ b/aider/coders/editblock_prompts.py
@@ -182,7 +182,7 @@ If you want to put code in a new file, use a *SEARCH/REPLACE block* with:
files_no_full_files = "I am not sharing any *read-write* files yet."
- repo_content_prefix = """Below here are summaries of other files present in this git repository.
+ repo_content_prefix = """Below here are summaries of files present in the user's git repository.
Do not propose changes to these files, they are *read-only*.
To make a file *read-write*, ask the user to *add it to the chat*.
"""
| [
{
"content": "#!/usr/bin/env python\n\nimport hashlib\nimport json\nimport os\nimport sys\nimport threading\nimport time\nimport traceback\nfrom json.decoder import JSONDecodeError\nfrom pathlib import Path\n\nimport openai\nfrom jsonschema import Draft7Validator\nfrom rich.console import Console, Text\nfrom rich.live import Live\nfrom rich.markdown import Markdown\n\nfrom aider import models, prompts, utils\nfrom aider.commands import Commands\nfrom aider.history import ChatSummary\nfrom aider.io import InputOutput\nfrom aider.repo import GitRepo\nfrom aider.repomap import RepoMap\nfrom aider.sendchat import send_with_retries\n\nfrom ..dump import dump # noqa: F401\n\n\nclass MissingAPIKeyError(ValueError):\n pass\n\n\nclass ExhaustedContextWindow(Exception):\n pass\n\n\ndef wrap_fence(name):\n return f\"<{name}>\", f\"</{name}>\"\n\n\nclass Coder:\n client = None\n abs_fnames = None\n repo = None\n last_aider_commit_hash = None\n last_asked_for_commit_time = 0\n repo_map = None\n functions = None\n total_cost = 0.0\n num_exhausted_context_windows = 0\n last_keyboard_interrupt = None\n\n @classmethod\n def create(\n self,\n main_model=None,\n edit_format=None,\n io=None,\n client=None,\n skip_model_availabily_check=False,\n **kwargs,\n ):\n from . import EditBlockCoder, WholeFileCoder\n\n if not main_model:\n main_model = models.GPT4\n\n if not skip_model_availabily_check and not main_model.always_available:\n if not check_model_availability(io, client, main_model):\n fallback_model = models.GPT35_1106\n if main_model != models.GPT4:\n io.tool_error(\n f\"API key does not support {main_model.name}, falling back to\"\n f\" {fallback_model.name}\"\n )\n main_model = fallback_model\n\n if edit_format is None:\n edit_format = main_model.edit_format\n\n if edit_format == \"diff\":\n return EditBlockCoder(client, main_model, io, **kwargs)\n elif edit_format == \"whole\":\n return WholeFileCoder(client, main_model, io, **kwargs)\n else:\n raise ValueError(f\"Unknown edit format {edit_format}\")\n\n def __init__(\n self,\n client,\n main_model,\n io,\n fnames=None,\n git_dname=None,\n pretty=True,\n show_diffs=False,\n auto_commits=True,\n dirty_commits=True,\n dry_run=False,\n map_tokens=1024,\n verbose=False,\n assistant_output_color=\"blue\",\n code_theme=\"default\",\n stream=True,\n use_git=True,\n voice_language=None,\n aider_ignore_file=None,\n ):\n self.client = client\n\n if not fnames:\n fnames = []\n\n if io is None:\n io = InputOutput()\n\n self.chat_completion_call_hashes = []\n self.chat_completion_response_hashes = []\n self.need_commit_before_edits = set()\n\n self.verbose = verbose\n self.abs_fnames = set()\n self.cur_messages = []\n self.done_messages = []\n\n self.io = io\n self.stream = stream\n\n if not auto_commits:\n dirty_commits = False\n\n self.auto_commits = auto_commits\n self.dirty_commits = dirty_commits\n self.assistant_output_color = assistant_output_color\n self.code_theme = code_theme\n\n self.dry_run = dry_run\n self.pretty = pretty\n\n if pretty:\n self.console = Console()\n else:\n self.console = Console(force_terminal=False, no_color=True)\n\n self.main_model = main_model\n\n self.io.tool_output(f\"Model: {main_model.name}\")\n\n self.show_diffs = show_diffs\n\n self.commands = Commands(self.io, self, voice_language)\n\n for fname in fnames:\n fname = Path(fname)\n if not fname.exists():\n self.io.tool_output(f\"Creating empty file {fname}\")\n fname.parent.mkdir(parents=True, exist_ok=True)\n fname.touch()\n\n if not fname.is_file():\n raise 
ValueError(f\"{fname} is not a file\")\n\n self.abs_fnames.add(str(fname.resolve()))\n\n if use_git:\n try:\n self.repo = GitRepo(\n self.io, fnames, git_dname, aider_ignore_file, client=self.client\n )\n self.root = self.repo.root\n except FileNotFoundError:\n self.repo = None\n\n if self.repo:\n rel_repo_dir = self.repo.get_rel_repo_dir()\n self.io.tool_output(f\"Git repo: {rel_repo_dir}\")\n else:\n self.io.tool_output(\"Git repo: none\")\n self.find_common_root()\n\n if main_model.use_repo_map and self.repo and self.gpt_prompts.repo_content_prefix:\n self.repo_map = RepoMap(\n map_tokens,\n self.root,\n self.main_model,\n io,\n self.gpt_prompts.repo_content_prefix,\n self.verbose,\n )\n\n if map_tokens > 0:\n self.io.tool_output(f\"Repo-map: using {map_tokens} tokens\")\n else:\n self.io.tool_output(\"Repo-map: disabled because map_tokens == 0\")\n\n for fname in self.get_inchat_relative_files():\n self.io.tool_output(f\"Added {fname} to the chat.\")\n\n self.summarizer = ChatSummary(\n self.client,\n models.Model.weak_model(),\n self.main_model.max_chat_history_tokens,\n )\n\n self.summarizer_thread = None\n self.summarized_done_messages = []\n\n # validate the functions jsonschema\n if self.functions:\n for function in self.functions:\n Draft7Validator.check_schema(function)\n\n if self.verbose:\n self.io.tool_output(\"JSON Schema:\")\n self.io.tool_output(json.dumps(self.functions, indent=4))\n\n def find_common_root(self):\n if len(self.abs_fnames) == 1:\n self.root = os.path.dirname(list(self.abs_fnames)[0])\n elif self.abs_fnames:\n self.root = os.path.commonpath(list(self.abs_fnames))\n else:\n self.root = os.getcwd()\n\n self.root = utils.safe_abs_path(self.root)\n\n def add_rel_fname(self, rel_fname):\n self.abs_fnames.add(self.abs_root_path(rel_fname))\n\n def abs_root_path(self, path):\n res = Path(self.root) / path\n return utils.safe_abs_path(res)\n\n fences = [\n (\"``\" + \"`\", \"``\" + \"`\"),\n wrap_fence(\"source\"),\n wrap_fence(\"code\"),\n wrap_fence(\"pre\"),\n wrap_fence(\"codeblock\"),\n wrap_fence(\"sourcecode\"),\n ]\n fence = fences[0]\n\n def show_pretty(self):\n if not self.pretty:\n return False\n\n # only show pretty output if fences are the normal triple-backtick\n if self.fence != self.fences[0]:\n return False\n\n return True\n\n def get_abs_fnames_content(self):\n for fname in list(self.abs_fnames):\n content = self.io.read_text(fname)\n\n if content is None:\n relative_fname = self.get_rel_fname(fname)\n self.io.tool_error(f\"Dropping {relative_fname} from the chat.\")\n self.abs_fnames.remove(fname)\n else:\n yield fname, content\n\n def choose_fence(self):\n all_content = \"\"\n for _fname, content in self.get_abs_fnames_content():\n all_content += content + \"\\n\"\n\n good = False\n for fence_open, fence_close in self.fences:\n if fence_open in all_content or fence_close in all_content:\n continue\n good = True\n break\n\n if good:\n self.fence = (fence_open, fence_close)\n else:\n self.fence = self.fences[0]\n self.io.tool_error(\n \"Unable to find a fencing strategy! 
Falling back to:\"\n \" {self.fence[0]}...{self.fence[1]}\"\n )\n\n return\n\n def get_files_content(self, fnames=None):\n if not fnames:\n fnames = self.abs_fnames\n\n prompt = \"\"\n for fname, content in self.get_abs_fnames_content():\n relative_fname = self.get_rel_fname(fname)\n prompt += \"\\n\"\n prompt += relative_fname\n prompt += f\"\\n{self.fence[0]}\\n\"\n prompt += content\n prompt += f\"{self.fence[1]}\\n\"\n\n return prompt\n\n def get_repo_map(self):\n if not self.repo_map:\n return\n\n other_files = set(self.get_all_abs_files()) - set(self.abs_fnames)\n repo_content = self.repo_map.get_repo_map(self.abs_fnames, other_files)\n return repo_content\n\n def get_files_messages(self):\n all_content = \"\"\n if self.abs_fnames:\n files_content = self.gpt_prompts.files_content_prefix\n files_content += self.get_files_content()\n else:\n files_content = self.gpt_prompts.files_no_full_files\n\n all_content += files_content\n\n repo_content = self.get_repo_map()\n if repo_content:\n if all_content:\n all_content += \"\\n\"\n all_content += repo_content\n\n files_messages = [\n dict(role=\"user\", content=all_content),\n dict(role=\"assistant\", content=\"Ok.\"),\n ]\n\n return files_messages\n\n def run(self, with_message=None):\n while True:\n try:\n if with_message:\n new_user_message = with_message\n self.io.user_input(with_message)\n else:\n new_user_message = self.run_loop()\n\n while new_user_message:\n new_user_message = self.send_new_user_message(new_user_message)\n\n if with_message:\n return\n\n except KeyboardInterrupt:\n self.keyboard_interrupt()\n except EOFError:\n return\n\n def keyboard_interrupt(self):\n now = time.time()\n\n thresh = 2 # seconds\n if self.last_keyboard_interrupt and now - self.last_keyboard_interrupt < thresh:\n self.io.tool_error(\"\\n\\n^C KeyboardInterrupt\")\n sys.exit()\n\n self.io.tool_error(\"\\n\\n^C again to exit\")\n\n self.last_keyboard_interrupt = now\n\n def summarize_start(self):\n if not self.summarizer.too_big(self.done_messages):\n return\n\n self.summarize_end()\n\n if self.verbose:\n self.io.tool_output(\"Starting to summarize chat history.\")\n\n self.summarizer_thread = threading.Thread(target=self.summarize_worker)\n self.summarizer_thread.start()\n\n def summarize_worker(self):\n try:\n self.summarized_done_messages = self.summarizer.summarize(self.done_messages)\n except ValueError as err:\n self.io.tool_error(err.args[0])\n\n if self.verbose:\n self.io.tool_output(\"Finished summarizing chat history.\")\n\n def summarize_end(self):\n if self.summarizer_thread is None:\n return\n\n self.summarizer_thread.join()\n self.summarizer_thread = None\n\n self.done_messages = self.summarized_done_messages\n self.summarized_done_messages = []\n\n def move_back_cur_messages(self, message):\n self.done_messages += self.cur_messages\n self.summarize_start()\n\n if message:\n self.done_messages += [\n dict(role=\"user\", content=message),\n dict(role=\"assistant\", content=\"Ok.\"),\n ]\n self.cur_messages = []\n\n def run_loop(self):\n inp = self.io.get_input(\n self.root,\n self.get_inchat_relative_files(),\n self.get_addable_relative_files(),\n self.commands,\n )\n\n if not inp:\n return\n\n if self.commands.is_command(inp):\n return self.commands.run(inp)\n\n self.check_for_file_mentions(inp)\n\n return self.send_new_user_message(inp)\n\n def fmt_system_prompt(self, prompt):\n prompt = prompt.format(fence=self.fence)\n return prompt\n\n def format_messages(self):\n self.choose_fence()\n main_sys = 
self.fmt_system_prompt(self.gpt_prompts.main_system)\n main_sys += \"\\n\" + self.fmt_system_prompt(self.gpt_prompts.system_reminder)\n\n messages = [\n dict(role=\"system\", content=main_sys),\n ]\n\n self.summarize_end()\n messages += self.done_messages\n messages += self.get_files_messages()\n\n reminder_message = [\n dict(role=\"system\", content=self.fmt_system_prompt(self.gpt_prompts.system_reminder)),\n ]\n\n messages_tokens = self.main_model.token_count(messages)\n reminder_tokens = self.main_model.token_count(reminder_message)\n cur_tokens = self.main_model.token_count(self.cur_messages)\n\n if None not in (messages_tokens, reminder_tokens, cur_tokens):\n total_tokens = messages_tokens + reminder_tokens + cur_tokens\n else:\n # add the reminder anyway\n total_tokens = 0\n\n # Add the reminder prompt if we still have room to include it.\n if total_tokens < self.main_model.max_context_tokens:\n messages += reminder_message\n\n messages += self.cur_messages\n\n return messages\n\n def send_new_user_message(self, inp):\n self.cur_messages += [\n dict(role=\"user\", content=inp),\n ]\n\n messages = self.format_messages()\n\n if self.verbose:\n utils.show_messages(messages, functions=self.functions)\n\n exhausted = False\n interrupted = False\n try:\n interrupted = self.send(messages, functions=self.functions)\n except ExhaustedContextWindow:\n exhausted = True\n except openai.BadRequestError as err:\n if \"maximum context length\" in str(err):\n exhausted = True\n else:\n raise err\n\n if exhausted:\n self.num_exhausted_context_windows += 1\n self.io.tool_error(\"The chat session is larger than the context window!\\n\")\n self.commands.cmd_tokens(\"\")\n self.io.tool_error(\"\\nTo reduce token usage:\")\n self.io.tool_error(\" - Use /drop to remove unneeded files from the chat session.\")\n self.io.tool_error(\" - Use /clear to clear chat history.\")\n return\n\n if self.partial_response_function_call:\n args = self.parse_partial_args()\n if args:\n content = args[\"explanation\"]\n else:\n content = \"\"\n elif self.partial_response_content:\n content = self.partial_response_content\n else:\n content = \"\"\n\n if interrupted:\n content += \"\\n^C KeyboardInterrupt\"\n\n self.io.tool_output()\n if interrupted:\n self.cur_messages += [dict(role=\"assistant\", content=content)]\n return\n\n edited, edit_error = self.apply_updates()\n if edit_error:\n self.update_cur_messages(set())\n return edit_error\n\n self.update_cur_messages(edited)\n\n if edited:\n if self.repo and self.auto_commits and not self.dry_run:\n saved_message = self.auto_commit(edited)\n elif hasattr(self.gpt_prompts, \"files_content_gpt_edits_no_repo\"):\n saved_message = self.gpt_prompts.files_content_gpt_edits_no_repo\n else:\n saved_message = None\n\n self.move_back_cur_messages(saved_message)\n\n add_rel_files_message = self.check_for_file_mentions(content)\n if add_rel_files_message:\n return add_rel_files_message\n\n def update_cur_messages(self, edited):\n if self.partial_response_content:\n self.cur_messages += [dict(role=\"assistant\", content=self.partial_response_content)]\n if self.partial_response_function_call:\n self.cur_messages += [\n dict(\n role=\"assistant\",\n content=None,\n function_call=self.partial_response_function_call,\n )\n ]\n\n def check_for_file_mentions(self, content):\n words = set(word for word in content.split())\n\n # drop sentence punctuation from the end\n words = set(word.rstrip(\",.!;\") for word in words)\n\n # strip away all kinds of quotes\n quotes = \"\".join(['\"', \"'\", 
\"`\"])\n words = set(word.strip(quotes) for word in words)\n\n addable_rel_fnames = self.get_addable_relative_files()\n\n mentioned_rel_fnames = set()\n fname_to_rel_fnames = {}\n for rel_fname in addable_rel_fnames:\n if rel_fname in words:\n mentioned_rel_fnames.add(str(rel_fname))\n\n fname = os.path.basename(rel_fname)\n if fname not in fname_to_rel_fnames:\n fname_to_rel_fnames[fname] = []\n fname_to_rel_fnames[fname].append(rel_fname)\n\n for fname, rel_fnames in fname_to_rel_fnames.items():\n if len(rel_fnames) == 1 and fname in words:\n mentioned_rel_fnames.add(rel_fnames[0])\n\n if not mentioned_rel_fnames:\n return\n\n for rel_fname in mentioned_rel_fnames:\n self.io.tool_output(rel_fname)\n\n if not self.io.confirm_ask(\"Add these files to the chat?\"):\n return\n\n for rel_fname in mentioned_rel_fnames:\n self.add_rel_fname(rel_fname)\n\n return prompts.added_files.format(fnames=\", \".join(mentioned_rel_fnames))\n\n def send(self, messages, model=None, functions=None):\n if not model:\n model = self.main_model.name\n\n self.partial_response_content = \"\"\n self.partial_response_function_call = dict()\n\n interrupted = False\n try:\n hash_object, completion = send_with_retries(\n self.client, model, messages, functions, self.stream\n )\n self.chat_completion_call_hashes.append(hash_object.hexdigest())\n\n if self.stream:\n self.show_send_output_stream(completion)\n else:\n self.show_send_output(completion)\n except KeyboardInterrupt:\n self.keyboard_interrupt()\n interrupted = True\n\n if self.partial_response_content:\n self.io.ai_output(self.partial_response_content)\n elif self.partial_response_function_call:\n # TODO: push this into subclasses\n args = self.parse_partial_args()\n if args:\n self.io.ai_output(json.dumps(args, indent=4))\n\n return interrupted\n\n def show_send_output(self, completion):\n if self.verbose:\n print(completion)\n\n show_func_err = None\n show_content_err = None\n try:\n self.partial_response_function_call = completion.choices[0].message.function_call\n except AttributeError as func_err:\n show_func_err = func_err\n\n try:\n self.partial_response_content = completion.choices[0].message.content\n except AttributeError as content_err:\n show_content_err = content_err\n\n resp_hash = dict(\n function_call=self.partial_response_function_call,\n content=self.partial_response_content,\n )\n resp_hash = hashlib.sha1(json.dumps(resp_hash, sort_keys=True).encode())\n self.chat_completion_response_hashes.append(resp_hash.hexdigest())\n\n if show_func_err and show_content_err:\n self.io.tool_error(show_func_err)\n self.io.tool_error(show_content_err)\n raise Exception(\"No data found in openai response!\")\n\n tokens = None\n if hasattr(completion, \"usage\"):\n prompt_tokens = completion.usage.prompt_tokens\n completion_tokens = completion.usage.completion_tokens\n\n tokens = f\"{prompt_tokens} prompt tokens, {completion_tokens} completion tokens\"\n if self.main_model.prompt_price:\n cost = prompt_tokens * self.main_model.prompt_price / 1000\n cost += completion_tokens * self.main_model.completion_price / 1000\n tokens += f\", ${cost:.6f} cost\"\n self.total_cost += cost\n\n show_resp = self.render_incremental_response(True)\n if self.show_pretty():\n show_resp = Markdown(\n show_resp, style=self.assistant_output_color, code_theme=self.code_theme\n )\n else:\n show_resp = Text(show_resp or \"<no response>\")\n\n self.io.console.print(show_resp)\n\n if tokens is not None:\n self.io.tool_output(tokens)\n\n def show_send_output_stream(self, completion):\n 
live = None\n if self.show_pretty():\n live = Live(vertical_overflow=\"scroll\")\n\n try:\n if live:\n live.start()\n\n for chunk in completion:\n if len(chunk.choices) == 0:\n continue\n\n if (\n hasattr(chunk.choices[0], \"finish_reason\")\n and chunk.choices[0].finish_reason == \"length\"\n ):\n raise ExhaustedContextWindow()\n\n try:\n func = chunk.choices[0].delta.function_call\n # dump(func)\n for k, v in func.items():\n if k in self.partial_response_function_call:\n self.partial_response_function_call[k] += v\n else:\n self.partial_response_function_call[k] = v\n except AttributeError:\n pass\n\n try:\n text = chunk.choices[0].delta.content\n if text:\n self.partial_response_content += text\n except AttributeError:\n text = None\n\n if self.show_pretty():\n self.live_incremental_response(live, False)\n elif text:\n sys.stdout.write(text)\n sys.stdout.flush()\n finally:\n if live:\n self.live_incremental_response(live, True)\n live.stop()\n\n def live_incremental_response(self, live, final):\n show_resp = self.render_incremental_response(final)\n if not show_resp:\n return\n\n md = Markdown(show_resp, style=self.assistant_output_color, code_theme=self.code_theme)\n live.update(md)\n\n def render_incremental_response(self, final):\n return self.partial_response_content\n\n def get_rel_fname(self, fname):\n return os.path.relpath(fname, self.root)\n\n def get_inchat_relative_files(self):\n files = [self.get_rel_fname(fname) for fname in self.abs_fnames]\n return sorted(set(files))\n\n def get_all_relative_files(self):\n if self.repo:\n files = self.repo.get_tracked_files()\n else:\n files = self.get_inchat_relative_files()\n\n files = [fname for fname in files if Path(self.abs_root_path(fname)).is_file()]\n return sorted(set(files))\n\n def get_all_abs_files(self):\n files = self.get_all_relative_files()\n files = [self.abs_root_path(path) for path in files]\n return files\n\n def get_last_modified(self):\n files = [Path(fn) for fn in self.get_all_abs_files() if Path(fn).exists()]\n if not files:\n return 0\n return max(path.stat().st_mtime for path in files)\n\n def get_addable_relative_files(self):\n return set(self.get_all_relative_files()) - set(self.get_inchat_relative_files())\n\n def check_for_dirty_commit(self, path):\n if not self.repo:\n return\n if not self.dirty_commits:\n return\n if not self.repo.is_dirty(path):\n return\n\n fullp = Path(self.abs_root_path(path))\n if not fullp.stat().st_size:\n return\n\n self.io.tool_output(f\"Committing {path} before applying edits.\")\n self.need_commit_before_edits.add(path)\n\n def allowed_to_edit(self, path):\n full_path = self.abs_root_path(path)\n if self.repo:\n need_to_add = not self.repo.path_in_repo(path)\n else:\n need_to_add = False\n\n if full_path in self.abs_fnames:\n self.check_for_dirty_commit(path)\n return True\n\n if not Path(full_path).exists():\n if not self.io.confirm_ask(f\"Allow creation of new file {path}?\"):\n self.io.tool_error(f\"Skipping edits to {path}\")\n return\n\n if not self.dry_run:\n Path(full_path).parent.mkdir(parents=True, exist_ok=True)\n Path(full_path).touch()\n\n # Seems unlikely that we needed to create the file, but it was\n # actually already part of the repo.\n # But let's only add if we need to, just to be safe.\n if need_to_add:\n self.repo.repo.git.add(full_path)\n\n self.abs_fnames.add(full_path)\n return True\n\n if not self.io.confirm_ask(\n f\"Allow edits to {path} which was not previously added to chat?\"\n ):\n self.io.tool_error(f\"Skipping edits to {path}\")\n return\n\n if 
need_to_add:\n self.repo.repo.git.add(full_path)\n\n self.abs_fnames.add(full_path)\n self.check_for_dirty_commit(path)\n\n return True\n\n apply_update_errors = 0\n\n def prepare_to_edit(self, edits):\n res = []\n seen = dict()\n\n self.need_commit_before_edits = set()\n\n for edit in edits:\n path = edit[0]\n if path in seen:\n allowed = seen[path]\n else:\n allowed = self.allowed_to_edit(path)\n seen[path] = allowed\n\n if allowed:\n res.append(edit)\n\n self.dirty_commit()\n self.need_commit_before_edits = set()\n\n return res\n\n def update_files(self):\n edits = self.get_edits()\n edits = self.prepare_to_edit(edits)\n self.apply_edits(edits)\n return set(edit[0] for edit in edits)\n\n def apply_updates(self):\n max_apply_update_errors = 3\n\n try:\n edited = self.update_files()\n except ValueError as err:\n err = err.args[0]\n self.apply_update_errors += 1\n if self.apply_update_errors < max_apply_update_errors:\n self.io.tool_error(f\"Malformed response #{self.apply_update_errors}, retrying...\")\n self.io.tool_error(str(err))\n return None, err\n else:\n self.io.tool_error(f\"Malformed response #{self.apply_update_errors}, aborting.\")\n return False, None\n\n except Exception as err:\n print(err)\n print()\n traceback.print_exc()\n self.apply_update_errors += 1\n if self.apply_update_errors < max_apply_update_errors:\n self.io.tool_error(f\"Update exception #{self.apply_update_errors}, retrying...\")\n return None, str(err)\n else:\n self.io.tool_error(f\"Update exception #{self.apply_update_errors}, aborting\")\n return False, None\n\n self.apply_update_errors = 0\n\n for path in edited:\n if self.dry_run:\n self.io.tool_output(f\"Did not apply edit to {path} (--dry-run)\")\n else:\n self.io.tool_output(f\"Applied edit to {path}\")\n\n return edited, None\n\n def parse_partial_args(self):\n # dump(self.partial_response_function_call)\n\n data = self.partial_response_function_call.get(\"arguments\")\n if not data:\n return\n\n try:\n return json.loads(data)\n except JSONDecodeError:\n pass\n\n try:\n return json.loads(data + \"]}\")\n except JSONDecodeError:\n pass\n\n try:\n return json.loads(data + \"}]}\")\n except JSONDecodeError:\n pass\n\n try:\n return json.loads(data + '\"}]}')\n except JSONDecodeError:\n pass\n\n # commits...\n\n def get_context_from_history(self, history):\n context = \"\"\n if history:\n for msg in history:\n context += \"\\n\" + msg[\"role\"].upper() + \": \" + msg[\"content\"] + \"\\n\"\n return context\n\n def auto_commit(self, edited):\n context = self.get_context_from_history(self.cur_messages)\n res = self.repo.commit(fnames=edited, context=context, prefix=\"aider: \")\n if res:\n commit_hash, commit_message = res\n self.last_aider_commit_hash = commit_hash\n\n return self.gpt_prompts.files_content_gpt_edits.format(\n hash=commit_hash,\n message=commit_message,\n )\n\n self.io.tool_output(\"No changes made to git tracked files.\")\n return self.gpt_prompts.files_content_gpt_no_edits\n\n def dirty_commit(self):\n if not self.need_commit_before_edits:\n return\n if not self.dirty_commits:\n return\n if not self.repo:\n return\n\n self.repo.commit(fnames=self.need_commit_before_edits)\n\n # files changed, move cur messages back behind the files messages\n self.move_back_cur_messages(self.gpt_prompts.files_content_local_edits)\n return True\n\n\ndef check_model_availability(io, client, main_model):\n try:\n available_models = client.models.list()\n except openai.NotFoundError:\n # Azure sometimes returns 404?\n # 
https://discord.com/channels/1131200896827654144/1182327371232186459\n io.tool_error(\"Unable to list available models, proceeding with {main_model.name}\")\n return True\n\n model_ids = sorted(model.id for model in available_models)\n if main_model.name in model_ids:\n return True\n\n available_models = \", \".join(model_ids)\n io.tool_error(f\"API key supports: {available_models}\")\n return False\n",
"path": "aider/coders/base_coder.py"
},
{
"content": "# flake8: noqa: E501\n\nfrom .base_prompts import CoderPrompts\n\n\nclass EditBlockPrompts(CoderPrompts):\n main_system = \"\"\"Act as an expert software developer.\nAlways use best practices when coding.\nWhen you edit or add code, respect and use existing conventions, libraries, etc.\nAlways COMPLETELY IMPLEMENT the needed code.\n\nTake requests for changes to the supplied code.\nIf the request is ambiguous, ask questions.\n\nOnce you understand the request you MUST:\n1. List the files you need to modify. Only suggest changes to a *read-write* files. Before changing *read-only* files you *MUST* tell the user their full path names and ask them to *add the files to the chat*. End your reply and wait for their approval.\n2. Think step-by-step and explain the needed changes with a numbered list of short sentences.\n3. Describe each change with a *SEARCH/REPLACE block* per the examples below.\n\nAll changes to files must use this *SEARCH/REPLACE block* format.\n\n# Example conversation 1:\n\n## USER: These are the *read-write* files:\n\nmathweb/flask/app.py\n{fence[0]}python\nfrom flask import Flask\n\napp = Flask(__name__)\n\n@app.route('/div/<int:n>/<int:d>')\ndef divide(n, d):\n if d == 0:\n return 'Error: Division by zero'\n else:\n result = n / d\n return str(result)\n\ndef factorial(n):\n \"compute factorial\"\n\n if n == 0:\n return 1\n else:\n return n * factorial(n-1)\n\n@app.route('/factorial/<int:n>')\ndef get_factorial(n):\n return str(factorial(n))\n\nif __name__ == '__main__':\n print(\"Starting...\")\n app.run()\n{fence[1]}\n\n## ASSISTANT: Ok.\n\n## USER: Change get_factorial() to use math.factorial\n\n## ASSISTANT:\n\nDo make this change we need to modify `mathweb/flask/app.py` to:\n\n1. Import the math package.\n2. Remove the existing factorial() function.\n3. Update get_factorial() to call math.factorial instead.\n\nHere are the *SEARCH/REPLACE* blocks:\n\nmathweb/flask/app.py\n{fence[0]}python\n<<<<<<< SEARCH\nfrom flask import Flask\n=======\nimport math\nfrom flask import Flask\n>>>>>>> REPLACE\n{fence[1]}\n\nmathweb/flask/app.py\n{fence[0]}python\n<<<<<<< SEARCH\ndef factorial(n):\n \"compute factorial\"\n\n if n == 0:\n return 1\n else:\n return n * factorial(n-1)\n\n=======\n>>>>>>> REPLACE\n{fence[1]}\n\nmathweb/flask/app.py\n{fence[0]}python\n<<<<<<< SEARCH\n return str(factorial(n))\n=======\n return str(math.factorial(n))\n>>>>>>> REPLACE\n{fence[1]}\n\n# Example conversation 2:\n\n## USER: These are the *read-write* files:\n\nmain.py\n{fence[0]}python\ndef hello():\n \"print a greeting\"\n\n print(\"hello\")\n\nif __name__ == '__main__':\n hello()\n{fence[1]}\n\n## ASSISTANT: Ok.\n\n## USER: Refactor hello() into its own file.\n\n## ASSISTANT:\n\nDo make this change we need to modify `main.py` and make a new file `hello.py`:\n\n1. Make a new hello.py file with hello() in it.\n2. Remove hello() from main.py and replace it with an import.\n\nHere are the *SEARCH/REPLACE* blocks:\n\nhello.py\n{fence[0]}python\n<<<<<<< SEARCH\n=======\ndef hello():\n \"print a greeting\"\n\n print(\"hello\")\n>>>>>>> REPLACE\n{fence[1]}\n\nmain.py\n{fence[0]}python\n<<<<<<< SEARCH\ndef hello():\n \"print a greeting\"\n\n print(\"hello\")\n=======\nfrom hello import hello\n>>>>>>> REPLACE\n{fence[1]}\n\n# Rules\n\"\"\"\n\n system_reminder = \"\"\"Every *SEARCH/REPLACE block* must use this format:\n1. The file path alone on a line, eg: main.py\n2. The opening fence and code language, eg: {fence[0]}python\n3. The start of search block: <<<<<<< SEARCH\n4. 
A contiguous chunk of lines to search for in the existing source code\n5. The dividing line: =======\n6. The lines to replace into the source code\n7. The end of the replace block: >>>>>>> REPLACE\n8. The closing fence: {fence[1]}\n\nEvery *SEARCH* section must *EXACTLY MATCH* the existing source code, character for character, including all comments, docstrings, etc.\n\nInclude *ALL* the code being searched and replaced!\n\nOnly *SEARCH/REPLACE* files that are *read-write*.\n\nIf you want to put code in a new file, use a *SEARCH/REPLACE block* with:\n- A new file path, including dir name if needed\n- An empty `SEARCH` section\n- The new file's contents in the `REPLACE` section\n\"\"\"\n\n files_content_prefix = \"These are the *read-write* files:\\n\"\n\n files_no_full_files = \"I am not sharing any *read-write* files yet.\"\n\n repo_content_prefix = \"\"\"Below here are summaries of other files present in this git repository.\nDo not propose changes to these files, they are *read-only*.\nTo make a file *read-write*, ask the user to *add it to the chat*.\n\"\"\"\n",
"path": "aider/coders/editblock_prompts.py"
}
] | 12_4 | python | import unittest
import sys
from pathlib import Path
class TestCodeBlockMovement(unittest.TestCase):
def test_code_block_moved(self):
file_path = Path('aider/coders/base_coder.py')
with open(file_path, 'r') as file:
lines = file.readlines()
expected_block = [
" repo_content = self.get_repo_map()\n",
" if repo_content:\n",
" if all_content:\n",
" all_content += \"\\n\"\n",
" all_content += repo_content\n"
]
# Buffer range
buffer = 2 # Number of lines to check before and after the expected range
# Function to check if block is within a range of lines
def is_block_within_range(start_line, end_line, block):
block_string = "".join(block)
range_string = "".join(lines[start_line - 1: end_line])
return block_string in range_string
# Original location (lines 322-327) with buffer
self.assertFalse(is_block_within_range(322 - buffer, 327 + buffer, expected_block))
# New location (lines 314-320) with buffer
self.assertTrue(is_block_within_range(314 - buffer, 320 + buffer, expected_block))
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCodeBlockMovement))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
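A note on record 12_4: the change is purely about ordering. Inside get_files_messages, the repo-map block now runs before the added-files block, so the repository summary precedes the read-write file contents in the prompt. A stripped-down sketch of the reordered assembly (placeholder strings, not the real Coder method):

def build_files_message(repo_map: str, files_content: str) -> str:
    all_content = ""

    # Repo map first -- the block the patch moved up.
    if repo_map:
        if all_content:
            all_content += "\n"
        all_content += repo_map

    # Added read-write files second.
    all_content += files_content
    return all_content

msg = build_files_message("REPO MAP ...", "FILES ...")
assert msg.index("REPO MAP") < msg.index("FILES")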
https://github.com/teamqurrent/aider | You need to modify the `cmd_add` method in the `Commands` class within `commands.py`. Specifically, replace args.split() with shlex.split(args) to correctly handle quoted filenames. Additionally, update the file existence check so that filenames with spaces are correctly identified and added to all_matched_files. This lets the application handle filenames enclosed in quotes properly, so such filenames are processed and added to the coder's file list. | d2acb8e | aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2023.5.7
charset-normalizer==3.1.0
frozenlist==1.3.3
gitdb==4.0.10
GitPython==3.1.31
idna==3.4
markdown-it-py==2.2.0
mdurl==0.1.2
multidict==6.0.4
openai==0.27.6
prompt-toolkit==3.0.38
Pygments==2.15.1
requests==2.30.0
rich==13.3.5
smmap==5.0.0
tqdm==4.65.0
urllib3==2.0.2
wcwidth==0.2.6
yarl==1.9.2
pytest==7.3.1
tiktoken==0.4.0
configargparse
PyYAML
backoff==2.2.1
networkx==3.1
diskcache==5.6.1
numpy==1.26.1
scipy==1.11.3
jsonschema==4.17.3
sounddevice==0.4.6
soundfile==0.12.1
pathspec==0.11.2
grep-ast==0.2.4
| python3.9 | 774589b | diff --git a/aider/commands.py b/aider/commands.py
--- a/aider/commands.py
+++ b/aider/commands.py
@@ -272,13 +272,12 @@ class Commands:
git_files = self.coder.repo.get_tracked_files() if self.coder.repo else []
all_matched_files = set()
- for word in args.split():
+ for word in shlex.split(args):
fname = Path(self.coder.root) / word
- if fname.exists():
- if fname.is_file():
- all_matched_files.add(str(fname))
- continue
- # else we fall through and glob will pickup all files within a dir
+ if fname.exists() and fname.is_file():
+ all_matched_files.add(str(fname))
+ continue
+ # an existing dir will fall through and get recursed by glob
matched_files = self.glob_filtered_to_repo(word)
if matched_files:
@@ -342,7 +341,7 @@ class Commands:
self.io.tool_output("Dropping all files from the chat session.")
self.coder.abs_fnames = set()
- for word in args.split():
+ for word in shlex.split(args):
matched_files = self.glob_filtered_to_repo(word)
if not matched_files:
diff --git a/tests/test_commands.py b/tests/test_commands.py
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -372,3 +372,33 @@ class TestCommands(TestCase):
commands.cmd_add(str(fname))
self.assertIn(str(fname.resolve()), coder.abs_fnames)
+
+ def test_cmd_add_abs_filename(self):
+ with ChdirTemporaryDirectory():
+ io = InputOutput(pretty=False, yes=False)
+ from aider.coders import Coder
+
+ coder = Coder.create(models.GPT35, None, io)
+ commands = Commands(io, coder)
+
+ fname = Path("file.txt")
+ fname.touch()
+
+ commands.cmd_add(str(fname.resolve()))
+
+ self.assertIn(str(fname.resolve()), coder.abs_fnames)
+
+ def test_cmd_add_quoted_filename(self):
+ with ChdirTemporaryDirectory():
+ io = InputOutput(pretty=False, yes=False)
+ from aider.coders import Coder
+
+ coder = Coder.create(models.GPT35, None, io)
+ commands = Commands(io, coder)
+
+ fname = Path("file with spaces.txt")
+ fname.touch()
+
+ commands.cmd_add(f'"{fname}"')
+
+ self.assertIn(str(fname.resolve()), coder.abs_fnames)
| [
{
"content": "import json\nimport shlex\nimport subprocess\nimport sys\nfrom pathlib import Path\n\nimport git\nfrom prompt_toolkit.completion import Completion\n\nfrom aider import prompts, voice\n\nfrom .dump import dump # noqa: F401\n\n\nclass Commands:\n voice = None\n\n def __init__(self, io, coder, voice_language=None):\n self.io = io\n self.coder = coder\n\n if voice_language == \"auto\":\n voice_language = None\n\n self.voice_language = voice_language\n self.tokenizer = coder.main_model.tokenizer\n\n def is_command(self, inp):\n if inp[0] == \"/\":\n return True\n\n def get_commands(self):\n commands = []\n for attr in dir(self):\n if attr.startswith(\"cmd_\"):\n commands.append(\"/\" + attr[4:])\n\n return commands\n\n def get_command_completions(self, cmd_name, partial):\n cmd_completions_method_name = f\"completions_{cmd_name}\"\n cmd_completions_method = getattr(self, cmd_completions_method_name, None)\n if cmd_completions_method:\n for completion in cmd_completions_method(partial):\n yield completion\n\n def do_run(self, cmd_name, args):\n cmd_method_name = f\"cmd_{cmd_name}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n return cmd_method(args)\n else:\n self.io.tool_output(f\"Error: Command {cmd_name} not found.\")\n\n def matching_commands(self, inp):\n words = inp.strip().split()\n if not words:\n return\n\n first_word = words[0]\n rest_inp = inp[len(words[0]) :]\n\n all_commands = self.get_commands()\n matching_commands = [cmd for cmd in all_commands if cmd.startswith(first_word)]\n return matching_commands, first_word, rest_inp\n\n def run(self, inp):\n res = self.matching_commands(inp)\n if res is None:\n return\n matching_commands, first_word, rest_inp = res\n if len(matching_commands) == 1:\n return self.do_run(matching_commands[0][1:], rest_inp)\n elif len(matching_commands) > 1:\n self.io.tool_error(f\"Ambiguous command: {', '.join(matching_commands)}\")\n else:\n self.io.tool_error(f\"Invalid command: {first_word}\")\n\n # any method called cmd_xxx becomes a command automatically.\n # each one must take an args param.\n\n def cmd_commit(self, args):\n \"Commit edits to the repo made outside the chat (commit message optional)\"\n\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.repo.is_dirty():\n self.io.tool_error(\"No more changes to commit.\")\n return\n\n commit_message = args.strip()\n self.coder.repo.commit(message=commit_message)\n\n def cmd_clear(self, args):\n \"Clear the chat history\"\n\n self.coder.done_messages = []\n self.coder.cur_messages = []\n\n def cmd_tokens(self, args):\n \"Report on the number of tokens used by the current chat context\"\n\n res = []\n\n # system messages\n msgs = [\n dict(role=\"system\", content=self.coder.gpt_prompts.main_system),\n dict(role=\"system\", content=self.coder.gpt_prompts.system_reminder),\n ]\n tokens = len(self.tokenizer.encode(json.dumps(msgs)))\n res.append((tokens, \"system messages\", \"\"))\n\n # chat history\n msgs = self.coder.done_messages + self.coder.cur_messages\n if msgs:\n msgs = [dict(role=\"dummy\", content=msg) for msg in msgs]\n msgs = json.dumps(msgs)\n tokens = len(self.tokenizer.encode(msgs))\n res.append((tokens, \"chat history\", \"use /clear to clear\"))\n\n # repo map\n other_files = set(self.coder.get_all_abs_files()) - set(self.coder.abs_fnames)\n if self.coder.repo_map:\n repo_content = self.coder.repo_map.get_repo_map(self.coder.abs_fnames, other_files)\n if repo_content:\n tokens = 
len(self.tokenizer.encode(repo_content))\n res.append((tokens, \"repository map\", \"use --map-tokens to resize\"))\n\n # files\n for fname in self.coder.abs_fnames:\n relative_fname = self.coder.get_rel_fname(fname)\n content = self.io.read_text(fname)\n # approximate\n content = f\"{relative_fname}\\n```\\n\" + content + \"```\\n\"\n tokens = len(self.tokenizer.encode(content))\n res.append((tokens, f\"{relative_fname}\", \"use /drop to drop from chat\"))\n\n self.io.tool_output(\"Approximate context window usage, in tokens:\")\n self.io.tool_output()\n\n width = 8\n cost_width = 7\n\n def fmt(v):\n return format(int(v), \",\").rjust(width)\n\n col_width = max(len(row[1]) for row in res)\n\n cost_pad = \" \" * cost_width\n total = 0\n total_cost = 0.0\n for tk, msg, tip in res:\n total += tk\n cost = tk * (self.coder.main_model.prompt_price / 1000)\n total_cost += cost\n msg = msg.ljust(col_width)\n self.io.tool_output(f\"${cost:5.2f} {fmt(tk)} {msg} {tip}\")\n\n self.io.tool_output(\"=\" * (width + cost_width + 1))\n self.io.tool_output(f\"${total_cost:5.2f} {fmt(total)} tokens total\")\n\n limit = self.coder.main_model.max_context_tokens\n remaining = limit - total\n if remaining > 1024:\n self.io.tool_output(f\"{cost_pad}{fmt(remaining)} tokens remaining in context window\")\n elif remaining > 0:\n self.io.tool_error(\n f\"{cost_pad}{fmt(remaining)} tokens remaining in context window (use /drop or\"\n \" /clear to make space)\"\n )\n else:\n self.io.tool_error(f\"{cost_pad}{fmt(remaining)} tokens remaining, window exhausted!\")\n self.io.tool_output(f\"{cost_pad}{fmt(limit)} tokens max context window size\")\n\n def cmd_undo(self, args):\n \"Undo the last git commit if it was done by aider\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if self.coder.repo.is_dirty():\n self.io.tool_error(\n \"The repository has uncommitted changes. Please commit or stash them before\"\n \" undoing.\"\n )\n return\n\n local_head = self.coder.repo.repo.git.rev_parse(\"HEAD\")\n current_branch = self.coder.repo.repo.active_branch.name\n try:\n remote_head = self.coder.repo.repo.git.rev_parse(f\"origin/{current_branch}\")\n has_origin = True\n except git.exc.GitCommandError:\n has_origin = False\n\n if has_origin:\n if local_head == remote_head:\n self.io.tool_error(\n \"The last commit has already been pushed to the origin. 
Undoing is not\"\n \" possible.\"\n )\n return\n\n last_commit = self.coder.repo.repo.head.commit\n if (\n not last_commit.message.startswith(\"aider:\")\n or last_commit.hexsha[:7] != self.coder.last_aider_commit_hash\n ):\n self.io.tool_error(\"The last commit was not made by aider in this chat session.\")\n return\n self.coder.repo.repo.git.reset(\"--hard\", \"HEAD~1\")\n self.io.tool_output(\n f\"{last_commit.message.strip()}\\n\"\n f\"The above commit {self.coder.last_aider_commit_hash} \"\n \"was reset and removed from git.\\n\"\n )\n\n if self.coder.main_model.send_undo_reply:\n return prompts.undo_command_reply\n\n def cmd_diff(self, args):\n \"Display the diff of the last aider commit\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.last_aider_commit_hash:\n self.io.tool_error(\"No previous aider commit found.\")\n return\n\n commits = f\"{self.coder.last_aider_commit_hash}~1\"\n diff = self.coder.repo.diff_commits(\n self.coder.pretty,\n commits,\n self.coder.last_aider_commit_hash,\n )\n\n # don't use io.tool_output() because we don't want to log or further colorize\n print(diff)\n\n def completions_add(self, partial):\n files = set(self.coder.get_all_relative_files())\n files = files - set(self.coder.get_inchat_relative_files())\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def glob_filtered_to_repo(self, pattern):\n raw_matched_files = list(Path(self.coder.root).glob(pattern))\n\n matched_files = []\n for fn in raw_matched_files:\n matched_files += expand_subdir(fn)\n\n matched_files = [str(Path(fn).relative_to(self.coder.root)) for fn in matched_files]\n\n # if repo, filter against it\n if self.coder.repo:\n git_files = self.coder.repo.get_tracked_files()\n matched_files = [fn for fn in matched_files if str(fn) in git_files]\n\n res = list(map(str, matched_files))\n return res\n\n def cmd_add(self, args):\n \"Add matching files to the chat session using glob patterns\"\n\n added_fnames = []\n git_added = []\n git_files = self.coder.repo.get_tracked_files() if self.coder.repo else []\n\n all_matched_files = set()\n for word in args.split():\n fname = Path(self.coder.root) / word\n if fname.exists():\n if fname.is_file():\n all_matched_files.add(str(fname))\n continue\n # else we fall through and glob will pickup all files within a dir\n\n matched_files = self.glob_filtered_to_repo(word)\n if matched_files:\n all_matched_files.update(matched_files)\n continue\n\n if self.io.confirm_ask(f\"No files matched '{word}'. 
Do you want to create {fname}?\"):\n fname.touch()\n all_matched_files.add(str(fname))\n\n for matched_file in all_matched_files:\n abs_file_path = self.coder.abs_root_path(matched_file)\n\n if not abs_file_path.startswith(self.coder.root):\n self.io.tool_error(\n f\"Can not add {abs_file_path}, which is not within {self.coder.root}\"\n )\n continue\n\n if self.coder.repo and matched_file not in git_files:\n self.coder.repo.repo.git.add(abs_file_path)\n git_added.append(matched_file)\n\n if abs_file_path in self.coder.abs_fnames:\n self.io.tool_error(f\"{matched_file} is already in the chat\")\n else:\n content = self.io.read_text(abs_file_path)\n if content is None:\n self.io.tool_error(f\"Unable to read {matched_file}\")\n else:\n self.coder.abs_fnames.add(abs_file_path)\n self.io.tool_output(f\"Added {matched_file} to the chat\")\n added_fnames.append(matched_file)\n\n if self.coder.repo and git_added:\n git_added = \" \".join(git_added)\n commit_message = f\"aider: Added {git_added}\"\n self.coder.repo.commit(message=commit_message)\n\n if not added_fnames:\n return\n\n # only reply if there's been some chatting since the last edit\n if not self.coder.cur_messages:\n return\n\n reply = prompts.added_files.format(fnames=\", \".join(added_fnames))\n return reply\n\n def completions_drop(self, partial):\n files = self.coder.get_inchat_relative_files()\n\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def cmd_drop(self, args):\n \"Remove matching files from the chat session\"\n\n if not args.strip():\n self.io.tool_output(\"Dropping all files from the chat session.\")\n self.coder.abs_fnames = set()\n\n for word in args.split():\n matched_files = self.glob_filtered_to_repo(word)\n\n if not matched_files:\n self.io.tool_error(f\"No files matched '{word}'\")\n\n for matched_file in matched_files:\n abs_fname = self.coder.abs_root_path(matched_file)\n if abs_fname in self.coder.abs_fnames:\n self.coder.abs_fnames.remove(abs_fname)\n self.io.tool_output(f\"Removed {matched_file} from the chat\")\n\n def cmd_git(self, args):\n \"Run a git command\"\n combined_output = None\n try:\n parsed_args = shlex.split(\"git \" + args)\n env = dict(GIT_EDITOR=\"true\", **subprocess.os.environ)\n result = subprocess.run(\n parsed_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, env=env\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running git command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n def cmd_run(self, args):\n \"Run a shell command and optionally add the output to the chat\"\n combined_output = None\n try:\n parsed_args = shlex.split(args)\n result = subprocess.run(\n parsed_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n if self.io.confirm_ask(\"Add the output to the chat?\", default=\"y\"):\n for line in combined_output.splitlines():\n self.io.tool_output(line, log_only=True)\n\n msg = prompts.run_output.format(\n command=args,\n output=combined_output,\n )\n return msg\n\n def cmd_exit(self, args):\n \"Exit the application\"\n sys.exit()\n\n def cmd_ls(self, args):\n \"List all known files and those included in the chat session\"\n\n files = self.coder.get_all_relative_files()\n\n other_files 
= []\n chat_files = []\n for file in files:\n abs_file_path = self.coder.abs_root_path(file)\n if abs_file_path in self.coder.abs_fnames:\n chat_files.append(file)\n else:\n other_files.append(file)\n\n if not chat_files and not other_files:\n self.io.tool_output(\"\\nNo files in chat or git repo.\")\n return\n\n if chat_files:\n self.io.tool_output(\"Files in chat:\\n\")\n for file in chat_files:\n self.io.tool_output(f\" {file}\")\n\n if other_files:\n self.io.tool_output(\"\\nRepo files not in the chat:\\n\")\n for file in other_files:\n self.io.tool_output(f\" {file}\")\n\n def cmd_help(self, args):\n \"Show help about all commands\"\n commands = sorted(self.get_commands())\n for cmd in commands:\n cmd_method_name = f\"cmd_{cmd[1:]}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n description = cmd_method.__doc__\n self.io.tool_output(f\"{cmd} {description}\")\n else:\n self.io.tool_output(f\"{cmd} No description available.\")\n\n def cmd_voice(self, args):\n \"Record and transcribe voice input\"\n\n if not self.voice:\n try:\n self.voice = voice.Voice()\n except voice.SoundDeviceError:\n self.io.tool_error(\"Unable to import `sounddevice`, is portaudio installed?\")\n return\n\n history_iter = self.io.get_input_history()\n\n history = []\n size = 0\n for line in history_iter:\n if line.startswith(\"/\"):\n continue\n if line in history:\n continue\n if size + len(line) > 1024:\n break\n size += len(line)\n history.append(line)\n\n history.reverse()\n history = \"\\n\".join(history)\n\n text = self.voice.record_and_transcribe(history, language=self.voice_language)\n if text:\n self.io.add_to_input_history(text)\n print()\n self.io.user_input(text, log_only=False)\n print()\n\n return text\n\n\ndef expand_subdir(file_path):\n file_path = Path(file_path)\n if file_path.is_file():\n yield file_path\n return\n\n if file_path.is_dir():\n for file in file_path.rglob(\"*\"):\n if file.is_file():\n yield str(file)\n",
"path": "aider/commands.py"
},
{
"content": "import codecs\nimport os\nimport shutil\nimport sys\nimport tempfile\nfrom io import StringIO\nfrom pathlib import Path\nfrom unittest import TestCase\n\nimport git\n\nfrom aider import models\nfrom aider.coders import Coder\nfrom aider.commands import Commands\nfrom aider.dump import dump # noqa: F401\nfrom aider.io import InputOutput\nfrom tests.utils import ChdirTemporaryDirectory, GitTemporaryDirectory, make_repo\n\n\nclass TestCommands(TestCase):\n def setUp(self):\n self.original_cwd = os.getcwd()\n self.tempdir = tempfile.mkdtemp()\n os.chdir(self.tempdir)\n\n def tearDown(self):\n os.chdir(self.original_cwd)\n shutil.rmtree(self.tempdir, ignore_errors=True)\n\n def test_cmd_add(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with 'foo.txt' and 'bar.txt' as a single string\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Check if both files have been created in the temporary directory\n self.assertTrue(os.path.exists(\"foo.txt\"))\n self.assertTrue(os.path.exists(\"bar.txt\"))\n\n def test_cmd_add_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create some test files\n with open(\"test1.py\", \"w\") as f:\n f.write(\"print('test1')\")\n with open(\"test2.py\", \"w\") as f:\n f.write(\"print('test2')\")\n with open(\"test.txt\", \"w\") as f:\n f.write(\"test\")\n\n # Call the cmd_add method with a glob pattern\n commands.cmd_add(\"*.py\")\n\n # Check if the Python files have been added to the chat session\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n # Check if the text file has not been added to the chat session\n self.assertNotIn(str(Path(\"test.txt\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_no_match(self):\n # yes=False means we will *not* create the file when it is not found\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with a non-existent file pattern\n commands.cmd_add(\"*.nonexistent\")\n\n # Check if no files have been added to the chat session\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_no_match_but_make_it(self):\n # yes=True means we *will* create the file when it is not found\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n fname = Path(\"[abc].nonexistent\")\n\n # Call the cmd_add method with a non-existent file pattern\n commands.cmd_add(str(fname))\n\n # Check if no files have been added to the chat session\n self.assertEqual(len(coder.abs_fnames), 1)\n self.assertTrue(fname.exists())\n\n def test_cmd_add_drop_directory(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a directory and add files to it using pathlib\n Path(\"test_dir\").mkdir()\n Path(\"test_dir/another_dir\").mkdir()\n 
Path(\"test_dir/test_file1.txt\").write_text(\"Test file 1\")\n Path(\"test_dir/test_file2.txt\").write_text(\"Test file 2\")\n Path(\"test_dir/another_dir/test_file.txt\").write_text(\"Test file 3\")\n\n # Call the cmd_add method with a directory\n commands.cmd_add(\"test_dir test_dir/test_file2.txt\")\n\n # Check if the files have been added to the chat session\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames)\n\n commands.cmd_drop(\"test_dir/another_dir\")\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertNotIn(\n str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames\n )\n\n # Issue #139 /add problems when cwd != git_root\n\n # remember the proper abs path to this file\n abs_fname = str(Path(\"test_dir/another_dir/test_file.txt\").resolve())\n\n # chdir to someplace other than git_root\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add it via it's git_root referenced name\n commands.cmd_add(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertIn(abs_fname, coder.abs_fnames)\n\n # drop it via it's git_root referenced name\n commands.cmd_drop(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertNotIn(abs_fname, coder.abs_fnames)\n\n def test_cmd_drop_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n subdir = Path(\"subdir\")\n subdir.mkdir()\n (subdir / \"subtest1.py\").touch()\n (subdir / \"subtest2.py\").touch()\n\n Path(\"test1.py\").touch()\n Path(\"test2.py\").touch()\n\n # Add some files to the chat session\n commands.cmd_add(\"*.py\")\n\n self.assertEqual(len(coder.abs_fnames), 2)\n\n # Call the cmd_drop method with a glob pattern\n commands.cmd_drop(\"*2.py\")\n\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertNotIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_bad_encoding(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a new file foo.bad which will fail to decode as utf-8\n with codecs.open(\"foo.bad\", \"w\", encoding=\"iso-8859-15\") as f:\n f.write(\"ÆØÅ\") # Characters not present in utf-8\n\n commands.cmd_add(\"foo.bad\")\n\n self.assertEqual(coder.abs_fnames, set())\n\n def test_cmd_git(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n with GitTemporaryDirectory() as tempdir:\n # Create a file in the temporary directory\n with open(f\"{tempdir}/test.txt\", \"w\") as f:\n f.write(\"test\")\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Run the cmd_git method with the arguments \"commit -a -m msg\"\n commands.cmd_git(\"add test.txt\")\n commands.cmd_git(\"commit -a -m msg\")\n\n # Check if the file has been committed to the repository\n repo = git.Repo(tempdir)\n files_in_repo = 
repo.git.ls_files()\n self.assertIn(\"test.txt\", files_in_repo)\n\n def test_cmd_tokens(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Redirect the standard output to an instance of io.StringIO\n stdout = StringIO()\n sys.stdout = stdout\n\n commands.cmd_tokens(\"\")\n\n # Reset the standard output\n sys.stdout = sys.__stdout__\n\n # Get the console output\n console_output = stdout.getvalue()\n\n self.assertIn(\"foo.txt\", console_output)\n self.assertIn(\"bar.txt\", console_output)\n\n def test_cmd_add_from_subdir(self):\n repo = git.Repo.init()\n repo.config_writer().set_value(\"user\", \"name\", \"Test User\").release()\n repo.config_writer().set_value(\"user\", \"email\", \"testuser@example.com\").release()\n\n # Create three empty files and add them to the git repository\n filenames = [\"one.py\", Path(\"subdir\") / \"two.py\", Path(\"anotherdir\") / \"three.py\"]\n for filename in filenames:\n file_path = Path(filename)\n file_path.parent.mkdir(parents=True, exist_ok=True)\n file_path.touch()\n repo.git.add(str(file_path))\n repo.git.commit(\"-m\", \"added\")\n\n filenames = [str(Path(fn).resolve()) for fn in filenames]\n\n ###\n\n os.chdir(\"subdir\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # this should get added\n commands.cmd_add(str(Path(\"anotherdir\") / \"three.py\"))\n\n # this should add one.py\n commands.cmd_add(\"*.py\")\n\n self.assertIn(filenames[0], coder.abs_fnames)\n self.assertNotIn(filenames[1], coder.abs_fnames)\n self.assertIn(filenames[2], coder.abs_fnames)\n\n def test_cmd_add_from_subdir_again(self):\n with GitTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add a file that is in the side_dir\n with open(\"temp.txt\", \"w\"):\n pass\n\n # this was blowing up with GitCommandError, per:\n # https://github.com/paul-gauthier/aider/issues/201\n commands.cmd_add(\"temp.txt\")\n\n def test_cmd_commit(self):\n with GitTemporaryDirectory():\n fname = \"test.txt\"\n with open(fname, \"w\") as f:\n f.write(\"test\")\n repo = git.Repo()\n repo.git.add(fname)\n repo.git.commit(\"-m\", \"initial\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n self.assertFalse(repo.is_dirty())\n with open(fname, \"w\") as f:\n f.write(\"new\")\n self.assertTrue(repo.is_dirty())\n\n commit_message = \"Test commit message\"\n commands.cmd_commit(commit_message)\n self.assertFalse(repo.is_dirty())\n\n def test_cmd_add_from_outside_root(self):\n with ChdirTemporaryDirectory() as tmp_dname:\n root = Path(\"root\")\n root.mkdir()\n os.chdir(str(root))\n\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n outside_file = Path(tmp_dname) / \"outside.txt\"\n outside_file.touch()\n\n # This should not be allowed!\n # https://github.com/paul-gauthier/aider/issues/178\n commands.cmd_add(\"../outside.txt\")\n\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_from_outside_git(self):\n with ChdirTemporaryDirectory() as tmp_dname:\n root = 
Path(\"root\")\n root.mkdir()\n os.chdir(str(root))\n\n make_repo()\n\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n outside_file = Path(tmp_dname) / \"outside.txt\"\n outside_file.touch()\n\n # This should not be allowed!\n # It was blowing up with GitCommandError, per:\n # https://github.com/paul-gauthier/aider/issues/178\n commands.cmd_add(\"../outside.txt\")\n\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_filename_with_special_chars(self):\n with ChdirTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n fname = Path(\"with[brackets].txt\")\n fname.touch()\n\n commands.cmd_add(str(fname))\n\n self.assertIn(str(fname.resolve()), coder.abs_fnames)\n",
"path": "tests/test_commands.py"
}
] | 12_5 | python | import os
import shutil
import sys
import tempfile
import unittest
from pathlib import Path
from unittest import TestCase
class TestCommands(TestCase):
def setUp(self):
self.original_cwd = os.getcwd()
self.tempdir = tempfile.mkdtemp()
os.chdir(self.tempdir)
def tearDown(self):
os.chdir(self.original_cwd)
shutil.rmtree(self.tempdir, ignore_errors=True)
def test_cmd_add_quoted_filename(self):
from aider import models
from aider.commands import Commands
from aider.io import InputOutput
from tests.utils import ChdirTemporaryDirectory
with ChdirTemporaryDirectory():
io = InputOutput(pretty=False, yes=False)
from aider.coders import Coder
coder = Coder.create(models.GPT35, None, io)
commands = Commands(io, coder)
fname = Path("file with spaces.txt")
fname.touch()
commands.cmd_add(f'"{fname}"')
self.assertIn(str(fname.resolve()), coder.abs_fnames)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCommands))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
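The test script above (row 12_5) exercises `/add` with a quoted filename containing spaces; the row shows the test but the fix itself is buried in the long `modified_files` field. A minimal sketch of one way to tokenize the arguments so quoting survives — assuming a `shlex`-style split; `split_add_args` is a hypothetical helper name, not code from the aider repo:

```python
# Minimal sketch: split /add arguments with shell-style quoting so that
# '"file with spaces.txt"' stays one token instead of three.
# `split_add_args` is a hypothetical helper, not the repo's actual code.
import shlex

def split_add_args(args: str) -> list:
    return shlex.split(args)

# Usage: a quoted name with spaces is preserved; unquoted names still split.
assert split_add_args('"file with spaces.txt"') == ["file with spaces.txt"]
assert split_add_args("foo.txt bar.txt") == ["foo.txt", "bar.txt"]
```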
https://github.com/teamqurrent/aider | The objective is to refine how the `cmd_add` method in `commands.py` handles file paths and glob patterns. Streamline the logic that populates `all_matched_files` by simplifying the existence check for literal paths and the handling of glob patterns, and restructure the code so it is clearer and more efficient, particularly in how file paths are processed and how missing files are created on user confirmation. | 399d86d | aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2023.5.7
charset-normalizer==3.1.0
frozenlist==1.3.3
gitdb==4.0.10
GitPython==3.1.31
idna==3.4
markdown-it-py==2.2.0
mdurl==0.1.2
multidict==6.0.4
openai==0.27.6
prompt-toolkit==3.0.38
Pygments==2.15.1
requests==2.30.0
rich==13.3.5
smmap==5.0.0
tqdm==4.65.0
urllib3==2.0.2
wcwidth==0.2.6
yarl==1.9.2
pytest==7.3.1
tiktoken==0.4.0
configargparse
PyYAML
backoff==2.2.1
networkx==3.1
diskcache==5.6.1
numpy==1.26.1
scipy==1.11.3
jsonschema==4.17.3
sounddevice==0.4.6
soundfile==0.12.1
pathspec==0.11.2
grep-ast==0.2.4
| python3.9 | dc19a1f | diff --git a/aider/commands.py b/aider/commands.py
--- a/aider/commands.py
+++ b/aider/commands.py
@@ -273,25 +273,21 @@ class Commands:
all_matched_files = set()
for word in args.split():
+ fname = Path(self.coder.root) / word
+ if fname.exists():
+ if fname.is_file():
+ all_matched_files.add(str(fname))
+ continue
+ # else we fall through and glob will pickup all files within a dir
+
matched_files = self.glob_filtered_to_repo(word)
+ if matched_files:
+ all_matched_files.update(matched_files)
+ continue
- if not matched_files:
- if any(char in word for char in "*?[]"):
- self.io.tool_error(f"No files to add matching pattern: {word}")
- else:
- fname = Path(self.coder.root) / word
- if fname.exists():
- if fname.is_file():
- matched_files = [str(fname)]
- else:
- self.io.tool_error(f"Unable to add: {word}")
- elif self.io.confirm_ask(
- f"No files matched '{word}'. Do you want to create {fname}?"
- ):
- fname.touch()
- matched_files = [str(fname)]
-
- all_matched_files.update(matched_files)
+ if self.io.confirm_ask(f"No files matched '{word}'. Do you want to create {fname}?"):
+ fname.touch()
+ all_matched_files.add(str(fname))
for matched_file in all_matched_files:
abs_file_path = self.coder.abs_root_path(matched_file)
diff --git a/tests/test_commands.py b/tests/test_commands.py
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -69,8 +69,8 @@ class TestCommands(TestCase):
self.assertNotIn(str(Path("test.txt").resolve()), coder.abs_fnames)
def test_cmd_add_no_match(self):
- # Initialize the Commands and InputOutput objects
- io = InputOutput(pretty=False, yes=True)
+ # yes=False means we will *not* create the file when it is not found
+ io = InputOutput(pretty=False, yes=False)
from aider.coders import Coder
coder = Coder.create(models.GPT35, None, io)
@@ -82,6 +82,23 @@ class TestCommands(TestCase):
# Check if no files have been added to the chat session
self.assertEqual(len(coder.abs_fnames), 0)
+ def test_cmd_add_no_match_but_make_it(self):
+ # yes=True means we *will* create the file when it is not found
+ io = InputOutput(pretty=False, yes=True)
+ from aider.coders import Coder
+
+ coder = Coder.create(models.GPT35, None, io)
+ commands = Commands(io, coder)
+
+ fname = Path("*.nonexistent")
+
+ # Call the cmd_add method with a non-existent file pattern
+ commands.cmd_add(str(fname))
+
+ # Check if no files have been added to the chat session
+ self.assertEqual(len(coder.abs_fnames), 1)
+ self.assertTrue(fname.exists())
+
def test_cmd_add_drop_directory(self):
# Initialize the Commands and InputOutput objects
io = InputOutput(pretty=False, yes=False)
@@ -255,6 +272,25 @@ class TestCommands(TestCase):
self.assertNotIn(filenames[1], coder.abs_fnames)
self.assertIn(filenames[2], coder.abs_fnames)
+ def test_cmd_add_from_subdir_again(self):
+ with GitTemporaryDirectory():
+ io = InputOutput(pretty=False, yes=False)
+ from aider.coders import Coder
+
+ coder = Coder.create(models.GPT35, None, io)
+ commands = Commands(io, coder)
+
+ Path("side_dir").mkdir()
+ os.chdir("side_dir")
+
+ # add a file that is in the side_dir
+ with open("temp.txt", "w"):
+ pass
+
+ # this was blowing up with GitCommandError, per:
+ # https://github.com/paul-gauthier/aider/issues/201
+ commands.cmd_add("temp.txt")
+
def test_cmd_commit(self):
with GitTemporaryDirectory():
fname = "test.txt"
@@ -277,25 +313,6 @@ class TestCommands(TestCase):
commands.cmd_commit(commit_message)
self.assertFalse(repo.is_dirty())
- def test_cmd_add_from_sub_dir(self):
- with GitTemporaryDirectory():
- io = InputOutput(pretty=False, yes=False)
- from aider.coders import Coder
-
- coder = Coder.create(models.GPT35, None, io)
- commands = Commands(io, coder)
-
- Path("side_dir").mkdir()
- os.chdir("side_dir")
-
- # add a file that is in the side_dir
- with open("temp.txt", "w"):
- pass
-
- # this was blowing up with GitCommandError, per:
- # https://github.com/paul-gauthier/aider/issues/201
- commands.cmd_add("temp.txt")
-
def test_cmd_add_from_outside_root(self):
with ChdirTemporaryDirectory() as tmp_dname:
root = Path("root")
@@ -340,3 +357,18 @@ class TestCommands(TestCase):
commands.cmd_add("../outside.txt")
self.assertEqual(len(coder.abs_fnames), 0)
+
+ def test_cmd_add_filename_with_special_chars(self):
+ with ChdirTemporaryDirectory():
+ io = InputOutput(pretty=False, yes=False)
+ from aider.coders import Coder
+
+ coder = Coder.create(models.GPT35, None, io)
+ commands = Commands(io, coder)
+
+ fname = Path("with[brackets].txt")
+ fname.touch()
+
+ commands.cmd_add(str(fname))
+
+ self.assertIn(str(fname.resolve()), coder.abs_fnames)
| [
{
"content": "import json\nimport shlex\nimport subprocess\nimport sys\nfrom pathlib import Path\n\nimport git\nfrom prompt_toolkit.completion import Completion\n\nfrom aider import prompts, voice\n\nfrom .dump import dump # noqa: F401\n\n\nclass Commands:\n voice = None\n\n def __init__(self, io, coder, voice_language=None):\n self.io = io\n self.coder = coder\n\n if voice_language == \"auto\":\n voice_language = None\n\n self.voice_language = voice_language\n self.tokenizer = coder.main_model.tokenizer\n\n def is_command(self, inp):\n if inp[0] == \"/\":\n return True\n\n def get_commands(self):\n commands = []\n for attr in dir(self):\n if attr.startswith(\"cmd_\"):\n commands.append(\"/\" + attr[4:])\n\n return commands\n\n def get_command_completions(self, cmd_name, partial):\n cmd_completions_method_name = f\"completions_{cmd_name}\"\n cmd_completions_method = getattr(self, cmd_completions_method_name, None)\n if cmd_completions_method:\n for completion in cmd_completions_method(partial):\n yield completion\n\n def do_run(self, cmd_name, args):\n cmd_method_name = f\"cmd_{cmd_name}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n return cmd_method(args)\n else:\n self.io.tool_output(f\"Error: Command {cmd_name} not found.\")\n\n def matching_commands(self, inp):\n words = inp.strip().split()\n if not words:\n return\n\n first_word = words[0]\n rest_inp = inp[len(words[0]) :]\n\n all_commands = self.get_commands()\n matching_commands = [cmd for cmd in all_commands if cmd.startswith(first_word)]\n return matching_commands, first_word, rest_inp\n\n def run(self, inp):\n res = self.matching_commands(inp)\n if res is None:\n return\n matching_commands, first_word, rest_inp = res\n if len(matching_commands) == 1:\n return self.do_run(matching_commands[0][1:], rest_inp)\n elif len(matching_commands) > 1:\n self.io.tool_error(f\"Ambiguous command: {', '.join(matching_commands)}\")\n else:\n self.io.tool_error(f\"Invalid command: {first_word}\")\n\n # any method called cmd_xxx becomes a command automatically.\n # each one must take an args param.\n\n def cmd_commit(self, args):\n \"Commit edits to the repo made outside the chat (commit message optional)\"\n\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.repo.is_dirty():\n self.io.tool_error(\"No more changes to commit.\")\n return\n\n commit_message = args.strip()\n self.coder.repo.commit(message=commit_message)\n\n def cmd_clear(self, args):\n \"Clear the chat history\"\n\n self.coder.done_messages = []\n self.coder.cur_messages = []\n\n def cmd_tokens(self, args):\n \"Report on the number of tokens used by the current chat context\"\n\n res = []\n\n # system messages\n msgs = [\n dict(role=\"system\", content=self.coder.gpt_prompts.main_system),\n dict(role=\"system\", content=self.coder.gpt_prompts.system_reminder),\n ]\n tokens = len(self.tokenizer.encode(json.dumps(msgs)))\n res.append((tokens, \"system messages\", \"\"))\n\n # chat history\n msgs = self.coder.done_messages + self.coder.cur_messages\n if msgs:\n msgs = [dict(role=\"dummy\", content=msg) for msg in msgs]\n msgs = json.dumps(msgs)\n tokens = len(self.tokenizer.encode(msgs))\n res.append((tokens, \"chat history\", \"use /clear to clear\"))\n\n # repo map\n other_files = set(self.coder.get_all_abs_files()) - set(self.coder.abs_fnames)\n if self.coder.repo_map:\n repo_content = self.coder.repo_map.get_repo_map(self.coder.abs_fnames, other_files)\n if repo_content:\n tokens = 
len(self.tokenizer.encode(repo_content))\n res.append((tokens, \"repository map\", \"use --map-tokens to resize\"))\n\n # files\n for fname in self.coder.abs_fnames:\n relative_fname = self.coder.get_rel_fname(fname)\n content = self.io.read_text(fname)\n # approximate\n content = f\"{relative_fname}\\n```\\n\" + content + \"```\\n\"\n tokens = len(self.tokenizer.encode(content))\n res.append((tokens, f\"{relative_fname}\", \"use /drop to drop from chat\"))\n\n self.io.tool_output(\"Approximate context window usage, in tokens:\")\n self.io.tool_output()\n\n width = 8\n cost_width = 7\n\n def fmt(v):\n return format(int(v), \",\").rjust(width)\n\n col_width = max(len(row[1]) for row in res)\n\n cost_pad = \" \" * cost_width\n total = 0\n total_cost = 0.0\n for tk, msg, tip in res:\n total += tk\n cost = tk * (self.coder.main_model.prompt_price / 1000)\n total_cost += cost\n msg = msg.ljust(col_width)\n self.io.tool_output(f\"${cost:5.2f} {fmt(tk)} {msg} {tip}\")\n\n self.io.tool_output(\"=\" * (width + cost_width + 1))\n self.io.tool_output(f\"${total_cost:5.2f} {fmt(total)} tokens total\")\n\n limit = self.coder.main_model.max_context_tokens\n remaining = limit - total\n if remaining > 1024:\n self.io.tool_output(f\"{cost_pad}{fmt(remaining)} tokens remaining in context window\")\n elif remaining > 0:\n self.io.tool_error(\n f\"{cost_pad}{fmt(remaining)} tokens remaining in context window (use /drop or\"\n \" /clear to make space)\"\n )\n else:\n self.io.tool_error(f\"{cost_pad}{fmt(remaining)} tokens remaining, window exhausted!\")\n self.io.tool_output(f\"{cost_pad}{fmt(limit)} tokens max context window size\")\n\n def cmd_undo(self, args):\n \"Undo the last git commit if it was done by aider\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if self.coder.repo.is_dirty():\n self.io.tool_error(\n \"The repository has uncommitted changes. Please commit or stash them before\"\n \" undoing.\"\n )\n return\n\n local_head = self.coder.repo.repo.git.rev_parse(\"HEAD\")\n current_branch = self.coder.repo.repo.active_branch.name\n try:\n remote_head = self.coder.repo.repo.git.rev_parse(f\"origin/{current_branch}\")\n has_origin = True\n except git.exc.GitCommandError:\n has_origin = False\n\n if has_origin:\n if local_head == remote_head:\n self.io.tool_error(\n \"The last commit has already been pushed to the origin. 
Undoing is not\"\n \" possible.\"\n )\n return\n\n last_commit = self.coder.repo.repo.head.commit\n if (\n not last_commit.message.startswith(\"aider:\")\n or last_commit.hexsha[:7] != self.coder.last_aider_commit_hash\n ):\n self.io.tool_error(\"The last commit was not made by aider in this chat session.\")\n return\n self.coder.repo.repo.git.reset(\"--hard\", \"HEAD~1\")\n self.io.tool_output(\n f\"{last_commit.message.strip()}\\n\"\n f\"The above commit {self.coder.last_aider_commit_hash} \"\n \"was reset and removed from git.\\n\"\n )\n\n if self.coder.main_model.send_undo_reply:\n return prompts.undo_command_reply\n\n def cmd_diff(self, args):\n \"Display the diff of the last aider commit\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.last_aider_commit_hash:\n self.io.tool_error(\"No previous aider commit found.\")\n return\n\n commits = f\"{self.coder.last_aider_commit_hash}~1\"\n diff = self.coder.repo.diff_commits(\n self.coder.pretty,\n commits,\n self.coder.last_aider_commit_hash,\n )\n\n # don't use io.tool_output() because we don't want to log or further colorize\n print(diff)\n\n def completions_add(self, partial):\n files = set(self.coder.get_all_relative_files())\n files = files - set(self.coder.get_inchat_relative_files())\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def glob_filtered_to_repo(self, pattern):\n raw_matched_files = list(Path(self.coder.root).glob(pattern))\n\n matched_files = []\n for fn in raw_matched_files:\n matched_files += expand_subdir(fn)\n\n matched_files = [str(Path(fn).relative_to(self.coder.root)) for fn in matched_files]\n\n # if repo, filter against it\n if self.coder.repo:\n git_files = self.coder.repo.get_tracked_files()\n matched_files = [fn for fn in matched_files if str(fn) in git_files]\n\n res = list(map(str, matched_files))\n return res\n\n def cmd_add(self, args):\n \"Add matching files to the chat session using glob patterns\"\n\n added_fnames = []\n git_added = []\n git_files = self.coder.repo.get_tracked_files() if self.coder.repo else []\n\n all_matched_files = set()\n for word in args.split():\n matched_files = self.glob_filtered_to_repo(word)\n\n if not matched_files:\n if any(char in word for char in \"*?[]\"):\n self.io.tool_error(f\"No files to add matching pattern: {word}\")\n else:\n fname = Path(self.coder.root) / word\n if fname.exists():\n if fname.is_file():\n matched_files = [str(fname)]\n else:\n self.io.tool_error(f\"Unable to add: {word}\")\n elif self.io.confirm_ask(\n f\"No files matched '{word}'. 
Do you want to create {fname}?\"\n ):\n fname.touch()\n matched_files = [str(fname)]\n\n all_matched_files.update(matched_files)\n\n for matched_file in all_matched_files:\n abs_file_path = self.coder.abs_root_path(matched_file)\n\n if not abs_file_path.startswith(self.coder.root):\n self.io.tool_error(\n f\"Can not add {abs_file_path}, which is not within {self.coder.root}\"\n )\n continue\n\n if self.coder.repo and matched_file not in git_files:\n self.coder.repo.repo.git.add(abs_file_path)\n git_added.append(matched_file)\n\n if abs_file_path in self.coder.abs_fnames:\n self.io.tool_error(f\"{matched_file} is already in the chat\")\n else:\n content = self.io.read_text(abs_file_path)\n if content is None:\n self.io.tool_error(f\"Unable to read {matched_file}\")\n else:\n self.coder.abs_fnames.add(abs_file_path)\n self.io.tool_output(f\"Added {matched_file} to the chat\")\n added_fnames.append(matched_file)\n\n if self.coder.repo and git_added:\n git_added = \" \".join(git_added)\n commit_message = f\"aider: Added {git_added}\"\n self.coder.repo.commit(message=commit_message)\n\n if not added_fnames:\n return\n\n # only reply if there's been some chatting since the last edit\n if not self.coder.cur_messages:\n return\n\n reply = prompts.added_files.format(fnames=\", \".join(added_fnames))\n return reply\n\n def completions_drop(self, partial):\n files = self.coder.get_inchat_relative_files()\n\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def cmd_drop(self, args):\n \"Remove matching files from the chat session\"\n\n if not args.strip():\n self.io.tool_output(\"Dropping all files from the chat session.\")\n self.coder.abs_fnames = set()\n\n for word in args.split():\n matched_files = self.glob_filtered_to_repo(word)\n\n if not matched_files:\n self.io.tool_error(f\"No files matched '{word}'\")\n\n for matched_file in matched_files:\n abs_fname = self.coder.abs_root_path(matched_file)\n if abs_fname in self.coder.abs_fnames:\n self.coder.abs_fnames.remove(abs_fname)\n self.io.tool_output(f\"Removed {matched_file} from the chat\")\n\n def cmd_git(self, args):\n \"Run a git command\"\n combined_output = None\n try:\n parsed_args = shlex.split(\"git \" + args)\n env = dict(GIT_EDITOR=\"true\", **subprocess.os.environ)\n result = subprocess.run(\n parsed_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, env=env\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running git command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n def cmd_run(self, args):\n \"Run a shell command and optionally add the output to the chat\"\n combined_output = None\n try:\n parsed_args = shlex.split(args)\n result = subprocess.run(\n parsed_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n if self.io.confirm_ask(\"Add the output to the chat?\", default=\"y\"):\n for line in combined_output.splitlines():\n self.io.tool_output(line, log_only=True)\n\n msg = prompts.run_output.format(\n command=args,\n output=combined_output,\n )\n return msg\n\n def cmd_exit(self, args):\n \"Exit the application\"\n sys.exit()\n\n def cmd_ls(self, args):\n \"List all known files and those included in the chat session\"\n\n files = 
self.coder.get_all_relative_files()\n\n other_files = []\n chat_files = []\n for file in files:\n abs_file_path = self.coder.abs_root_path(file)\n if abs_file_path in self.coder.abs_fnames:\n chat_files.append(file)\n else:\n other_files.append(file)\n\n if not chat_files and not other_files:\n self.io.tool_output(\"\\nNo files in chat or git repo.\")\n return\n\n if chat_files:\n self.io.tool_output(\"Files in chat:\\n\")\n for file in chat_files:\n self.io.tool_output(f\" {file}\")\n\n if other_files:\n self.io.tool_output(\"\\nRepo files not in the chat:\\n\")\n for file in other_files:\n self.io.tool_output(f\" {file}\")\n\n def cmd_help(self, args):\n \"Show help about all commands\"\n commands = sorted(self.get_commands())\n for cmd in commands:\n cmd_method_name = f\"cmd_{cmd[1:]}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n description = cmd_method.__doc__\n self.io.tool_output(f\"{cmd} {description}\")\n else:\n self.io.tool_output(f\"{cmd} No description available.\")\n\n def cmd_voice(self, args):\n \"Record and transcribe voice input\"\n\n if not self.voice:\n try:\n self.voice = voice.Voice()\n except voice.SoundDeviceError:\n self.io.tool_error(\"Unable to import `sounddevice`, is portaudio installed?\")\n return\n\n history_iter = self.io.get_input_history()\n\n history = []\n size = 0\n for line in history_iter:\n if line.startswith(\"/\"):\n continue\n if line in history:\n continue\n if size + len(line) > 1024:\n break\n size += len(line)\n history.append(line)\n\n history.reverse()\n history = \"\\n\".join(history)\n\n text = self.voice.record_and_transcribe(history, language=self.voice_language)\n if text:\n self.io.add_to_input_history(text)\n print()\n self.io.user_input(text, log_only=False)\n print()\n\n return text\n\n\ndef expand_subdir(file_path):\n file_path = Path(file_path)\n if file_path.is_file():\n yield file_path\n return\n\n if file_path.is_dir():\n for file in file_path.rglob(\"*\"):\n if file.is_file():\n yield str(file)\n",
"path": "aider/commands.py"
},
{
"content": "import codecs\nimport os\nimport shutil\nimport sys\nimport tempfile\nfrom io import StringIO\nfrom pathlib import Path\nfrom unittest import TestCase\n\nimport git\n\nfrom aider import models\nfrom aider.coders import Coder\nfrom aider.commands import Commands\nfrom aider.dump import dump # noqa: F401\nfrom aider.io import InputOutput\nfrom tests.utils import ChdirTemporaryDirectory, GitTemporaryDirectory, make_repo\n\n\nclass TestCommands(TestCase):\n def setUp(self):\n self.original_cwd = os.getcwd()\n self.tempdir = tempfile.mkdtemp()\n os.chdir(self.tempdir)\n\n def tearDown(self):\n os.chdir(self.original_cwd)\n shutil.rmtree(self.tempdir, ignore_errors=True)\n\n def test_cmd_add(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with 'foo.txt' and 'bar.txt' as a single string\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Check if both files have been created in the temporary directory\n self.assertTrue(os.path.exists(\"foo.txt\"))\n self.assertTrue(os.path.exists(\"bar.txt\"))\n\n def test_cmd_add_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create some test files\n with open(\"test1.py\", \"w\") as f:\n f.write(\"print('test1')\")\n with open(\"test2.py\", \"w\") as f:\n f.write(\"print('test2')\")\n with open(\"test.txt\", \"w\") as f:\n f.write(\"test\")\n\n # Call the cmd_add method with a glob pattern\n commands.cmd_add(\"*.py\")\n\n # Check if the Python files have been added to the chat session\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n # Check if the text file has not been added to the chat session\n self.assertNotIn(str(Path(\"test.txt\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_no_match(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with a non-existent file pattern\n commands.cmd_add(\"*.nonexistent\")\n\n # Check if no files have been added to the chat session\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_drop_directory(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a directory and add files to it using pathlib\n Path(\"test_dir\").mkdir()\n Path(\"test_dir/another_dir\").mkdir()\n Path(\"test_dir/test_file1.txt\").write_text(\"Test file 1\")\n Path(\"test_dir/test_file2.txt\").write_text(\"Test file 2\")\n Path(\"test_dir/another_dir/test_file.txt\").write_text(\"Test file 3\")\n\n # Call the cmd_add method with a directory\n commands.cmd_add(\"test_dir test_dir/test_file2.txt\")\n\n # Check if the files have been added to the chat session\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n 
self.assertIn(str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames)\n\n commands.cmd_drop(\"test_dir/another_dir\")\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertNotIn(\n str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames\n )\n\n # Issue #139 /add problems when cwd != git_root\n\n # remember the proper abs path to this file\n abs_fname = str(Path(\"test_dir/another_dir/test_file.txt\").resolve())\n\n # chdir to someplace other than git_root\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add it via it's git_root referenced name\n commands.cmd_add(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertIn(abs_fname, coder.abs_fnames)\n\n # drop it via it's git_root referenced name\n commands.cmd_drop(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertNotIn(abs_fname, coder.abs_fnames)\n\n def test_cmd_drop_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n subdir = Path(\"subdir\")\n subdir.mkdir()\n (subdir / \"subtest1.py\").touch()\n (subdir / \"subtest2.py\").touch()\n\n Path(\"test1.py\").touch()\n Path(\"test2.py\").touch()\n\n # Add some files to the chat session\n commands.cmd_add(\"*.py\")\n\n self.assertEqual(len(coder.abs_fnames), 2)\n\n # Call the cmd_drop method with a glob pattern\n commands.cmd_drop(\"*2.py\")\n\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertNotIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_bad_encoding(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a new file foo.bad which will fail to decode as utf-8\n with codecs.open(\"foo.bad\", \"w\", encoding=\"iso-8859-15\") as f:\n f.write(\"ÆØÅ\") # Characters not present in utf-8\n\n commands.cmd_add(\"foo.bad\")\n\n self.assertEqual(coder.abs_fnames, set())\n\n def test_cmd_git(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n with GitTemporaryDirectory() as tempdir:\n # Create a file in the temporary directory\n with open(f\"{tempdir}/test.txt\", \"w\") as f:\n f.write(\"test\")\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Run the cmd_git method with the arguments \"commit -a -m msg\"\n commands.cmd_git(\"add test.txt\")\n commands.cmd_git(\"commit -a -m msg\")\n\n # Check if the file has been committed to the repository\n repo = git.Repo(tempdir)\n files_in_repo = repo.git.ls_files()\n self.assertIn(\"test.txt\", files_in_repo)\n\n def test_cmd_tokens(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Redirect the standard output to an instance of io.StringIO\n stdout = StringIO()\n sys.stdout = stdout\n\n commands.cmd_tokens(\"\")\n\n # Reset the standard output\n sys.stdout = sys.__stdout__\n\n # Get the console 
output\n console_output = stdout.getvalue()\n\n self.assertIn(\"foo.txt\", console_output)\n self.assertIn(\"bar.txt\", console_output)\n\n def test_cmd_add_from_subdir(self):\n repo = git.Repo.init()\n repo.config_writer().set_value(\"user\", \"name\", \"Test User\").release()\n repo.config_writer().set_value(\"user\", \"email\", \"testuser@example.com\").release()\n\n # Create three empty files and add them to the git repository\n filenames = [\"one.py\", Path(\"subdir\") / \"two.py\", Path(\"anotherdir\") / \"three.py\"]\n for filename in filenames:\n file_path = Path(filename)\n file_path.parent.mkdir(parents=True, exist_ok=True)\n file_path.touch()\n repo.git.add(str(file_path))\n repo.git.commit(\"-m\", \"added\")\n\n filenames = [str(Path(fn).resolve()) for fn in filenames]\n\n ###\n\n os.chdir(\"subdir\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # this should get added\n commands.cmd_add(str(Path(\"anotherdir\") / \"three.py\"))\n\n # this should add one.py\n commands.cmd_add(\"*.py\")\n\n self.assertIn(filenames[0], coder.abs_fnames)\n self.assertNotIn(filenames[1], coder.abs_fnames)\n self.assertIn(filenames[2], coder.abs_fnames)\n\n def test_cmd_commit(self):\n with GitTemporaryDirectory():\n fname = \"test.txt\"\n with open(fname, \"w\") as f:\n f.write(\"test\")\n repo = git.Repo()\n repo.git.add(fname)\n repo.git.commit(\"-m\", \"initial\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n self.assertFalse(repo.is_dirty())\n with open(fname, \"w\") as f:\n f.write(\"new\")\n self.assertTrue(repo.is_dirty())\n\n commit_message = \"Test commit message\"\n commands.cmd_commit(commit_message)\n self.assertFalse(repo.is_dirty())\n\n def test_cmd_add_from_sub_dir(self):\n with GitTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add a file that is in the side_dir\n with open(\"temp.txt\", \"w\"):\n pass\n\n # this was blowing up with GitCommandError, per:\n # https://github.com/paul-gauthier/aider/issues/201\n commands.cmd_add(\"temp.txt\")\n\n def test_cmd_add_from_outside_root(self):\n with ChdirTemporaryDirectory() as tmp_dname:\n root = Path(\"root\")\n root.mkdir()\n os.chdir(str(root))\n\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n outside_file = Path(tmp_dname) / \"outside.txt\"\n outside_file.touch()\n\n # This should not be allowed!\n # https://github.com/paul-gauthier/aider/issues/178\n commands.cmd_add(\"../outside.txt\")\n\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_from_outside_git(self):\n with ChdirTemporaryDirectory() as tmp_dname:\n root = Path(\"root\")\n root.mkdir()\n os.chdir(str(root))\n\n make_repo()\n\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n outside_file = Path(tmp_dname) / \"outside.txt\"\n outside_file.touch()\n\n # This should not be allowed!\n # It was blowing up with GitCommandError, per:\n # https://github.com/paul-gauthier/aider/issues/178\n commands.cmd_add(\"../outside.txt\")\n\n self.assertEqual(len(coder.abs_fnames), 0)\n",
"path": "tests/test_commands.py"
}
] | 12_6 | python | import os
import shutil
import sys
import unittest
import tempfile
from pathlib import Path
from unittest import TestCase
class TestCommands(TestCase):
def setUp(self):
self.original_cwd = os.getcwd()
self.tempdir = tempfile.mkdtemp()
os.chdir(self.tempdir)
def tearDown(self):
os.chdir(self.original_cwd)
shutil.rmtree(self.tempdir, ignore_errors=True)
def test_cmd_add_no_match_but_make_it(self):
from aider import models
from aider.commands import Commands
from aider.io import InputOutput
# yes=True means we *will* create the file when it is not found
io = InputOutput(pretty=False, yes=True)
from aider.coders import Coder
coder = Coder.create(models.GPT35, None, io)
commands = Commands(io, coder)
fname = Path("[abc].nonexistent")
# Call the cmd_add method with a non-existent file pattern
commands.cmd_add(str(fname))
# Check if no files have been added to the chat session
self.assertEqual(len(coder.abs_fnames), 1)
self.assertTrue(fname.exists())
def test_cmd_add_filename_with_special_chars(self):
from aider import models
from aider.commands import Commands
from aider.io import InputOutput
from tests.utils import ChdirTemporaryDirectory
with ChdirTemporaryDirectory():
io = InputOutput(pretty=False, yes=False)
from aider.coders import Coder
coder = Coder.create(models.GPT35, None, io)
commands = Commands(io, coder)
fname = Path("with[brackets].txt")
fname.touch()
commands.cmd_add(str(fname))
self.assertIn(str(fname.resolve()), coder.abs_fnames)
def test_cmd_add_abs_filename(self):
from aider import models
from aider.commands import Commands
from aider.io import InputOutput
from tests.utils import ChdirTemporaryDirectory
with ChdirTemporaryDirectory():
io = InputOutput(pretty=False, yes=False)
from aider.coders import Coder
coder = Coder.create(models.GPT35, None, io)
commands = Commands(io, coder)
fname = Path("file.txt")
fname.touch()
commands.cmd_add(str(fname.resolve()))
self.assertIn(str(fname.resolve()), coder.abs_fnames)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCommands))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
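The solution patch for row 12_6 above replaces the nested matching logic in `cmd_add` with an early-exit sequence: a literal existing-file check first, then glob matching, then user-confirmed creation. A standalone sketch of that control flow, with `root`, `glob_matches`, and `confirm` as illustrative stand-ins for the real coder and io objects:

```python
# Standalone sketch of the restructured matching flow from the 12_6 patch.
# `glob_matches` and `confirm` stand in for self.glob_filtered_to_repo()
# and self.io.confirm_ask(); this is illustrative, not the repo's code.
from pathlib import Path

def collect_matches(root, words, glob_matches, confirm):
    all_matched_files = set()
    for word in words:
        fname = Path(root) / word
        if fname.exists() and fname.is_file():
            # A literal file path wins outright; a directory falls through
            # so the glob step picks up the files inside it.
            all_matched_files.add(str(fname))
            continue

        matched = glob_matches(word)
        if matched:
            all_matched_files.update(matched)
            continue

        if confirm(f"No files matched '{word}'. Do you want to create {fname}?"):
            fname.touch()
            all_matched_files.add(str(fname))
    return all_matched_files
```

Checking literal existence before globbing also handles filenames that contain glob metacharacters, which is exactly what the patch's new `test_cmd_add_filename_with_special_chars` test (`with[brackets].txt`) covers.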
https://github.com/teamqurrent/aider | The objective of this commit is to enhance the security and integrity of the `cmd_add` method in the `Commands` class within `commands.py` by preventing the addition of files located outside the designated `coder.root` directory. This ensures that only files within the specified root are processed and added, maintaining the intended scope and avoiding the potential security issues or errors that can arise from handling external files. | 5d7e440 | aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2023.5.7
charset-normalizer==3.1.0
frozenlist==1.3.3
gitdb==4.0.10
GitPython==3.1.31
idna==3.4
markdown-it-py==2.2.0
mdurl==0.1.2
multidict==6.0.4
openai==0.27.6
prompt-toolkit==3.0.38
Pygments==2.15.1
requests==2.30.0
rich==13.3.5
smmap==5.0.0
tqdm==4.65.0
urllib3==2.0.2
wcwidth==0.2.6
yarl==1.9.2
pytest==7.3.1
tiktoken==0.4.0
configargparse
PyYAML
backoff==2.2.1
networkx==3.1
diskcache==5.6.1
numpy==1.26.1
scipy==1.11.3
jsonschema==4.17.3
sounddevice==0.4.6
soundfile==0.12.1
pathspec==0.11.2
grep-ast==0.2.4
| python3.9 | 399d86d | diff --git a/aider/commands.py b/aider/commands.py
--- a/aider/commands.py
+++ b/aider/commands.py
@@ -296,6 +296,12 @@ class Commands:
for matched_file in all_matched_files:
abs_file_path = self.coder.abs_root_path(matched_file)
+ if not abs_file_path.startswith(self.coder.root):
+ self.io.tool_error(
+ f"Can not add {abs_file_path}, which is not within {self.coder.root}"
+ )
+ continue
+
if self.coder.repo and matched_file not in git_files:
self.coder.repo.repo.git.add(abs_file_path)
git_added.append(matched_file)
diff --git a/tests/test_commands.py b/tests/test_commands.py
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -14,7 +14,7 @@ from aider.coders import Coder
from aider.commands import Commands
from aider.dump import dump # noqa: F401
from aider.io import InputOutput
-from tests.utils import GitTemporaryDirectory
+from tests.utils import ChdirTemporaryDirectory, GitTemporaryDirectory, make_repo
class TestCommands(TestCase):
@@ -295,3 +295,48 @@ class TestCommands(TestCase):
# this was blowing up with GitCommandError, per:
# https://github.com/paul-gauthier/aider/issues/201
commands.cmd_add("temp.txt")
+
+ def test_cmd_add_from_outside_root(self):
+ with ChdirTemporaryDirectory() as tmp_dname:
+ root = Path("root")
+ root.mkdir()
+ os.chdir(str(root))
+
+ io = InputOutput(pretty=False, yes=False)
+ from aider.coders import Coder
+
+ coder = Coder.create(models.GPT35, None, io)
+ commands = Commands(io, coder)
+
+ outside_file = Path(tmp_dname) / "outside.txt"
+ outside_file.touch()
+
+ # This should not be allowed!
+ # https://github.com/paul-gauthier/aider/issues/178
+ commands.cmd_add("../outside.txt")
+
+ self.assertEqual(len(coder.abs_fnames), 0)
+
+ def test_cmd_add_from_outside_git(self):
+ with ChdirTemporaryDirectory() as tmp_dname:
+ root = Path("root")
+ root.mkdir()
+ os.chdir(str(root))
+
+ make_repo()
+
+ io = InputOutput(pretty=False, yes=False)
+ from aider.coders import Coder
+
+ coder = Coder.create(models.GPT35, None, io)
+ commands = Commands(io, coder)
+
+ outside_file = Path(tmp_dname) / "outside.txt"
+ outside_file.touch()
+
+ # This should not be allowed!
+ # It was blowing up with GitCommandError, per:
+ # https://github.com/paul-gauthier/aider/issues/178
+ commands.cmd_add("../outside.txt")
+
+ self.assertEqual(len(coder.abs_fnames), 0)
| [
{
"content": "import json\nimport shlex\nimport subprocess\nimport sys\nfrom pathlib import Path\n\nimport git\nfrom prompt_toolkit.completion import Completion\n\nfrom aider import prompts, voice\n\nfrom .dump import dump # noqa: F401\n\n\nclass Commands:\n voice = None\n\n def __init__(self, io, coder, voice_language=None):\n self.io = io\n self.coder = coder\n\n if voice_language == \"auto\":\n voice_language = None\n\n self.voice_language = voice_language\n self.tokenizer = coder.main_model.tokenizer\n\n def is_command(self, inp):\n if inp[0] == \"/\":\n return True\n\n def get_commands(self):\n commands = []\n for attr in dir(self):\n if attr.startswith(\"cmd_\"):\n commands.append(\"/\" + attr[4:])\n\n return commands\n\n def get_command_completions(self, cmd_name, partial):\n cmd_completions_method_name = f\"completions_{cmd_name}\"\n cmd_completions_method = getattr(self, cmd_completions_method_name, None)\n if cmd_completions_method:\n for completion in cmd_completions_method(partial):\n yield completion\n\n def do_run(self, cmd_name, args):\n cmd_method_name = f\"cmd_{cmd_name}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n return cmd_method(args)\n else:\n self.io.tool_output(f\"Error: Command {cmd_name} not found.\")\n\n def matching_commands(self, inp):\n words = inp.strip().split()\n if not words:\n return\n\n first_word = words[0]\n rest_inp = inp[len(words[0]) :]\n\n all_commands = self.get_commands()\n matching_commands = [cmd for cmd in all_commands if cmd.startswith(first_word)]\n return matching_commands, first_word, rest_inp\n\n def run(self, inp):\n res = self.matching_commands(inp)\n if res is None:\n return\n matching_commands, first_word, rest_inp = res\n if len(matching_commands) == 1:\n return self.do_run(matching_commands[0][1:], rest_inp)\n elif len(matching_commands) > 1:\n self.io.tool_error(f\"Ambiguous command: {', '.join(matching_commands)}\")\n else:\n self.io.tool_error(f\"Invalid command: {first_word}\")\n\n # any method called cmd_xxx becomes a command automatically.\n # each one must take an args param.\n\n def cmd_commit(self, args):\n \"Commit edits to the repo made outside the chat (commit message optional)\"\n\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.repo.is_dirty():\n self.io.tool_error(\"No more changes to commit.\")\n return\n\n commit_message = args.strip()\n self.coder.repo.commit(message=commit_message)\n\n def cmd_clear(self, args):\n \"Clear the chat history\"\n\n self.coder.done_messages = []\n self.coder.cur_messages = []\n\n def cmd_tokens(self, args):\n \"Report on the number of tokens used by the current chat context\"\n\n res = []\n\n # system messages\n msgs = [\n dict(role=\"system\", content=self.coder.gpt_prompts.main_system),\n dict(role=\"system\", content=self.coder.gpt_prompts.system_reminder),\n ]\n tokens = len(self.tokenizer.encode(json.dumps(msgs)))\n res.append((tokens, \"system messages\", \"\"))\n\n # chat history\n msgs = self.coder.done_messages + self.coder.cur_messages\n if msgs:\n msgs = [dict(role=\"dummy\", content=msg) for msg in msgs]\n msgs = json.dumps(msgs)\n tokens = len(self.tokenizer.encode(msgs))\n res.append((tokens, \"chat history\", \"use /clear to clear\"))\n\n # repo map\n other_files = set(self.coder.get_all_abs_files()) - set(self.coder.abs_fnames)\n if self.coder.repo_map:\n repo_content = self.coder.repo_map.get_repo_map(self.coder.abs_fnames, other_files)\n if repo_content:\n tokens = 
len(self.tokenizer.encode(repo_content))\n res.append((tokens, \"repository map\", \"use --map-tokens to resize\"))\n\n # files\n for fname in self.coder.abs_fnames:\n relative_fname = self.coder.get_rel_fname(fname)\n content = self.io.read_text(fname)\n # approximate\n content = f\"{relative_fname}\\n```\\n\" + content + \"```\\n\"\n tokens = len(self.tokenizer.encode(content))\n res.append((tokens, f\"{relative_fname}\", \"use /drop to drop from chat\"))\n\n self.io.tool_output(\"Approximate context window usage, in tokens:\")\n self.io.tool_output()\n\n width = 8\n cost_width = 7\n\n def fmt(v):\n return format(int(v), \",\").rjust(width)\n\n col_width = max(len(row[1]) for row in res)\n\n cost_pad = \" \" * cost_width\n total = 0\n total_cost = 0.0\n for tk, msg, tip in res:\n total += tk\n cost = tk * (self.coder.main_model.prompt_price / 1000)\n total_cost += cost\n msg = msg.ljust(col_width)\n self.io.tool_output(f\"${cost:5.2f} {fmt(tk)} {msg} {tip}\")\n\n self.io.tool_output(\"=\" * (width + cost_width + 1))\n self.io.tool_output(f\"${total_cost:5.2f} {fmt(total)} tokens total\")\n\n limit = self.coder.main_model.max_context_tokens\n remaining = limit - total\n if remaining > 1024:\n self.io.tool_output(f\"{cost_pad}{fmt(remaining)} tokens remaining in context window\")\n elif remaining > 0:\n self.io.tool_error(\n f\"{cost_pad}{fmt(remaining)} tokens remaining in context window (use /drop or\"\n \" /clear to make space)\"\n )\n else:\n self.io.tool_error(f\"{cost_pad}{fmt(remaining)} tokens remaining, window exhausted!\")\n self.io.tool_output(f\"{cost_pad}{fmt(limit)} tokens max context window size\")\n\n def cmd_undo(self, args):\n \"Undo the last git commit if it was done by aider\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if self.coder.repo.is_dirty():\n self.io.tool_error(\n \"The repository has uncommitted changes. Please commit or stash them before\"\n \" undoing.\"\n )\n return\n\n local_head = self.coder.repo.repo.git.rev_parse(\"HEAD\")\n current_branch = self.coder.repo.repo.active_branch.name\n try:\n remote_head = self.coder.repo.repo.git.rev_parse(f\"origin/{current_branch}\")\n has_origin = True\n except git.exc.GitCommandError:\n has_origin = False\n\n if has_origin:\n if local_head == remote_head:\n self.io.tool_error(\n \"The last commit has already been pushed to the origin. 
Undoing is not\"\n \" possible.\"\n )\n return\n\n last_commit = self.coder.repo.repo.head.commit\n if (\n not last_commit.message.startswith(\"aider:\")\n or last_commit.hexsha[:7] != self.coder.last_aider_commit_hash\n ):\n self.io.tool_error(\"The last commit was not made by aider in this chat session.\")\n return\n self.coder.repo.repo.git.reset(\"--hard\", \"HEAD~1\")\n self.io.tool_output(\n f\"{last_commit.message.strip()}\\n\"\n f\"The above commit {self.coder.last_aider_commit_hash} \"\n \"was reset and removed from git.\\n\"\n )\n\n if self.coder.main_model.send_undo_reply:\n return prompts.undo_command_reply\n\n def cmd_diff(self, args):\n \"Display the diff of the last aider commit\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.last_aider_commit_hash:\n self.io.tool_error(\"No previous aider commit found.\")\n return\n\n commits = f\"{self.coder.last_aider_commit_hash}~1\"\n diff = self.coder.repo.diff_commits(\n self.coder.pretty,\n commits,\n self.coder.last_aider_commit_hash,\n )\n\n # don't use io.tool_output() because we don't want to log or further colorize\n print(diff)\n\n def completions_add(self, partial):\n files = set(self.coder.get_all_relative_files())\n files = files - set(self.coder.get_inchat_relative_files())\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def glob_filtered_to_repo(self, pattern):\n raw_matched_files = list(Path(self.coder.root).glob(pattern))\n\n matched_files = []\n for fn in raw_matched_files:\n matched_files += expand_subdir(fn)\n\n matched_files = [str(Path(fn).relative_to(self.coder.root)) for fn in matched_files]\n\n # if repo, filter against it\n if self.coder.repo:\n git_files = self.coder.repo.get_tracked_files()\n matched_files = [fn for fn in matched_files if str(fn) in git_files]\n\n res = list(map(str, matched_files))\n return res\n\n def cmd_add(self, args):\n \"Add matching files to the chat session using glob patterns\"\n\n added_fnames = []\n git_added = []\n git_files = self.coder.repo.get_tracked_files() if self.coder.repo else []\n\n all_matched_files = set()\n for word in args.split():\n matched_files = self.glob_filtered_to_repo(word)\n\n if not matched_files:\n if any(char in word for char in \"*?[]\"):\n self.io.tool_error(f\"No files to add matching pattern: {word}\")\n else:\n fname = Path(self.coder.root) / word\n if fname.exists():\n if fname.is_file():\n matched_files = [str(fname)]\n else:\n self.io.tool_error(f\"Unable to add: {word}\")\n elif self.io.confirm_ask(\n f\"No files matched '{word}'. 
Do you want to create {fname}?\"\n ):\n fname.touch()\n matched_files = [str(fname)]\n\n all_matched_files.update(matched_files)\n\n for matched_file in all_matched_files:\n abs_file_path = self.coder.abs_root_path(matched_file)\n\n if self.coder.repo and matched_file not in git_files:\n self.coder.repo.repo.git.add(abs_file_path)\n git_added.append(matched_file)\n\n if abs_file_path in self.coder.abs_fnames:\n self.io.tool_error(f\"{matched_file} is already in the chat\")\n else:\n content = self.io.read_text(abs_file_path)\n if content is None:\n self.io.tool_error(f\"Unable to read {matched_file}\")\n else:\n self.coder.abs_fnames.add(abs_file_path)\n self.io.tool_output(f\"Added {matched_file} to the chat\")\n added_fnames.append(matched_file)\n\n if self.coder.repo and git_added:\n git_added = \" \".join(git_added)\n commit_message = f\"aider: Added {git_added}\"\n self.coder.repo.commit(message=commit_message)\n\n if not added_fnames:\n return\n\n # only reply if there's been some chatting since the last edit\n if not self.coder.cur_messages:\n return\n\n reply = prompts.added_files.format(fnames=\", \".join(added_fnames))\n return reply\n\n def completions_drop(self, partial):\n files = self.coder.get_inchat_relative_files()\n\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def cmd_drop(self, args):\n \"Remove matching files from the chat session\"\n\n if not args.strip():\n self.io.tool_output(\"Dropping all files from the chat session.\")\n self.coder.abs_fnames = set()\n\n for word in args.split():\n matched_files = self.glob_filtered_to_repo(word)\n\n if not matched_files:\n self.io.tool_error(f\"No files matched '{word}'\")\n\n for matched_file in matched_files:\n abs_fname = self.coder.abs_root_path(matched_file)\n if abs_fname in self.coder.abs_fnames:\n self.coder.abs_fnames.remove(abs_fname)\n self.io.tool_output(f\"Removed {matched_file} from the chat\")\n\n def cmd_git(self, args):\n \"Run a git command\"\n combined_output = None\n try:\n parsed_args = shlex.split(\"git \" + args)\n env = dict(GIT_EDITOR=\"true\", **subprocess.os.environ)\n result = subprocess.run(\n parsed_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, env=env\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running git command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n def cmd_run(self, args):\n \"Run a shell command and optionally add the output to the chat\"\n combined_output = None\n try:\n parsed_args = shlex.split(args)\n result = subprocess.run(\n parsed_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n if self.io.confirm_ask(\"Add the output to the chat?\", default=\"y\"):\n for line in combined_output.splitlines():\n self.io.tool_output(line, log_only=True)\n\n msg = prompts.run_output.format(\n command=args,\n output=combined_output,\n )\n return msg\n\n def cmd_exit(self, args):\n \"Exit the application\"\n sys.exit()\n\n def cmd_ls(self, args):\n \"List all known files and those included in the chat session\"\n\n files = self.coder.get_all_relative_files()\n\n other_files = []\n chat_files = []\n for file in files:\n abs_file_path = self.coder.abs_root_path(file)\n if abs_file_path in 
self.coder.abs_fnames:\n chat_files.append(file)\n else:\n other_files.append(file)\n\n if not chat_files and not other_files:\n self.io.tool_output(\"\\nNo files in chat or git repo.\")\n return\n\n if chat_files:\n self.io.tool_output(\"Files in chat:\\n\")\n for file in chat_files:\n self.io.tool_output(f\" {file}\")\n\n if other_files:\n self.io.tool_output(\"\\nRepo files not in the chat:\\n\")\n for file in other_files:\n self.io.tool_output(f\" {file}\")\n\n def cmd_help(self, args):\n \"Show help about all commands\"\n commands = sorted(self.get_commands())\n for cmd in commands:\n cmd_method_name = f\"cmd_{cmd[1:]}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n description = cmd_method.__doc__\n self.io.tool_output(f\"{cmd} {description}\")\n else:\n self.io.tool_output(f\"{cmd} No description available.\")\n\n def cmd_voice(self, args):\n \"Record and transcribe voice input\"\n\n if not self.voice:\n try:\n self.voice = voice.Voice()\n except voice.SoundDeviceError:\n self.io.tool_error(\"Unable to import `sounddevice`, is portaudio installed?\")\n return\n\n history_iter = self.io.get_input_history()\n\n history = []\n size = 0\n for line in history_iter:\n if line.startswith(\"/\"):\n continue\n if line in history:\n continue\n if size + len(line) > 1024:\n break\n size += len(line)\n history.append(line)\n\n history.reverse()\n history = \"\\n\".join(history)\n\n text = self.voice.record_and_transcribe(history, language=self.voice_language)\n if text:\n self.io.add_to_input_history(text)\n print()\n self.io.user_input(text, log_only=False)\n print()\n\n return text\n\n\ndef expand_subdir(file_path):\n file_path = Path(file_path)\n if file_path.is_file():\n yield file_path\n return\n\n if file_path.is_dir():\n for file in file_path.rglob(\"*\"):\n if file.is_file():\n yield str(file)\n",
"path": "aider/commands.py"
},
{
"content": "import codecs\nimport os\nimport shutil\nimport sys\nimport tempfile\nfrom io import StringIO\nfrom pathlib import Path\nfrom unittest import TestCase\n\nimport git\n\nfrom aider import models\nfrom aider.coders import Coder\nfrom aider.commands import Commands\nfrom aider.dump import dump # noqa: F401\nfrom aider.io import InputOutput\nfrom tests.utils import GitTemporaryDirectory\n\n\nclass TestCommands(TestCase):\n def setUp(self):\n self.original_cwd = os.getcwd()\n self.tempdir = tempfile.mkdtemp()\n os.chdir(self.tempdir)\n\n def tearDown(self):\n os.chdir(self.original_cwd)\n shutil.rmtree(self.tempdir, ignore_errors=True)\n\n def test_cmd_add(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with 'foo.txt' and 'bar.txt' as a single string\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Check if both files have been created in the temporary directory\n self.assertTrue(os.path.exists(\"foo.txt\"))\n self.assertTrue(os.path.exists(\"bar.txt\"))\n\n def test_cmd_add_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create some test files\n with open(\"test1.py\", \"w\") as f:\n f.write(\"print('test1')\")\n with open(\"test2.py\", \"w\") as f:\n f.write(\"print('test2')\")\n with open(\"test.txt\", \"w\") as f:\n f.write(\"test\")\n\n # Call the cmd_add method with a glob pattern\n commands.cmd_add(\"*.py\")\n\n # Check if the Python files have been added to the chat session\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n # Check if the text file has not been added to the chat session\n self.assertNotIn(str(Path(\"test.txt\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_no_match(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with a non-existent file pattern\n commands.cmd_add(\"*.nonexistent\")\n\n # Check if no files have been added to the chat session\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_drop_directory(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a directory and add files to it using pathlib\n Path(\"test_dir\").mkdir()\n Path(\"test_dir/another_dir\").mkdir()\n Path(\"test_dir/test_file1.txt\").write_text(\"Test file 1\")\n Path(\"test_dir/test_file2.txt\").write_text(\"Test file 2\")\n Path(\"test_dir/another_dir/test_file.txt\").write_text(\"Test file 3\")\n\n # Call the cmd_add method with a directory\n commands.cmd_add(\"test_dir test_dir/test_file2.txt\")\n\n # Check if the files have been added to the chat session\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames)\n\n 
commands.cmd_drop(\"test_dir/another_dir\")\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertNotIn(\n str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames\n )\n\n # Issue #139 /add problems when cwd != git_root\n\n # remember the proper abs path to this file\n abs_fname = str(Path(\"test_dir/another_dir/test_file.txt\").resolve())\n\n # chdir to someplace other than git_root\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add it via it's git_root referenced name\n commands.cmd_add(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertIn(abs_fname, coder.abs_fnames)\n\n # drop it via it's git_root referenced name\n commands.cmd_drop(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertNotIn(abs_fname, coder.abs_fnames)\n\n def test_cmd_drop_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n subdir = Path(\"subdir\")\n subdir.mkdir()\n (subdir / \"subtest1.py\").touch()\n (subdir / \"subtest2.py\").touch()\n\n Path(\"test1.py\").touch()\n Path(\"test2.py\").touch()\n\n # Add some files to the chat session\n commands.cmd_add(\"*.py\")\n\n self.assertEqual(len(coder.abs_fnames), 2)\n\n # Call the cmd_drop method with a glob pattern\n commands.cmd_drop(\"*2.py\")\n\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertNotIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_bad_encoding(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a new file foo.bad which will fail to decode as utf-8\n with codecs.open(\"foo.bad\", \"w\", encoding=\"iso-8859-15\") as f:\n f.write(\"ÆØÅ\") # Characters not present in utf-8\n\n commands.cmd_add(\"foo.bad\")\n\n self.assertEqual(coder.abs_fnames, set())\n\n def test_cmd_git(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n with GitTemporaryDirectory() as tempdir:\n # Create a file in the temporary directory\n with open(f\"{tempdir}/test.txt\", \"w\") as f:\n f.write(\"test\")\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Run the cmd_git method with the arguments \"commit -a -m msg\"\n commands.cmd_git(\"add test.txt\")\n commands.cmd_git(\"commit -a -m msg\")\n\n # Check if the file has been committed to the repository\n repo = git.Repo(tempdir)\n files_in_repo = repo.git.ls_files()\n self.assertIn(\"test.txt\", files_in_repo)\n\n def test_cmd_tokens(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Redirect the standard output to an instance of io.StringIO\n stdout = StringIO()\n sys.stdout = stdout\n\n commands.cmd_tokens(\"\")\n\n # Reset the standard output\n sys.stdout = sys.__stdout__\n\n # Get the console output\n console_output = stdout.getvalue()\n\n self.assertIn(\"foo.txt\", console_output)\n 
self.assertIn(\"bar.txt\", console_output)\n\n def test_cmd_add_from_subdir(self):\n repo = git.Repo.init()\n repo.config_writer().set_value(\"user\", \"name\", \"Test User\").release()\n repo.config_writer().set_value(\"user\", \"email\", \"testuser@example.com\").release()\n\n # Create three empty files and add them to the git repository\n filenames = [\"one.py\", Path(\"subdir\") / \"two.py\", Path(\"anotherdir\") / \"three.py\"]\n for filename in filenames:\n file_path = Path(filename)\n file_path.parent.mkdir(parents=True, exist_ok=True)\n file_path.touch()\n repo.git.add(str(file_path))\n repo.git.commit(\"-m\", \"added\")\n\n filenames = [str(Path(fn).resolve()) for fn in filenames]\n\n ###\n\n os.chdir(\"subdir\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # this should get added\n commands.cmd_add(str(Path(\"anotherdir\") / \"three.py\"))\n\n # this should add one.py\n commands.cmd_add(\"*.py\")\n\n self.assertIn(filenames[0], coder.abs_fnames)\n self.assertNotIn(filenames[1], coder.abs_fnames)\n self.assertIn(filenames[2], coder.abs_fnames)\n\n def test_cmd_commit(self):\n with GitTemporaryDirectory():\n fname = \"test.txt\"\n with open(fname, \"w\") as f:\n f.write(\"test\")\n repo = git.Repo()\n repo.git.add(fname)\n repo.git.commit(\"-m\", \"initial\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n self.assertFalse(repo.is_dirty())\n with open(fname, \"w\") as f:\n f.write(\"new\")\n self.assertTrue(repo.is_dirty())\n\n commit_message = \"Test commit message\"\n commands.cmd_commit(commit_message)\n self.assertFalse(repo.is_dirty())\n\n def test_cmd_add_from_sub_dir(self):\n with GitTemporaryDirectory():\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add a file that is in the side_dir\n with open(\"temp.txt\", \"w\"):\n pass\n\n # this was blowing up with GitCommandError, per:\n # https://github.com/paul-gauthier/aider/issues/201\n commands.cmd_add(\"temp.txt\")\n",
"path": "tests/test_commands.py"
}
] | 12_7 | python | import os
import shutil
import sys
import unittest
import tempfile
from pathlib import Path
from unittest import TestCase


class TestCommands(TestCase):
def setUp(self):
self.original_cwd = os.getcwd()
self.tempdir = tempfile.mkdtemp()
        os.chdir(self.tempdir)

def tearDown(self):
os.chdir(self.original_cwd)
        shutil.rmtree(self.tempdir, ignore_errors=True)

def test_cmd_add_from_outside_root(self):
from aider import models
from aider.commands import Commands
from aider.io import InputOutput
from tests.utils import ChdirTemporaryDirectory
with ChdirTemporaryDirectory() as tmp_dname:
root = Path("root")
root.mkdir()
os.chdir(str(root))
io = InputOutput(pretty=False, yes=False)
from aider.coders import Coder
coder = Coder.create(models.GPT35, None, io)
commands = Commands(io, coder)
outside_file = Path(tmp_dname) / "outside.txt"
outside_file.touch()
# This should not be allowed!
commands.cmd_add("../outside.txt")
            self.assertEqual(len(coder.abs_fnames), 0)

def test_cmd_add_from_outside_git(self):
from aider import models
from aider.commands import Commands
from aider.io import InputOutput
from tests.utils import ChdirTemporaryDirectory, make_repo
with ChdirTemporaryDirectory() as tmp_dname:
root = Path("root")
root.mkdir()
os.chdir(str(root))
make_repo()
io = InputOutput(pretty=False, yes=False)
from aider.coders import Coder
coder = Coder.create(models.GPT35, None, io)
commands = Commands(io, coder)
outside_file = Path(tmp_dname) / "outside.txt"
outside_file.touch()
# This should not be allowed!
# It was blowing up with GitCommandError, per:
commands.cmd_add("../outside.txt")
            self.assertEqual(len(coder.abs_fnames), 0)


def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCommands))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
        sys.exit(1)


if __name__ == '__main__':
main()
|
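The two tests above pin down the expected behavior: /add must silently refuse any path that, once fully resolved, escapes the repository root. Below is a minimal sketch of such a containment check; it is illustrative only, and the helper name is an assumption rather than aider's actual implementation.

from pathlib import Path

def is_inside_root(root, candidate):
    # Resolve both paths so ".." segments cannot smuggle the candidate
    # outside the root (assumes Python 3.6+, where resolve() accepts
    # not-yet-existing paths).
    root_resolved = Path(root).resolve()
    cand_resolved = (root_resolved / candidate).resolve()
    try:
        # relative_to() raises ValueError when cand_resolved is not
        # contained in root_resolved.
        cand_resolved.relative_to(root_resolved)
        return True
    except ValueError:
        return False

assert not is_inside_root("root", "../outside.txt")  # mirrors the tests above
assert is_inside_root("root", "inside.txt")

The design point is that containment is decided on resolved absolute paths, never on the raw string the user typed.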
https://github.com/teamqurrent/aider | The objective is to fix an issue in the `cmd_add` method of the `Commands` class within `commands.py`, specifically when adding files from a subdirectory. The goal is to ensure that file paths are correctly resolved relative to the `coder.root` directory and that the method can handle file creation and addition without errors, even when executed from a subdirectory. | 545f105 | aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2023.5.7
charset-normalizer==3.1.0
frozenlist==1.3.3
gitdb==4.0.10
GitPython==3.1.31
idna==3.4
markdown-it-py==2.2.0
mdurl==0.1.2
multidict==6.0.4
openai==0.27.6
prompt-toolkit==3.0.38
Pygments==2.15.1
requests==2.30.0
rich==13.3.5
smmap==5.0.0
tqdm==4.65.0
urllib3==2.0.2
wcwidth==0.2.6
yarl==1.9.2
pytest==7.3.1
tiktoken==0.4.0
configargparse
PyYAML
backoff==2.2.1
networkx==3.1
diskcache==5.6.1
numpy==1.26.1
scipy==1.11.3
jsonschema==4.17.3
sounddevice==0.4.6
soundfile==0.12.1
pathspec==0.11.2
grep-ast==0.2.4
| python3.9 | 98bacd0 | diff --git a/aider/commands.py b/aider/commands.py
--- a/aider/commands.py
+++ b/aider/commands.py
@@ -279,16 +279,17 @@ class Commands:
if any(char in word for char in "*?[]"):
self.io.tool_error(f"No files to add matching pattern: {word}")
else:
- if Path(word).exists():
- if Path(word).is_file():
- matched_files = [word]
+ fname = Path(self.coder.root) / word
+ if fname.exists():
+ if fname.is_file():
+ matched_files = [str(fname)]
else:
self.io.tool_error(f"Unable to add: {word}")
elif self.io.confirm_ask(
- f"No files matched '{word}'. Do you want to create the file?"
+ f"No files matched '{word}'. Do you want to create {fname}?"
):
- (Path(self.coder.root) / word).touch()
- matched_files = [word]
+ fname.touch()
+ matched_files = [str(fname)]
all_matched_files.update(matched_files)
diff --git a/tests/test_commands.py b/tests/test_commands.py
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -276,3 +276,22 @@ class TestCommands(TestCase):
commit_message = "Test commit message"
commands.cmd_commit(commit_message)
self.assertFalse(repo.is_dirty())
+
+ def test_cmd_add_from_sub_dir(self):
+ with GitTemporaryDirectory():
+ io = InputOutput(pretty=False, yes=False)
+ from aider.coders import Coder
+
+ coder = Coder.create(models.GPT35, None, io)
+ commands = Commands(io, coder)
+
+ Path("side_dir").mkdir()
+ os.chdir("side_dir")
+
+ # add a file that is in the side_dir
+ with open("temp.txt", "w"):
+ pass
+
+ # this was blowing up with GitCommandError, per:
+ # https://github.com/paul-gauthier/aider/issues/201
+ commands.cmd_add("temp.txt")
| [
{
"content": "import json\nimport shlex\nimport subprocess\nimport sys\nfrom pathlib import Path\n\nimport git\nfrom prompt_toolkit.completion import Completion\n\nfrom aider import prompts, voice\n\nfrom .dump import dump # noqa: F401\n\n\nclass Commands:\n voice = None\n\n def __init__(self, io, coder, voice_language=None):\n self.io = io\n self.coder = coder\n\n if voice_language == \"auto\":\n voice_language = None\n\n self.voice_language = voice_language\n self.tokenizer = coder.main_model.tokenizer\n\n def is_command(self, inp):\n if inp[0] == \"/\":\n return True\n\n def get_commands(self):\n commands = []\n for attr in dir(self):\n if attr.startswith(\"cmd_\"):\n commands.append(\"/\" + attr[4:])\n\n return commands\n\n def get_command_completions(self, cmd_name, partial):\n cmd_completions_method_name = f\"completions_{cmd_name}\"\n cmd_completions_method = getattr(self, cmd_completions_method_name, None)\n if cmd_completions_method:\n for completion in cmd_completions_method(partial):\n yield completion\n\n def do_run(self, cmd_name, args):\n cmd_method_name = f\"cmd_{cmd_name}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n return cmd_method(args)\n else:\n self.io.tool_output(f\"Error: Command {cmd_name} not found.\")\n\n def matching_commands(self, inp):\n words = inp.strip().split()\n if not words:\n return\n\n first_word = words[0]\n rest_inp = inp[len(words[0]) :]\n\n all_commands = self.get_commands()\n matching_commands = [cmd for cmd in all_commands if cmd.startswith(first_word)]\n return matching_commands, first_word, rest_inp\n\n def run(self, inp):\n res = self.matching_commands(inp)\n if res is None:\n return\n matching_commands, first_word, rest_inp = res\n if len(matching_commands) == 1:\n return self.do_run(matching_commands[0][1:], rest_inp)\n elif len(matching_commands) > 1:\n self.io.tool_error(f\"Ambiguous command: {', '.join(matching_commands)}\")\n else:\n self.io.tool_error(f\"Invalid command: {first_word}\")\n\n # any method called cmd_xxx becomes a command automatically.\n # each one must take an args param.\n\n def cmd_commit(self, args):\n \"Commit edits to the repo made outside the chat (commit message optional)\"\n\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.repo.is_dirty():\n self.io.tool_error(\"No more changes to commit.\")\n return\n\n commit_message = args.strip()\n self.coder.repo.commit(message=commit_message)\n\n def cmd_clear(self, args):\n \"Clear the chat history\"\n\n self.coder.done_messages = []\n self.coder.cur_messages = []\n\n def cmd_tokens(self, args):\n \"Report on the number of tokens used by the current chat context\"\n\n res = []\n\n # system messages\n msgs = [\n dict(role=\"system\", content=self.coder.gpt_prompts.main_system),\n dict(role=\"system\", content=self.coder.gpt_prompts.system_reminder),\n ]\n tokens = len(self.tokenizer.encode(json.dumps(msgs)))\n res.append((tokens, \"system messages\", \"\"))\n\n # chat history\n msgs = self.coder.done_messages + self.coder.cur_messages\n if msgs:\n msgs = [dict(role=\"dummy\", content=msg) for msg in msgs]\n msgs = json.dumps(msgs)\n tokens = len(self.tokenizer.encode(msgs))\n res.append((tokens, \"chat history\", \"use /clear to clear\"))\n\n # repo map\n other_files = set(self.coder.get_all_abs_files()) - set(self.coder.abs_fnames)\n if self.coder.repo_map:\n repo_content = self.coder.repo_map.get_repo_map(self.coder.abs_fnames, other_files)\n if repo_content:\n tokens = 
len(self.tokenizer.encode(repo_content))\n res.append((tokens, \"repository map\", \"use --map-tokens to resize\"))\n\n # files\n for fname in self.coder.abs_fnames:\n relative_fname = self.coder.get_rel_fname(fname)\n content = self.io.read_text(fname)\n # approximate\n content = f\"{relative_fname}\\n```\\n\" + content + \"```\\n\"\n tokens = len(self.tokenizer.encode(content))\n res.append((tokens, f\"{relative_fname}\", \"use /drop to drop from chat\"))\n\n self.io.tool_output(\"Approximate context window usage, in tokens:\")\n self.io.tool_output()\n\n width = 8\n cost_width = 7\n\n def fmt(v):\n return format(int(v), \",\").rjust(width)\n\n col_width = max(len(row[1]) for row in res)\n\n cost_pad = \" \" * cost_width\n total = 0\n total_cost = 0.0\n for tk, msg, tip in res:\n total += tk\n cost = tk * (self.coder.main_model.prompt_price / 1000)\n total_cost += cost\n msg = msg.ljust(col_width)\n self.io.tool_output(f\"${cost:5.2f} {fmt(tk)} {msg} {tip}\")\n\n self.io.tool_output(\"=\" * (width + cost_width + 1))\n self.io.tool_output(f\"${total_cost:5.2f} {fmt(total)} tokens total\")\n\n limit = self.coder.main_model.max_context_tokens\n remaining = limit - total\n if remaining > 1024:\n self.io.tool_output(f\"{cost_pad}{fmt(remaining)} tokens remaining in context window\")\n elif remaining > 0:\n self.io.tool_error(\n f\"{cost_pad}{fmt(remaining)} tokens remaining in context window (use /drop or\"\n \" /clear to make space)\"\n )\n else:\n self.io.tool_error(f\"{cost_pad}{fmt(remaining)} tokens remaining, window exhausted!\")\n self.io.tool_output(f\"{cost_pad}{fmt(limit)} tokens max context window size\")\n\n def cmd_undo(self, args):\n \"Undo the last git commit if it was done by aider\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if self.coder.repo.is_dirty():\n self.io.tool_error(\n \"The repository has uncommitted changes. Please commit or stash them before\"\n \" undoing.\"\n )\n return\n\n local_head = self.coder.repo.repo.git.rev_parse(\"HEAD\")\n current_branch = self.coder.repo.repo.active_branch.name\n try:\n remote_head = self.coder.repo.repo.git.rev_parse(f\"origin/{current_branch}\")\n has_origin = True\n except git.exc.GitCommandError:\n has_origin = False\n\n if has_origin:\n if local_head == remote_head:\n self.io.tool_error(\n \"The last commit has already been pushed to the origin. 
Undoing is not\"\n \" possible.\"\n )\n return\n\n last_commit = self.coder.repo.repo.head.commit\n if (\n not last_commit.message.startswith(\"aider:\")\n or last_commit.hexsha[:7] != self.coder.last_aider_commit_hash\n ):\n self.io.tool_error(\"The last commit was not made by aider in this chat session.\")\n return\n self.coder.repo.repo.git.reset(\"--hard\", \"HEAD~1\")\n self.io.tool_output(\n f\"{last_commit.message.strip()}\\n\"\n f\"The above commit {self.coder.last_aider_commit_hash} \"\n \"was reset and removed from git.\\n\"\n )\n\n if self.coder.main_model.send_undo_reply:\n return prompts.undo_command_reply\n\n def cmd_diff(self, args):\n \"Display the diff of the last aider commit\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.last_aider_commit_hash:\n self.io.tool_error(\"No previous aider commit found.\")\n return\n\n commits = f\"{self.coder.last_aider_commit_hash}~1\"\n diff = self.coder.repo.diff_commits(\n self.coder.pretty,\n commits,\n self.coder.last_aider_commit_hash,\n )\n\n # don't use io.tool_output() because we don't want to log or further colorize\n print(diff)\n\n def completions_add(self, partial):\n files = set(self.coder.get_all_relative_files())\n files = files - set(self.coder.get_inchat_relative_files())\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def glob_filtered_to_repo(self, pattern):\n raw_matched_files = list(Path(self.coder.root).glob(pattern))\n\n matched_files = []\n for fn in raw_matched_files:\n matched_files += expand_subdir(fn)\n\n matched_files = [str(Path(fn).relative_to(self.coder.root)) for fn in matched_files]\n\n # if repo, filter against it\n if self.coder.repo:\n git_files = self.coder.repo.get_tracked_files()\n matched_files = [fn for fn in matched_files if str(fn) in git_files]\n\n res = list(map(str, matched_files))\n return res\n\n def cmd_add(self, args):\n \"Add matching files to the chat session using glob patterns\"\n\n added_fnames = []\n git_added = []\n git_files = self.coder.repo.get_tracked_files() if self.coder.repo else []\n\n all_matched_files = set()\n for word in args.split():\n matched_files = self.glob_filtered_to_repo(word)\n\n if not matched_files:\n if any(char in word for char in \"*?[]\"):\n self.io.tool_error(f\"No files to add matching pattern: {word}\")\n else:\n if Path(word).exists():\n if Path(word).is_file():\n matched_files = [word]\n else:\n self.io.tool_error(f\"Unable to add: {word}\")\n elif self.io.confirm_ask(\n f\"No files matched '{word}'. 
Do you want to create the file?\"\n ):\n (Path(self.coder.root) / word).touch()\n matched_files = [word]\n\n all_matched_files.update(matched_files)\n\n for matched_file in all_matched_files:\n abs_file_path = self.coder.abs_root_path(matched_file)\n\n if self.coder.repo and matched_file not in git_files:\n self.coder.repo.repo.git.add(abs_file_path)\n git_added.append(matched_file)\n\n if abs_file_path in self.coder.abs_fnames:\n self.io.tool_error(f\"{matched_file} is already in the chat\")\n else:\n content = self.io.read_text(abs_file_path)\n if content is None:\n self.io.tool_error(f\"Unable to read {matched_file}\")\n else:\n self.coder.abs_fnames.add(abs_file_path)\n self.io.tool_output(f\"Added {matched_file} to the chat\")\n added_fnames.append(matched_file)\n\n if self.coder.repo and git_added:\n git_added = \" \".join(git_added)\n commit_message = f\"aider: Added {git_added}\"\n self.coder.repo.commit(message=commit_message)\n\n if not added_fnames:\n return\n\n # only reply if there's been some chatting since the last edit\n if not self.coder.cur_messages:\n return\n\n reply = prompts.added_files.format(fnames=\", \".join(added_fnames))\n return reply\n\n def completions_drop(self, partial):\n files = self.coder.get_inchat_relative_files()\n\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def cmd_drop(self, args):\n \"Remove matching files from the chat session\"\n\n if not args.strip():\n self.io.tool_output(\"Dropping all files from the chat session.\")\n self.coder.abs_fnames = set()\n\n for word in args.split():\n matched_files = self.glob_filtered_to_repo(word)\n\n if not matched_files:\n self.io.tool_error(f\"No files matched '{word}'\")\n\n for matched_file in matched_files:\n abs_fname = self.coder.abs_root_path(matched_file)\n if abs_fname in self.coder.abs_fnames:\n self.coder.abs_fnames.remove(abs_fname)\n self.io.tool_output(f\"Removed {matched_file} from the chat\")\n\n def cmd_git(self, args):\n \"Run a git command\"\n combined_output = None\n try:\n parsed_args = shlex.split(\"git \" + args)\n env = dict(GIT_EDITOR=\"true\", **subprocess.os.environ)\n result = subprocess.run(\n parsed_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, env=env\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running git command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n def cmd_run(self, args):\n \"Run a shell command and optionally add the output to the chat\"\n combined_output = None\n try:\n parsed_args = shlex.split(args)\n result = subprocess.run(\n parsed_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n if self.io.confirm_ask(\"Add the output to the chat?\", default=\"y\"):\n for line in combined_output.splitlines():\n self.io.tool_output(line, log_only=True)\n\n msg = prompts.run_output.format(\n command=args,\n output=combined_output,\n )\n return msg\n\n def cmd_exit(self, args):\n \"Exit the application\"\n sys.exit()\n\n def cmd_ls(self, args):\n \"List all known files and those included in the chat session\"\n\n files = self.coder.get_all_relative_files()\n\n other_files = []\n chat_files = []\n for file in files:\n abs_file_path = self.coder.abs_root_path(file)\n if 
abs_file_path in self.coder.abs_fnames:\n chat_files.append(file)\n else:\n other_files.append(file)\n\n if not chat_files and not other_files:\n self.io.tool_output(\"\\nNo files in chat or git repo.\")\n return\n\n if chat_files:\n self.io.tool_output(\"Files in chat:\\n\")\n for file in chat_files:\n self.io.tool_output(f\" {file}\")\n\n if other_files:\n self.io.tool_output(\"\\nRepo files not in the chat:\\n\")\n for file in other_files:\n self.io.tool_output(f\" {file}\")\n\n def cmd_help(self, args):\n \"Show help about all commands\"\n commands = sorted(self.get_commands())\n for cmd in commands:\n cmd_method_name = f\"cmd_{cmd[1:]}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n description = cmd_method.__doc__\n self.io.tool_output(f\"{cmd} {description}\")\n else:\n self.io.tool_output(f\"{cmd} No description available.\")\n\n def cmd_voice(self, args):\n \"Record and transcribe voice input\"\n\n if not self.voice:\n try:\n self.voice = voice.Voice()\n except voice.SoundDeviceError:\n self.io.tool_error(\"Unable to import `sounddevice`, is portaudio installed?\")\n return\n\n history_iter = self.io.get_input_history()\n\n history = []\n size = 0\n for line in history_iter:\n if line.startswith(\"/\"):\n continue\n if line in history:\n continue\n if size + len(line) > 1024:\n break\n size += len(line)\n history.append(line)\n\n history.reverse()\n history = \"\\n\".join(history)\n\n text = self.voice.record_and_transcribe(history, language=self.voice_language)\n if text:\n self.io.add_to_input_history(text)\n print()\n self.io.user_input(text, log_only=False)\n print()\n\n return text\n\n\ndef expand_subdir(file_path):\n file_path = Path(file_path)\n if file_path.is_file():\n yield file_path\n return\n\n if file_path.is_dir():\n for file in file_path.rglob(\"*\"):\n if file.is_file():\n yield str(file)\n",
"path": "aider/commands.py"
},
{
"content": "import codecs\nimport os\nimport shutil\nimport sys\nimport tempfile\nfrom io import StringIO\nfrom pathlib import Path\nfrom unittest import TestCase\n\nimport git\n\nfrom aider import models\nfrom aider.coders import Coder\nfrom aider.commands import Commands\nfrom aider.dump import dump # noqa: F401\nfrom aider.io import InputOutput\nfrom tests.utils import GitTemporaryDirectory\n\n\nclass TestCommands(TestCase):\n def setUp(self):\n self.original_cwd = os.getcwd()\n self.tempdir = tempfile.mkdtemp()\n os.chdir(self.tempdir)\n\n def tearDown(self):\n os.chdir(self.original_cwd)\n shutil.rmtree(self.tempdir, ignore_errors=True)\n\n def test_cmd_add(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with 'foo.txt' and 'bar.txt' as a single string\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Check if both files have been created in the temporary directory\n self.assertTrue(os.path.exists(\"foo.txt\"))\n self.assertTrue(os.path.exists(\"bar.txt\"))\n\n def test_cmd_add_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create some test files\n with open(\"test1.py\", \"w\") as f:\n f.write(\"print('test1')\")\n with open(\"test2.py\", \"w\") as f:\n f.write(\"print('test2')\")\n with open(\"test.txt\", \"w\") as f:\n f.write(\"test\")\n\n # Call the cmd_add method with a glob pattern\n commands.cmd_add(\"*.py\")\n\n # Check if the Python files have been added to the chat session\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n # Check if the text file has not been added to the chat session\n self.assertNotIn(str(Path(\"test.txt\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_no_match(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with a non-existent file pattern\n commands.cmd_add(\"*.nonexistent\")\n\n # Check if no files have been added to the chat session\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_drop_directory(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a directory and add files to it using pathlib\n Path(\"test_dir\").mkdir()\n Path(\"test_dir/another_dir\").mkdir()\n Path(\"test_dir/test_file1.txt\").write_text(\"Test file 1\")\n Path(\"test_dir/test_file2.txt\").write_text(\"Test file 2\")\n Path(\"test_dir/another_dir/test_file.txt\").write_text(\"Test file 3\")\n\n # Call the cmd_add method with a directory\n commands.cmd_add(\"test_dir test_dir/test_file2.txt\")\n\n # Check if the files have been added to the chat session\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames)\n\n 
commands.cmd_drop(\"test_dir/another_dir\")\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertNotIn(\n str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames\n )\n\n # Issue #139 /add problems when cwd != git_root\n\n # remember the proper abs path to this file\n abs_fname = str(Path(\"test_dir/another_dir/test_file.txt\").resolve())\n\n # chdir to someplace other than git_root\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add it via it's git_root referenced name\n commands.cmd_add(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertIn(abs_fname, coder.abs_fnames)\n\n # drop it via it's git_root referenced name\n commands.cmd_drop(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertNotIn(abs_fname, coder.abs_fnames)\n\n def test_cmd_drop_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n subdir = Path(\"subdir\")\n subdir.mkdir()\n (subdir / \"subtest1.py\").touch()\n (subdir / \"subtest2.py\").touch()\n\n Path(\"test1.py\").touch()\n Path(\"test2.py\").touch()\n\n # Add some files to the chat session\n commands.cmd_add(\"*.py\")\n\n self.assertEqual(len(coder.abs_fnames), 2)\n\n # Call the cmd_drop method with a glob pattern\n commands.cmd_drop(\"*2.py\")\n\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertNotIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_bad_encoding(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a new file foo.bad which will fail to decode as utf-8\n with codecs.open(\"foo.bad\", \"w\", encoding=\"iso-8859-15\") as f:\n f.write(\"ÆØÅ\") # Characters not present in utf-8\n\n commands.cmd_add(\"foo.bad\")\n\n self.assertEqual(coder.abs_fnames, set())\n\n def test_cmd_git(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n with GitTemporaryDirectory() as tempdir:\n # Create a file in the temporary directory\n with open(f\"{tempdir}/test.txt\", \"w\") as f:\n f.write(\"test\")\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Run the cmd_git method with the arguments \"commit -a -m msg\"\n commands.cmd_git(\"add test.txt\")\n commands.cmd_git(\"commit -a -m msg\")\n\n # Check if the file has been committed to the repository\n repo = git.Repo(tempdir)\n files_in_repo = repo.git.ls_files()\n self.assertIn(\"test.txt\", files_in_repo)\n\n def test_cmd_tokens(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Redirect the standard output to an instance of io.StringIO\n stdout = StringIO()\n sys.stdout = stdout\n\n commands.cmd_tokens(\"\")\n\n # Reset the standard output\n sys.stdout = sys.__stdout__\n\n # Get the console output\n console_output = stdout.getvalue()\n\n self.assertIn(\"foo.txt\", console_output)\n 
self.assertIn(\"bar.txt\", console_output)\n\n def test_cmd_add_from_subdir(self):\n repo = git.Repo.init()\n repo.config_writer().set_value(\"user\", \"name\", \"Test User\").release()\n repo.config_writer().set_value(\"user\", \"email\", \"testuser@example.com\").release()\n\n # Create three empty files and add them to the git repository\n filenames = [\"one.py\", Path(\"subdir\") / \"two.py\", Path(\"anotherdir\") / \"three.py\"]\n for filename in filenames:\n file_path = Path(filename)\n file_path.parent.mkdir(parents=True, exist_ok=True)\n file_path.touch()\n repo.git.add(str(file_path))\n repo.git.commit(\"-m\", \"added\")\n\n filenames = [str(Path(fn).resolve()) for fn in filenames]\n\n ###\n\n os.chdir(\"subdir\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # this should get added\n commands.cmd_add(str(Path(\"anotherdir\") / \"three.py\"))\n\n # this should add one.py\n commands.cmd_add(\"*.py\")\n\n self.assertIn(filenames[0], coder.abs_fnames)\n self.assertNotIn(filenames[1], coder.abs_fnames)\n self.assertIn(filenames[2], coder.abs_fnames)\n\n def test_cmd_commit(self):\n with GitTemporaryDirectory():\n fname = \"test.txt\"\n with open(fname, \"w\") as f:\n f.write(\"test\")\n repo = git.Repo()\n repo.git.add(fname)\n repo.git.commit(\"-m\", \"initial\")\n\n io = InputOutput(pretty=False, yes=True)\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n self.assertFalse(repo.is_dirty())\n with open(fname, \"w\") as f:\n f.write(\"new\")\n self.assertTrue(repo.is_dirty())\n\n commit_message = \"Test commit message\"\n commands.cmd_commit(commit_message)\n self.assertFalse(repo.is_dirty())\n",
"path": "tests/test_commands.py"
}
] | 12_8 | python |
import os
import shutil
import sys
import unittest
import tempfile
from pathlib import Path
from unittest import TestCase


class TestCommands(TestCase):
def setUp(self):
self.original_cwd = os.getcwd()
self.tempdir = tempfile.mkdtemp()
        os.chdir(self.tempdir)

def tearDown(self):
os.chdir(self.original_cwd)
        shutil.rmtree(self.tempdir, ignore_errors=True)

def test_cmd_add_from_subdir_again(self):
from aider import models
from aider.commands import Commands
from aider.io import InputOutput
from tests.utils import GitTemporaryDirectory
with GitTemporaryDirectory():
io = InputOutput(pretty=False, yes=False)
from aider.coders import Coder
coder = Coder.create(models.GPT35, None, io)
commands = Commands(io, coder)
Path("side_dir").mkdir()
os.chdir("side_dir")
# add a file that is in the side_dir
with open("temp.txt", "w"):
pass
            # this was blowing up with GitCommandError, per:
            # https://github.com/paul-gauthier/aider/issues/201
commands.cmd_add("temp.txt")
# Run the test suite
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCommands))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
        sys.exit(1)


if __name__ == '__main__':
main()
|
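The cmd_add patch above anchors bare filenames to self.coder.root instead of the process working directory, which is what makes /add work from a subdirectory. A minimal sketch of that resolution pattern follows; the function and parameter names are stand-ins for illustration, not aider's API.

from pathlib import Path

def match_word(project_root, word, create_missing=False):
    # Anchor the user-supplied name to the project root, mirroring
    # `fname = Path(self.coder.root) / word` in the patch above; a bare
    # Path(word) would be interpreted relative to os.getcwd() instead.
    fname = Path(project_root) / word
    if fname.exists():
        # Only plain files are addable here; directories are handled by
        # the glob/expand path in the real command.
        return [str(fname)] if fname.is_file() else []
    if create_missing:
        # Mirrors the confirm-and-touch branch of the patch.
        fname.touch()
        return [str(fname)]
    return []

# From repo/side_dir, "temp.txt" still lands under the repo root:
# match_word("/path/to/repo", "temp.txt", create_missing=True)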
https://github.com/teamqurrent/aider | Your objective is, in `commands.py`, to refine the `cmd_drop` method in the `Commands` class to ensure consistent and accurate path resolution for files being removed. The goal is to standardize the way file paths are resolved within the method, using a specific function from the coder object, thereby ensuring that file paths are handled correctly regardless of the current working directory or of how relative paths are supplied. | d785d9a | aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
certifi==2023.5.7
charset-normalizer==3.1.0
frozenlist==1.3.3
gitdb==4.0.10
GitPython==3.1.31
idna==3.4
markdown-it-py==2.2.0
mdurl==0.1.2
multidict==6.0.4
openai==0.27.6
prompt-toolkit==3.0.38
Pygments==2.15.1
requests==2.30.0
rich==13.3.5
smmap==5.0.0
tqdm==4.65.0
urllib3==2.0.2
wcwidth==0.2.6
yarl==1.9.2
pytest==7.3.1
tiktoken==0.4.0
configargparse
PyYAML
backoff==2.2.1
networkx==3.1
diskcache==5.6.1
numpy==1.26.1
scipy==1.11.3
jsonschema==4.17.3
sounddevice==0.4.6
soundfile==0.12.1
pathspec==0.11.2
grep-ast==0.2.4
| python3.9 | 450a5ff | diff --git a/aider/commands.py b/aider/commands.py
--- a/aider/commands.py
+++ b/aider/commands.py
@@ -328,7 +328,7 @@ class Commands:
self.io.tool_error(f"No files matched '{word}'")
for matched_file in matched_files:
- abs_fname = str(Path(matched_file).resolve())
+ abs_fname = self.coder.abs_root_path(matched_file)
if abs_fname in self.coder.abs_fnames:
self.coder.abs_fnames.remove(abs_fname)
self.io.tool_output(f"Removed {matched_file} from the chat")
diff --git a/tests/test_commands.py b/tests/test_commands.py
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -127,6 +127,12 @@ class TestCommands(TestCase):
# it should be there, but was not in v0.10.0
self.assertIn(abs_fname, coder.abs_fnames)
+ # drop it via it's git_root referenced name
+ commands.cmd_drop("test_dir/another_dir/test_file.txt")
+
+ # it should be there, but was not in v0.10.0
+ self.assertNotIn(abs_fname, coder.abs_fnames)
+
def test_cmd_drop_with_glob_patterns(self):
# Initialize the Commands and InputOutput objects
io = InputOutput(pretty=False, yes=True)
| [
{
"content": "import json\nimport shlex\nimport subprocess\nimport sys\nfrom pathlib import Path\n\nimport git\nimport tiktoken\nfrom prompt_toolkit.completion import Completion\n\nfrom aider import prompts\n\nfrom .dump import dump # noqa: F401\n\n\nclass Commands:\n def __init__(self, io, coder):\n self.io = io\n self.coder = coder\n self.tokenizer = tiktoken.encoding_for_model(coder.main_model.name)\n\n def is_command(self, inp):\n if inp[0] == \"/\":\n return True\n\n def get_commands(self):\n commands = []\n for attr in dir(self):\n if attr.startswith(\"cmd_\"):\n commands.append(\"/\" + attr[4:])\n\n return commands\n\n def get_command_completions(self, cmd_name, partial):\n cmd_completions_method_name = f\"completions_{cmd_name}\"\n cmd_completions_method = getattr(self, cmd_completions_method_name, None)\n if cmd_completions_method:\n for completion in cmd_completions_method(partial):\n yield completion\n\n def do_run(self, cmd_name, args):\n cmd_method_name = f\"cmd_{cmd_name}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n return cmd_method(args)\n else:\n self.io.tool_output(f\"Error: Command {cmd_name} not found.\")\n\n def matching_commands(self, inp):\n words = inp.strip().split()\n if not words:\n return\n\n first_word = words[0]\n rest_inp = inp[len(words[0]) :]\n\n all_commands = self.get_commands()\n matching_commands = [cmd for cmd in all_commands if cmd.startswith(first_word)]\n return matching_commands, first_word, rest_inp\n\n def run(self, inp):\n res = self.matching_commands(inp)\n if res is None:\n return\n matching_commands, first_word, rest_inp = res\n if len(matching_commands) == 1:\n return self.do_run(matching_commands[0][1:], rest_inp)\n elif len(matching_commands) > 1:\n self.io.tool_error(f\"Ambiguous command: {', '.join(matching_commands)}\")\n else:\n self.io.tool_error(f\"Invalid command: {first_word}\")\n\n # any method called cmd_xxx becomes a command automatically.\n # each one must take an args param.\n\n def cmd_commit(self, args):\n \"Commit edits to the repo made outside the chat (commit message optional)\"\n\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.repo.is_dirty():\n self.io.tool_error(\"No more changes to commit.\")\n return\n\n commit_message = args.strip()\n self.coder.commit(message=commit_message, which=\"repo_files\")\n\n def cmd_clear(self, args):\n \"Clear the chat history\"\n\n self.coder.done_messages = []\n self.coder.cur_messages = []\n\n def cmd_tokens(self, args):\n \"Report on the number of tokens used by the current chat context\"\n\n res = []\n\n # system messages\n msgs = [\n dict(role=\"system\", content=self.coder.gpt_prompts.main_system),\n dict(role=\"system\", content=self.coder.gpt_prompts.system_reminder),\n ]\n tokens = len(self.tokenizer.encode(json.dumps(msgs)))\n res.append((tokens, \"system messages\", \"\"))\n\n # chat history\n msgs = self.coder.done_messages + self.coder.cur_messages\n if msgs:\n msgs = [dict(role=\"dummy\", content=msg) for msg in msgs]\n msgs = json.dumps(msgs)\n tokens = len(self.tokenizer.encode(msgs))\n res.append((tokens, \"chat history\", \"use /clear to clear\"))\n\n # repo map\n other_files = set(self.coder.get_all_abs_files()) - set(self.coder.abs_fnames)\n if self.coder.repo_map:\n repo_content = self.coder.repo_map.get_repo_map(self.coder.abs_fnames, other_files)\n if repo_content:\n tokens = len(self.tokenizer.encode(repo_content))\n res.append((tokens, \"repository map\", \"use --map-tokens to 
resize\"))\n\n # files\n for fname in self.coder.abs_fnames:\n relative_fname = self.coder.get_rel_fname(fname)\n content = self.io.read_text(fname)\n # approximate\n content = f\"{relative_fname}\\n```\\n\" + content + \"```\\n\"\n tokens = len(self.tokenizer.encode(content))\n res.append((tokens, f\"{relative_fname}\", \"use /drop to drop from chat\"))\n\n self.io.tool_output(\"Approximate context window usage, in tokens:\")\n self.io.tool_output()\n\n width = 8\n\n def fmt(v):\n return format(int(v), \",\").rjust(width)\n\n col_width = max(len(row[1]) for row in res)\n\n total = 0\n for tk, msg, tip in res:\n total += tk\n msg = msg.ljust(col_width)\n self.io.tool_output(f\"{fmt(tk)} {msg} {tip}\")\n\n self.io.tool_output(\"=\" * width)\n self.io.tool_output(f\"{fmt(total)} tokens total\")\n\n limit = self.coder.main_model.max_context_tokens\n remaining = limit - total\n if remaining > 0:\n self.io.tool_output(f\"{fmt(remaining)} tokens remaining in context window\")\n else:\n self.io.tool_error(f\"{fmt(remaining)} tokens remaining, window exhausted!\")\n self.io.tool_output(f\"{fmt(limit)} tokens max context window size\")\n\n def cmd_undo(self, args):\n \"Undo the last git commit if it was done by aider\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if self.coder.repo.is_dirty():\n self.io.tool_error(\n \"The repository has uncommitted changes. Please commit or stash them before\"\n \" undoing.\"\n )\n return\n\n local_head = self.coder.repo.git.rev_parse(\"HEAD\")\n current_branch = self.coder.repo.active_branch.name\n try:\n remote_head = self.coder.repo.git.rev_parse(f\"origin/{current_branch}\")\n has_origin = True\n except git.exc.GitCommandError:\n has_origin = False\n\n if has_origin:\n if local_head == remote_head:\n self.io.tool_error(\n \"The last commit has already been pushed to the origin. 
Undoing is not\"\n \" possible.\"\n )\n return\n\n last_commit = self.coder.repo.head.commit\n if (\n not last_commit.message.startswith(\"aider:\")\n or last_commit.hexsha[:7] != self.coder.last_aider_commit_hash\n ):\n self.io.tool_error(\"The last commit was not made by aider in this chat session.\")\n return\n self.coder.repo.git.reset(\"--hard\", \"HEAD~1\")\n self.io.tool_output(\n f\"{last_commit.message.strip()}\\n\"\n f\"The above commit {self.coder.last_aider_commit_hash} \"\n \"was reset and removed from git.\\n\"\n )\n\n if self.coder.main_model.send_undo_reply:\n return prompts.undo_command_reply\n\n def cmd_diff(self, args):\n \"Display the diff of the last aider commit\"\n if not self.coder.repo:\n self.io.tool_error(\"No git repository found.\")\n return\n\n if not self.coder.last_aider_commit_hash:\n self.io.tool_error(\"No previous aider commit found.\")\n return\n\n commits = f\"{self.coder.last_aider_commit_hash}~1\"\n diff = self.coder.get_diffs(commits, self.coder.last_aider_commit_hash)\n\n # don't use io.tool_output() because we don't want to log or further colorize\n print(diff)\n\n def completions_add(self, partial):\n files = set(self.coder.get_all_relative_files())\n files = files - set(self.coder.get_inchat_relative_files())\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def glob_filtered_to_repo(self, pattern):\n raw_matched_files = list(Path(self.coder.root).glob(pattern))\n\n matched_files = []\n for fn in raw_matched_files:\n matched_files += expand_subdir(fn)\n\n matched_files = [str(Path(fn).relative_to(self.coder.root)) for fn in matched_files]\n\n # if repo, filter against it\n if self.coder.repo:\n git_files = self.coder.get_tracked_files()\n matched_files = [fn for fn in matched_files if str(fn) in git_files]\n\n res = list(map(str, matched_files))\n return res\n\n def cmd_add(self, args):\n \"Add matching files to the chat session using glob patterns\"\n\n added_fnames = []\n git_added = []\n git_files = self.coder.get_tracked_files()\n\n all_matched_files = set()\n for word in args.split():\n matched_files = self.glob_filtered_to_repo(word)\n\n if not matched_files:\n if any(char in word for char in \"*?[]\"):\n self.io.tool_error(f\"No files to add matching pattern: {word}\")\n else:\n if Path(word).exists():\n if Path(word).is_file():\n matched_files = [word]\n else:\n self.io.tool_error(f\"Unable to add: {word}\")\n elif self.io.confirm_ask(\n f\"No files matched '{word}'. 
Do you want to create the file?\"\n ):\n (Path(self.coder.root) / word).touch()\n matched_files = [word]\n\n all_matched_files.update(matched_files)\n\n for matched_file in all_matched_files:\n abs_file_path = self.coder.abs_root_path(matched_file)\n\n if self.coder.repo and matched_file not in git_files:\n self.coder.repo.git.add(abs_file_path)\n git_added.append(matched_file)\n\n if abs_file_path in self.coder.abs_fnames:\n self.io.tool_error(f\"{matched_file} is already in the chat\")\n else:\n content = self.io.read_text(abs_file_path)\n if content is None:\n self.io.tool_error(f\"Unable to read {matched_file}\")\n else:\n self.coder.abs_fnames.add(abs_file_path)\n self.io.tool_output(f\"Added {matched_file} to the chat\")\n added_fnames.append(matched_file)\n\n if self.coder.repo and git_added:\n git_added = \" \".join(git_added)\n commit_message = f\"aider: Added {git_added}\"\n self.coder.repo.git.commit(\"-m\", commit_message, \"--no-verify\")\n commit_hash = self.coder.repo.head.commit.hexsha[:7]\n self.io.tool_output(f\"Commit {commit_hash} {commit_message}\")\n\n if not added_fnames:\n return\n\n # only reply if there's been some chatting since the last edit\n if not self.coder.cur_messages:\n return\n\n reply = prompts.added_files.format(fnames=\", \".join(added_fnames))\n return reply\n\n def completions_drop(self, partial):\n files = self.coder.get_inchat_relative_files()\n\n for fname in files:\n if partial.lower() in fname.lower():\n yield Completion(fname, start_position=-len(partial))\n\n def cmd_drop(self, args):\n \"Remove matching files from the chat session\"\n\n if not args.strip():\n self.io.tool_output(\"Dropping all files from the chat session.\")\n self.coder.abs_fnames = set()\n\n for word in args.split():\n matched_files = self.glob_filtered_to_repo(word)\n\n if not matched_files:\n self.io.tool_error(f\"No files matched '{word}'\")\n\n for matched_file in matched_files:\n abs_fname = str(Path(matched_file).resolve())\n if abs_fname in self.coder.abs_fnames:\n self.coder.abs_fnames.remove(abs_fname)\n self.io.tool_output(f\"Removed {matched_file} from the chat\")\n\n def cmd_git(self, args):\n \"Run a git command\"\n combined_output = None\n try:\n parsed_args = shlex.split(\"git \" + args)\n result = subprocess.run(\n parsed_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running git command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n def cmd_run(self, args):\n \"Run a shell command and optionally add the output to the chat\"\n combined_output = None\n try:\n parsed_args = shlex.split(args)\n result = subprocess.run(\n parsed_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True\n )\n combined_output = result.stdout\n except Exception as e:\n self.io.tool_error(f\"Error running command: {e}\")\n\n if combined_output is None:\n return\n\n self.io.tool_output(combined_output)\n\n if self.io.confirm_ask(\"Add the output to the chat?\", default=\"y\"):\n for line in combined_output.splitlines():\n self.io.tool_output(line, log_only=True)\n\n msg = prompts.run_output.format(\n command=args,\n output=combined_output,\n )\n return msg\n\n def cmd_exit(self, args):\n \"Exit the application\"\n sys.exit()\n\n def cmd_ls(self, args):\n \"List all known files and those included in the chat session\"\n\n files = self.coder.get_all_relative_files()\n\n other_files = []\n chat_files = []\n for file in 
files:\n abs_file_path = self.coder.abs_root_path(file)\n if abs_file_path in self.coder.abs_fnames:\n chat_files.append(file)\n else:\n other_files.append(file)\n\n if not chat_files and not other_files:\n self.io.tool_output(\"\\nNo files in chat or git repo.\")\n return\n\n if chat_files:\n self.io.tool_output(\"Files in chat:\\n\")\n for file in chat_files:\n self.io.tool_output(f\" {file}\")\n\n if other_files:\n self.io.tool_output(\"\\nRepo files not in the chat:\\n\")\n for file in other_files:\n self.io.tool_output(f\" {file}\")\n\n def cmd_help(self, args):\n \"Show help about all commands\"\n commands = sorted(self.get_commands())\n for cmd in commands:\n cmd_method_name = f\"cmd_{cmd[1:]}\"\n cmd_method = getattr(self, cmd_method_name, None)\n if cmd_method:\n description = cmd_method.__doc__\n self.io.tool_output(f\"{cmd} {description}\")\n else:\n self.io.tool_output(f\"{cmd} No description available.\")\n\n\ndef expand_subdir(file_path):\n file_path = Path(file_path)\n if file_path.is_file():\n yield file_path\n return\n\n if file_path.is_dir():\n for file in file_path.rglob(\"*\"):\n if file.is_file():\n yield str(file)\n",
"path": "aider/commands.py"
},
{
"content": "import codecs\nimport os\nimport shutil\nimport sys\nimport tempfile\nfrom io import StringIO\nfrom pathlib import Path\nfrom unittest import TestCase\n\nimport git\n\nfrom aider import models\nfrom aider.coders import Coder\nfrom aider.commands import Commands\nfrom aider.dump import dump # noqa: F401\nfrom aider.io import InputOutput\nfrom tests.utils import GitTemporaryDirectory\n\n\nclass TestCommands(TestCase):\n def setUp(self):\n self.original_cwd = os.getcwd()\n self.tempdir = tempfile.mkdtemp()\n os.chdir(self.tempdir)\n\n def tearDown(self):\n os.chdir(self.original_cwd)\n shutil.rmtree(self.tempdir)\n\n def test_cmd_add(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with 'foo.txt' and 'bar.txt' as a single string\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Check if both files have been created in the temporary directory\n self.assertTrue(os.path.exists(\"foo.txt\"))\n self.assertTrue(os.path.exists(\"bar.txt\"))\n\n def test_cmd_add_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create some test files\n with open(\"test1.py\", \"w\") as f:\n f.write(\"print('test1')\")\n with open(\"test2.py\", \"w\") as f:\n f.write(\"print('test2')\")\n with open(\"test.txt\", \"w\") as f:\n f.write(\"test\")\n\n # Call the cmd_add method with a glob pattern\n commands.cmd_add(\"*.py\")\n\n # Check if the Python files have been added to the chat session\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n # Check if the text file has not been added to the chat session\n self.assertNotIn(str(Path(\"test.txt\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_no_match(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Call the cmd_add method with a non-existent file pattern\n commands.cmd_add(\"*.nonexistent\")\n\n # Check if no files have been added to the chat session\n self.assertEqual(len(coder.abs_fnames), 0)\n\n def test_cmd_add_drop_directory(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=False)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a directory and add files to it using pathlib\n Path(\"test_dir\").mkdir()\n Path(\"test_dir/another_dir\").mkdir()\n Path(\"test_dir/test_file1.txt\").write_text(\"Test file 1\")\n Path(\"test_dir/test_file2.txt\").write_text(\"Test file 2\")\n Path(\"test_dir/another_dir/test_file.txt\").write_text(\"Test file 3\")\n\n # Call the cmd_add method with a directory\n commands.cmd_add(\"test_dir test_dir/test_file2.txt\")\n\n # Check if the files have been added to the chat session\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames)\n\n 
commands.cmd_drop(\"test_dir/another_dir\")\n self.assertIn(str(Path(\"test_dir/test_file1.txt\").resolve()), coder.abs_fnames)\n self.assertIn(str(Path(\"test_dir/test_file2.txt\").resolve()), coder.abs_fnames)\n self.assertNotIn(\n str(Path(\"test_dir/another_dir/test_file.txt\").resolve()), coder.abs_fnames\n )\n\n # Issue #139 /add problems when cwd != git_root\n\n # remember the proper abs path to this file\n abs_fname = str(Path(\"test_dir/another_dir/test_file.txt\").resolve())\n\n # chdir to someplace other than git_root\n Path(\"side_dir\").mkdir()\n os.chdir(\"side_dir\")\n\n # add it via it's git_root referenced name\n commands.cmd_add(\"test_dir/another_dir/test_file.txt\")\n\n # it should be there, but was not in v0.10.0\n self.assertIn(abs_fname, coder.abs_fnames)\n\n def test_cmd_drop_with_glob_patterns(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n subdir = Path(\"subdir\")\n subdir.mkdir()\n (subdir / \"subtest1.py\").touch()\n (subdir / \"subtest2.py\").touch()\n\n Path(\"test1.py\").touch()\n Path(\"test2.py\").touch()\n\n # Add some files to the chat session\n commands.cmd_add(\"*.py\")\n\n self.assertEqual(len(coder.abs_fnames), 2)\n\n # Call the cmd_drop method with a glob pattern\n commands.cmd_drop(\"*2.py\")\n\n self.assertIn(str(Path(\"test1.py\").resolve()), coder.abs_fnames)\n self.assertNotIn(str(Path(\"test2.py\").resolve()), coder.abs_fnames)\n\n def test_cmd_add_bad_encoding(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n from aider.coders import Coder\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Create a new file foo.bad which will fail to decode as utf-8\n with codecs.open(\"foo.bad\", \"w\", encoding=\"iso-8859-15\") as f:\n f.write(\"ÆØÅ\") # Characters not present in utf-8\n\n commands.cmd_add(\"foo.bad\")\n\n self.assertEqual(coder.abs_fnames, set())\n\n def test_cmd_git(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n with GitTemporaryDirectory() as tempdir:\n # Create a file in the temporary directory\n with open(f\"{tempdir}/test.txt\", \"w\") as f:\n f.write(\"test\")\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n # Run the cmd_git method with the arguments \"commit -a -m msg\"\n commands.cmd_git(\"add test.txt\")\n commands.cmd_git(\"commit -a -m msg\")\n\n # Check if the file has been committed to the repository\n repo = git.Repo(tempdir)\n files_in_repo = repo.git.ls_files()\n self.assertIn(\"test.txt\", files_in_repo)\n\n def test_cmd_tokens(self):\n # Initialize the Commands and InputOutput objects\n io = InputOutput(pretty=False, yes=True)\n\n coder = Coder.create(models.GPT35, None, io)\n commands = Commands(io, coder)\n\n commands.cmd_add(\"foo.txt bar.txt\")\n\n # Redirect the standard output to an instance of io.StringIO\n stdout = StringIO()\n sys.stdout = stdout\n\n commands.cmd_tokens(\"\")\n\n # Reset the standard output\n sys.stdout = sys.__stdout__\n\n # Get the console output\n console_output = stdout.getvalue()\n\n self.assertIn(\"foo.txt\", console_output)\n self.assertIn(\"bar.txt\", console_output)\n",
"path": "tests/test_commands.py"
}
] | 12_9 | python | import os
import shutil
import sys
import unittest
import tempfile
from pathlib import Path
from unittest import TestCase
class TestCommands(TestCase):
def setUp(self):
self.original_cwd = os.getcwd()
self.tempdir = tempfile.mkdtemp()
os.chdir(self.tempdir)
def tearDown(self):
os.chdir(self.original_cwd)
shutil.rmtree(self.tempdir, ignore_errors=True)
def test_cmd_add_drop_directory(self):
from aider import models
from aider.commands import Commands
from aider.io import InputOutput
# Initialize the Commands and InputOutput objects
io = InputOutput(pretty=False, yes=False)
from aider.coders import Coder
coder = Coder.create(models.GPT35, None, io)
commands = Commands(io, coder)
# Create a directory and add files to it using pathlib
Path("test_dir").mkdir()
Path("test_dir/another_dir").mkdir()
Path("test_dir/test_file1.txt").write_text("Test file 1")
Path("test_dir/test_file2.txt").write_text("Test file 2")
Path("test_dir/another_dir/test_file.txt").write_text("Test file 3")
# Call the cmd_add method with a directory
commands.cmd_add("test_dir test_dir/test_file2.txt")
# Check if the files have been added to the chat session
self.assertIn(str(Path("test_dir/test_file1.txt").resolve()), coder.abs_fnames)
self.assertIn(str(Path("test_dir/test_file2.txt").resolve()), coder.abs_fnames)
self.assertIn(str(Path("test_dir/another_dir/test_file.txt").resolve()), coder.abs_fnames)
commands.cmd_drop("test_dir/another_dir")
self.assertIn(str(Path("test_dir/test_file1.txt").resolve()), coder.abs_fnames)
self.assertIn(str(Path("test_dir/test_file2.txt").resolve()), coder.abs_fnames)
self.assertNotIn(
str(Path("test_dir/another_dir/test_file.txt").resolve()), coder.abs_fnames
)
# remember the proper abs path to this file
abs_fname = str(Path("test_dir/another_dir/test_file.txt").resolve())
# chdir to someplace other than git_root
Path("side_dir").mkdir()
os.chdir("side_dir")
        # add it via its git_root-referenced name
commands.cmd_add("test_dir/another_dir/test_file.txt")
# it should be there, but was not in v0.10.0
self.assertIn(abs_fname, coder.abs_fnames)
        # drop it via its git_root-referenced name
        commands.cmd_drop("test_dir/another_dir/test_file.txt")
        # it should now be gone
self.assertNotIn(abs_fname, coder.abs_fnames)
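
# Illustrative sketch (not executed by the test above): the behavior verified
# in test_cmd_add_drop_directory is that file arguments resolve against the
# git root rather than the current working directory. `_resolve_against_root`
# is a hypothetical helper, not aider's actual API; it assumes `root` is an
# absolute path and reuses the module-level `Path` import.
def _resolve_against_root(root, fname):
    p = Path(fname)
    # Relative names anchor at the project root, so the result stays stable
    # even after os.chdir() into a subdirectory (the regression above).
    return str(p if p.is_absolute() else (Path(root) / p).resolve())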
# Run the suite directly so the script exits with a pass/fail status code.
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCommands))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
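
The next record targets header validation in psf/requests. As a reading aid, the sketch below distills the isinstance-based dispatch that the record's solution patch introduces into a standalone, runnable form. The regular expressions are copied from requests' `_internal_utils.py`; the `InvalidHeader` stand-in and the `MyString` demo at the bottom are illustrative assumptions, not part of the dataset.

import re

_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
_VALIDATORS_STR = (_NAME_RE_STR, _VALUE_RE_STR)
_VALIDATORS_BYTE = (_NAME_RE_BYTE, _VALUE_RE_BYTE)

class InvalidHeader(ValueError):
    """Stand-in for requests.exceptions.InvalidHeader."""

def _validate_header_part(header, header_part, header_validator_index):
    # isinstance() rather than an exact type() lookup lets str/bytes
    # subclasses through, which is the backwards-compatibility point of
    # the solution patch in the record below.
    if isinstance(header_part, str):
        validator = _VALIDATORS_STR[header_validator_index]
    elif isinstance(header_part, bytes):
        validator = _VALIDATORS_BYTE[header_validator_index]
    else:
        raise InvalidHeader(
            f"Header part ({header_part!r}) from {header} "
            f"must be of type str or bytes, not {type(header_part)}"
        )
    if not validator.match(header_part):
        kind = "name" if header_validator_index == 0 else "value"
        raise InvalidHeader(f"Invalid character(s) in header {kind}: {header_part!r}")

class MyString(str):
    pass

# A str subclass passes under the isinstance() dispatch; it would be rejected
# by a strict `type(part) not in HEADER_VALIDATORS` check. Passes silently.
_validate_header_part((MyString("x-custom"), "myheader"), MyString("x-custom"), 0)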
https://github.com/teamqurrent/requests | Your objective is to improve the validation of header parts (name and value) by refining how each part is checked against its expected type (str or bytes) and its format. The key files involved are `requests/_internal_utils.py` and `requests/utils.py`. The primary items to modify are `_validate_header_part` in `utils.py` and `HEADER_VALIDATORS` in `_internal_utils.py`. | e90852d | -e .[socks]
pytest
pytest-cov
pytest-httpbin==1.0.0
pytest-mock
httpbin==0.7.0
trustme
wheel
chardet>=3.0.2,<3.1.0
idna>=2.5,<2.8
urllib3>=1.21.1,<1.24
certifi>=2017.4.17
# Flask Stack
Flask>1.0,<2.0
markupsafe<2.1
| python3.9 | 7f694b79 | diff --git a/requests/_internal_utils.py b/requests/_internal_utils.py
--- a/requests/_internal_utils.py
+++ b/requests/_internal_utils.py
@@ -14,9 +14,11 @@ _VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
+_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR)
+_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE)
HEADER_VALIDATORS = {
- bytes: (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE),
- str: (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR),
+ bytes: _HEADER_VALIDATORS_BYTE,
+ str: _HEADER_VALIDATORS_STR,
}
diff --git a/requests/utils.py b/requests/utils.py
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -25,7 +25,12 @@ from . import certs
from .__version__ import __version__
# to_native_string is unused here, but imported here for backwards compatibility
-from ._internal_utils import HEADER_VALIDATORS, to_native_string # noqa: F401
+from ._internal_utils import ( # noqa: F401
+ _HEADER_VALIDATORS_BYTE,
+ _HEADER_VALIDATORS_STR,
+ HEADER_VALIDATORS,
+ to_native_string,
+)
from .compat import (
Mapping,
basestring,
@@ -1031,20 +1036,23 @@ def check_header_validity(header):
:param header: tuple, in the format (name, value).
"""
name, value = header
+ _validate_header_part(header, name, 0)
+ _validate_header_part(header, value, 1)
- for part in header:
- if type(part) not in HEADER_VALIDATORS:
- raise InvalidHeader(
- f"Header part ({part!r}) from {{{name!r}: {value!r}}} must be "
- f"of type str or bytes, not {type(part)}"
- )
-
- _validate_header_part(name, "name", HEADER_VALIDATORS[type(name)][0])
- _validate_header_part(value, "value", HEADER_VALIDATORS[type(value)][1])
+def _validate_header_part(header, header_part, header_validator_index):
+ if isinstance(header_part, str):
+ validator = _HEADER_VALIDATORS_STR[header_validator_index]
+ elif isinstance(header_part, bytes):
+ validator = _HEADER_VALIDATORS_BYTE[header_validator_index]
+ else:
+ raise InvalidHeader(
+ f"Header part ({header_part!r}) from {header} "
+ f"must be of type str or bytes, not {type(header_part)}"
+ )
-def _validate_header_part(header_part, header_kind, validator):
if not validator.match(header_part):
+ header_kind = "name" if header_validator_index == 0 else "value"
raise InvalidHeader(
f"Invalid leading whitespace, reserved character(s), or return"
f"character(s) in header {header_kind}: {header_part!r}"
diff --git a/tests/test_requests.py b/tests/test_requests.py
--- a/tests/test_requests.py
+++ b/tests/test_requests.py
@@ -1752,6 +1752,31 @@ class TestRequests:
with pytest.raises(InvalidHeader):
requests.get(httpbin("get"), headers=invalid_header)
+ def test_header_with_subclass_types(self, httpbin):
+ """If the subclasses does not behave *exactly* like
+ the base bytes/str classes, this is not supported.
+ This test is for backwards compatibility.
+ """
+
+ class MyString(str):
+ pass
+
+ class MyBytes(bytes):
+ pass
+
+ r_str = requests.get(httpbin("get"), headers={MyString("x-custom"): "myheader"})
+ assert r_str.request.headers["x-custom"] == "myheader"
+
+ r_bytes = requests.get(
+ httpbin("get"), headers={MyBytes(b"x-custom"): b"myheader"}
+ )
+ assert r_bytes.request.headers["x-custom"] == b"myheader"
+
+ r_mixed = requests.get(
+ httpbin("get"), headers={MyString("x-custom"): MyBytes(b"myheader")}
+ )
+ assert r_mixed.request.headers["x-custom"] == b"myheader"
+
@pytest.mark.parametrize("files", ("foo", b"foo", bytearray(b"foo")))
def test_can_send_objects_with_files(self, httpbin, files):
data = {"a": "this is a string"}
| [
{
"content": "\"\"\"\nrequests._internal_utils\n~~~~~~~~~~~~~~\n\nProvides utility functions that are consumed internally by Requests\nwhich depend on extremely few external helpers (such as compat)\n\"\"\"\nimport re\n\nfrom .compat import builtin_str\n\n_VALID_HEADER_NAME_RE_BYTE = re.compile(rb\"^[^:\\s][^:\\r\\n]*$\")\n_VALID_HEADER_NAME_RE_STR = re.compile(r\"^[^:\\s][^:\\r\\n]*$\")\n_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb\"^\\S[^\\r\\n]*$|^$\")\n_VALID_HEADER_VALUE_RE_STR = re.compile(r\"^\\S[^\\r\\n]*$|^$\")\n\nHEADER_VALIDATORS = {\n bytes: (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE),\n str: (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR),\n}\n\n\ndef to_native_string(string, encoding=\"ascii\"):\n \"\"\"Given a string object, regardless of type, returns a representation of\n that string in the native string type, encoding and decoding where\n necessary. This assumes ASCII unless told otherwise.\n \"\"\"\n if isinstance(string, builtin_str):\n out = string\n else:\n out = string.decode(encoding)\n\n return out\n\n\ndef unicode_is_ascii(u_string):\n \"\"\"Determine if unicode string only contains ASCII characters.\n\n :param str u_string: unicode string to check. Must be unicode\n and not Python 2 `str`.\n :rtype: bool\n \"\"\"\n assert isinstance(u_string, str)\n try:\n u_string.encode(\"ascii\")\n return True\n except UnicodeEncodeError:\n return False\n",
"path": "requests/_internal_utils.py"
},
{
"content": "\"\"\"\nrequests.utils\n~~~~~~~~~~~~~~\n\nThis module provides utility functions that are used within Requests\nthat are also useful for external consumption.\n\"\"\"\n\nimport codecs\nimport contextlib\nimport io\nimport os\nimport re\nimport socket\nimport struct\nimport sys\nimport tempfile\nimport warnings\nimport zipfile\nfrom collections import OrderedDict\n\nfrom urllib3.util import make_headers, parse_url\n\nfrom . import certs\nfrom .__version__ import __version__\n\n# to_native_string is unused here, but imported here for backwards compatibility\nfrom ._internal_utils import HEADER_VALIDATORS, to_native_string # noqa: F401\nfrom .compat import (\n Mapping,\n basestring,\n bytes,\n getproxies,\n getproxies_environment,\n integer_types,\n)\nfrom .compat import parse_http_list as _parse_list_header\nfrom .compat import (\n proxy_bypass,\n proxy_bypass_environment,\n quote,\n str,\n unquote,\n urlparse,\n urlunparse,\n)\nfrom .cookies import cookiejar_from_dict\nfrom .exceptions import (\n FileModeWarning,\n InvalidHeader,\n InvalidURL,\n UnrewindableBodyError,\n)\nfrom .structures import CaseInsensitiveDict\n\nNETRC_FILES = (\".netrc\", \"_netrc\")\n\nDEFAULT_CA_BUNDLE_PATH = certs.where()\n\nDEFAULT_PORTS = {\"http\": 80, \"https\": 443}\n\n# Ensure that ', ' is used to preserve previous delimiter behavior.\nDEFAULT_ACCEPT_ENCODING = \", \".join(\n re.split(r\",\\s*\", make_headers(accept_encoding=True)[\"accept-encoding\"])\n)\n\n\nif sys.platform == \"win32\":\n # provide a proxy_bypass version on Windows without DNS lookups\n\n def proxy_bypass_registry(host):\n try:\n import winreg\n except ImportError:\n return False\n\n try:\n internetSettings = winreg.OpenKey(\n winreg.HKEY_CURRENT_USER,\n r\"Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings\",\n )\n # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it\n proxyEnable = int(winreg.QueryValueEx(internetSettings, \"ProxyEnable\")[0])\n # ProxyOverride is almost always a string\n proxyOverride = winreg.QueryValueEx(internetSettings, \"ProxyOverride\")[0]\n except (OSError, ValueError):\n return False\n if not proxyEnable or not proxyOverride:\n return False\n\n # make a check value list from the registry entry: replace the\n # '<local>' string by the localhost entry and the corresponding\n # canonical entry.\n proxyOverride = proxyOverride.split(\";\")\n # now check if we match one of the registry values.\n for test in proxyOverride:\n if test == \"<local>\":\n if \".\" not in host:\n return True\n test = test.replace(\".\", r\"\\.\") # mask dots\n test = test.replace(\"*\", r\".*\") # change glob sequence\n test = test.replace(\"?\", r\".\") # change glob char\n if re.match(test, host, re.I):\n return True\n return False\n\n def proxy_bypass(host): # noqa\n \"\"\"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n \"\"\"\n if getproxies_environment():\n return proxy_bypass_environment(host)\n else:\n return proxy_bypass_registry(host)\n\n\ndef dict_to_sequence(d):\n \"\"\"Returns an internal sequence dictionary update.\"\"\"\n\n if hasattr(d, \"items\"):\n d = d.items()\n\n return d\n\n\ndef super_len(o):\n total_length = None\n current_position = 0\n\n if hasattr(o, \"__len__\"):\n total_length = len(o)\n\n elif hasattr(o, \"len\"):\n total_length = o.len\n\n elif hasattr(o, \"fileno\"):\n try:\n fileno = o.fileno()\n except (io.UnsupportedOperation, AttributeError):\n # AttributeError is a surprising exception, 
seeing as how we've just checked\n # that `hasattr(o, 'fileno')`. It happens for objects obtained via\n # `Tarfile.extractfile()`, per issue 5229.\n pass\n else:\n total_length = os.fstat(fileno).st_size\n\n # Having used fstat to determine the file length, we need to\n # confirm that this file was opened up in binary mode.\n if \"b\" not in o.mode:\n warnings.warn(\n (\n \"Requests has determined the content-length for this \"\n \"request using the binary size of the file: however, the \"\n \"file has been opened in text mode (i.e. without the 'b' \"\n \"flag in the mode). This may lead to an incorrect \"\n \"content-length. In Requests 3.0, support will be removed \"\n \"for files in text mode.\"\n ),\n FileModeWarning,\n )\n\n if hasattr(o, \"tell\"):\n try:\n current_position = o.tell()\n except OSError:\n # This can happen in some weird situations, such as when the file\n # is actually a special file descriptor like stdin. In this\n # instance, we don't know what the length is, so set it to zero and\n # let requests chunk it instead.\n if total_length is not None:\n current_position = total_length\n else:\n if hasattr(o, \"seek\") and total_length is None:\n # StringIO and BytesIO have seek but no usable fileno\n try:\n # seek to end of file\n o.seek(0, 2)\n total_length = o.tell()\n\n # seek back to current position to support\n # partially read file-like objects\n o.seek(current_position or 0)\n except OSError:\n total_length = 0\n\n if total_length is None:\n total_length = 0\n\n return max(0, total_length - current_position)\n\n\ndef get_netrc_auth(url, raise_errors=False):\n \"\"\"Returns the Requests tuple auth for a given url from netrc.\"\"\"\n\n netrc_file = os.environ.get(\"NETRC\")\n if netrc_file is not None:\n netrc_locations = (netrc_file,)\n else:\n netrc_locations = (f\"~/{f}\" for f in NETRC_FILES)\n\n try:\n from netrc import NetrcParseError, netrc\n\n netrc_path = None\n\n for f in netrc_locations:\n try:\n loc = os.path.expanduser(f)\n except KeyError:\n # os.path.expanduser can fail when $HOME is undefined and\n # getpwuid fails. See https://bugs.python.org/issue20164 &\n # https://github.com/psf/requests/issues/1846\n return\n\n if os.path.exists(loc):\n netrc_path = loc\n break\n\n # Abort early if there isn't one.\n if netrc_path is None:\n return\n\n ri = urlparse(url)\n\n # Strip port numbers from netloc. 
This weird `if...encode`` dance is\n # used for Python 3.2, which doesn't support unicode literals.\n splitstr = b\":\"\n if isinstance(url, str):\n splitstr = splitstr.decode(\"ascii\")\n host = ri.netloc.split(splitstr)[0]\n\n try:\n _netrc = netrc(netrc_path).authenticators(host)\n if _netrc:\n # Return with login / password\n login_i = 0 if _netrc[0] else 1\n return (_netrc[login_i], _netrc[2])\n except (NetrcParseError, OSError):\n # If there was a parsing error or a permissions issue reading the file,\n # we'll just skip netrc auth unless explicitly asked to raise errors.\n if raise_errors:\n raise\n\n # App Engine hackiness.\n except (ImportError, AttributeError):\n pass\n\n\ndef guess_filename(obj):\n \"\"\"Tries to guess the filename of the given object.\"\"\"\n name = getattr(obj, \"name\", None)\n if name and isinstance(name, basestring) and name[0] != \"<\" and name[-1] != \">\":\n return os.path.basename(name)\n\n\ndef extract_zipped_paths(path):\n \"\"\"Replace nonexistent paths that look like they refer to a member of a zip\n archive with the location of an extracted copy of the target, or else\n just return the provided path unchanged.\n \"\"\"\n if os.path.exists(path):\n # this is already a valid path, no need to do anything further\n return path\n\n # find the first valid part of the provided path and treat that as a zip archive\n # assume the rest of the path is the name of a member in the archive\n archive, member = os.path.split(path)\n while archive and not os.path.exists(archive):\n archive, prefix = os.path.split(archive)\n if not prefix:\n # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),\n # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users\n break\n member = \"/\".join([prefix, member])\n\n if not zipfile.is_zipfile(archive):\n return path\n\n zip_file = zipfile.ZipFile(archive)\n if member not in zip_file.namelist():\n return path\n\n # we have a valid zip archive and a valid member of that archive\n tmp = tempfile.gettempdir()\n extracted_path = os.path.join(tmp, member.split(\"/\")[-1])\n if not os.path.exists(extracted_path):\n # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition\n with atomic_open(extracted_path) as file_handler:\n file_handler.write(zip_file.read(member))\n return extracted_path\n\n\n@contextlib.contextmanager\ndef atomic_open(filename):\n \"\"\"Write a file to the disk in an atomic fashion\"\"\"\n tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))\n try:\n with os.fdopen(tmp_descriptor, \"wb\") as tmp_handler:\n yield tmp_handler\n os.replace(tmp_name, filename)\n except BaseException:\n os.remove(tmp_name)\n raise\n\n\ndef from_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. 
Unless it can not be represented as such, return an\n OrderedDict, e.g.,\n\n ::\n\n >>> from_key_val_list([('key', 'val')])\n OrderedDict([('key', 'val')])\n >>> from_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n >>> from_key_val_list({'key': 'val'})\n OrderedDict([('key', 'val')])\n\n :rtype: OrderedDict\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError(\"cannot encode objects that are not 2-tuples\")\n\n return OrderedDict(value)\n\n\ndef to_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. If it can be, return a list of tuples, e.g.,\n\n ::\n\n >>> to_key_val_list([('key', 'val')])\n [('key', 'val')]\n >>> to_key_val_list({'key': 'val'})\n [('key', 'val')]\n >>> to_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n\n :rtype: list\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError(\"cannot encode objects that are not 2-tuples\")\n\n if isinstance(value, Mapping):\n value = value.items()\n\n return list(value)\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_list_header(value):\n \"\"\"Parse lists as described by RFC 2068 Section 2.\n\n In particular, parse comma-separated lists where the elements of\n the list may include quoted-strings. A quoted-string could\n contain a comma. A non-quoted string could have quotes in the\n middle. Quotes are removed automatically after parsing.\n\n It basically works like :func:`parse_set_header` just that items\n may appear multiple times and case sensitivity is preserved.\n\n The return value is a standard :class:`list`:\n\n >>> parse_list_header('token, \"quoted value\"')\n ['token', 'quoted value']\n\n To create a header from the :class:`list` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a list header.\n :return: :class:`list`\n :rtype: list\n \"\"\"\n result = []\n for item in _parse_list_header(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_dict_header(value):\n \"\"\"Parse lists of key, value pairs as described by RFC 2068 Section 2 and\n convert them into a python dict:\n\n >>> d = parse_dict_header('foo=\"is a fish\", bar=\"as well\"')\n >>> type(d) is dict\n True\n >>> sorted(d.items())\n [('bar', 'as well'), ('foo', 'is a fish')]\n\n If there is no value for a key it will be `None`:\n\n >>> parse_dict_header('key_without_value')\n {'key_without_value': None}\n\n To create a header from the :class:`dict` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a dict header.\n :return: :class:`dict`\n :rtype: dict\n \"\"\"\n result = {}\n for item in _parse_list_header(value):\n if \"=\" not in item:\n result[item] = None\n continue\n name, value = item.split(\"=\", 1)\n if value[:1] == value[-1:] == '\"':\n value = unquote_header_value(value[1:-1])\n result[name] = value\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef unquote_header_value(value, is_filename=False):\n r\"\"\"Unquotes a header value. 
(Reversal of :func:`quote_header_value`).\n This does not use the real unquoting but what browsers are actually\n using for quoting.\n\n :param value: the header value to unquote.\n :rtype: str\n \"\"\"\n if value and value[0] == value[-1] == '\"':\n # this is not the real unquoting, but fixing this so that the\n # RFC is met will result in bugs with internet explorer and\n # probably some other browsers as well. IE for example is\n # uploading files with \"C:\\foo\\bar.txt\" as filename\n value = value[1:-1]\n\n # if this is a filename and the starting characters look like\n # a UNC path, then just return the value without quotes. Using the\n # replace sequence below on a UNC path has the effect of turning\n # the leading double slash into a single slash and then\n # _fix_ie_filename() doesn't work correctly. See #458.\n if not is_filename or value[:2] != \"\\\\\\\\\":\n return value.replace(\"\\\\\\\\\", \"\\\\\").replace('\\\\\"', '\"')\n return value\n\n\ndef dict_from_cookiejar(cj):\n \"\"\"Returns a key/value dictionary from a CookieJar.\n\n :param cj: CookieJar object to extract cookies from.\n :rtype: dict\n \"\"\"\n\n cookie_dict = {}\n\n for cookie in cj:\n cookie_dict[cookie.name] = cookie.value\n\n return cookie_dict\n\n\ndef add_dict_to_cookiejar(cj, cookie_dict):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cj: CookieJar to insert cookies into.\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :rtype: CookieJar\n \"\"\"\n\n return cookiejar_from_dict(cookie_dict, cj)\n\n\ndef get_encodings_from_content(content):\n \"\"\"Returns encodings from given content string.\n\n :param content: bytestring to extract encodings from.\n \"\"\"\n warnings.warn(\n (\n \"In requests 3.0, get_encodings_from_content will be removed. For \"\n \"more information, please see the discussion on issue #2266. 
(This\"\n \" warning should only appear once.)\"\n ),\n DeprecationWarning,\n )\n\n charset_re = re.compile(r'<meta.*?charset=[\"\\']*(.+?)[\"\\'>]', flags=re.I)\n pragma_re = re.compile(r'<meta.*?content=[\"\\']*;?charset=(.+?)[\"\\'>]', flags=re.I)\n xml_re = re.compile(r'^<\\?xml.*?encoding=[\"\\']*(.+?)[\"\\'>]')\n\n return (\n charset_re.findall(content)\n + pragma_re.findall(content)\n + xml_re.findall(content)\n )\n\n\ndef _parse_content_type_header(header):\n \"\"\"Returns content type and parameters from given header\n\n :param header: string\n :return: tuple containing content type and dictionary of\n parameters\n \"\"\"\n\n tokens = header.split(\";\")\n content_type, params = tokens[0].strip(), tokens[1:]\n params_dict = {}\n items_to_strip = \"\\\"' \"\n\n for param in params:\n param = param.strip()\n if param:\n key, value = param, True\n index_of_equals = param.find(\"=\")\n if index_of_equals != -1:\n key = param[:index_of_equals].strip(items_to_strip)\n value = param[index_of_equals + 1 :].strip(items_to_strip)\n params_dict[key.lower()] = value\n return content_type, params_dict\n\n\ndef get_encoding_from_headers(headers):\n \"\"\"Returns encodings from given HTTP Header Dict.\n\n :param headers: dictionary to extract encoding from.\n :rtype: str\n \"\"\"\n\n content_type = headers.get(\"content-type\")\n\n if not content_type:\n return None\n\n content_type, params = _parse_content_type_header(content_type)\n\n if \"charset\" in params:\n return params[\"charset\"].strip(\"'\\\"\")\n\n if \"text\" in content_type:\n return \"ISO-8859-1\"\n\n if \"application/json\" in content_type:\n # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset\n return \"utf-8\"\n\n\ndef stream_decode_response_unicode(iterator, r):\n \"\"\"Stream decodes an iterator.\"\"\"\n\n if r.encoding is None:\n yield from iterator\n return\n\n decoder = codecs.getincrementaldecoder(r.encoding)(errors=\"replace\")\n for chunk in iterator:\n rv = decoder.decode(chunk)\n if rv:\n yield rv\n rv = decoder.decode(b\"\", final=True)\n if rv:\n yield rv\n\n\ndef iter_slices(string, slice_length):\n \"\"\"Iterate over slices of a string.\"\"\"\n pos = 0\n if slice_length is None or slice_length <= 0:\n slice_length = len(string)\n while pos < len(string):\n yield string[pos : pos + slice_length]\n pos += slice_length\n\n\ndef get_unicode_from_response(r):\n \"\"\"Returns the requested content back in unicode.\n\n :param r: Response object to get unicode content from.\n\n Tried:\n\n 1. charset from content-type\n 2. fall back and replace all unicode characters\n\n :rtype: str\n \"\"\"\n warnings.warn(\n (\n \"In requests 3.0, get_unicode_from_response will be removed. For \"\n \"more information, please see the discussion on issue #2266. (This\"\n \" warning should only appear once.)\"\n ),\n DeprecationWarning,\n )\n\n tried_encodings = []\n\n # Try charset from content-type\n encoding = get_encoding_from_headers(r.headers)\n\n if encoding:\n try:\n return str(r.content, encoding)\n except UnicodeError:\n tried_encodings.append(encoding)\n\n # Fall back:\n try:\n return str(r.content, encoding, errors=\"replace\")\n except TypeError:\n return r.content\n\n\n# The unreserved URI characters (RFC 3986)\nUNRESERVED_SET = frozenset(\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\" + \"0123456789-._~\"\n)\n\n\ndef unquote_unreserved(uri):\n \"\"\"Un-escape any percent-escape sequences in a URI that are unreserved\n characters. 
This leaves all reserved, illegal and non-ASCII bytes encoded.\n\n :rtype: str\n \"\"\"\n parts = uri.split(\"%\")\n for i in range(1, len(parts)):\n h = parts[i][0:2]\n if len(h) == 2 and h.isalnum():\n try:\n c = chr(int(h, 16))\n except ValueError:\n raise InvalidURL(f\"Invalid percent-escape sequence: '{h}'\")\n\n if c in UNRESERVED_SET:\n parts[i] = c + parts[i][2:]\n else:\n parts[i] = f\"%{parts[i]}\"\n else:\n parts[i] = f\"%{parts[i]}\"\n return \"\".join(parts)\n\n\ndef requote_uri(uri):\n \"\"\"Re-quote the given URI.\n\n This function passes the given URI through an unquote/quote cycle to\n ensure that it is fully and consistently quoted.\n\n :rtype: str\n \"\"\"\n safe_with_percent = \"!#$%&'()*+,/:;=?@[]~\"\n safe_without_percent = \"!#$&'()*+,/:;=?@[]~\"\n try:\n # Unquote only the unreserved characters\n # Then quote only illegal characters (do not quote reserved,\n # unreserved, or '%')\n return quote(unquote_unreserved(uri), safe=safe_with_percent)\n except InvalidURL:\n # We couldn't unquote the given URI, so let's try quoting it, but\n # there may be unquoted '%'s in the URI. We need to make sure they're\n # properly quoted so they do not cause issues elsewhere.\n return quote(uri, safe=safe_without_percent)\n\n\ndef address_in_network(ip, net):\n \"\"\"This function allows you to check if an IP belongs to a network subnet\n\n Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24\n returns False if ip = 192.168.1.1 and net = 192.168.100.0/24\n\n :rtype: bool\n \"\"\"\n ipaddr = struct.unpack(\"=L\", socket.inet_aton(ip))[0]\n netaddr, bits = net.split(\"/\")\n netmask = struct.unpack(\"=L\", socket.inet_aton(dotted_netmask(int(bits))))[0]\n network = struct.unpack(\"=L\", socket.inet_aton(netaddr))[0] & netmask\n return (ipaddr & netmask) == (network & netmask)\n\n\ndef dotted_netmask(mask):\n \"\"\"Converts mask from /xx format to xxx.xxx.xxx.xxx\n\n Example: if mask is 24 function returns 255.255.255.0\n\n :rtype: str\n \"\"\"\n bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1\n return socket.inet_ntoa(struct.pack(\">I\", bits))\n\n\ndef is_ipv4_address(string_ip):\n \"\"\"\n :rtype: bool\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except OSError:\n return False\n return True\n\n\ndef is_valid_cidr(string_network):\n \"\"\"\n Very simple check of the cidr format in no_proxy variable.\n\n :rtype: bool\n \"\"\"\n if string_network.count(\"/\") == 1:\n try:\n mask = int(string_network.split(\"/\")[1])\n except ValueError:\n return False\n\n if mask < 1 or mask > 32:\n return False\n\n try:\n socket.inet_aton(string_network.split(\"/\")[0])\n except OSError:\n return False\n else:\n return False\n return True\n\n\n@contextlib.contextmanager\ndef set_environ(env_name, value):\n \"\"\"Set the environment variable 'env_name' to 'value'\n\n Save previous value, yield, and then restore the previous value stored in\n the environment variable 'env_name'.\n\n If 'value' is None, do nothing\"\"\"\n value_changed = value is not None\n if value_changed:\n old_value = os.environ.get(env_name)\n os.environ[env_name] = value\n try:\n yield\n finally:\n if value_changed:\n if old_value is None:\n del os.environ[env_name]\n else:\n os.environ[env_name] = old_value\n\n\ndef should_bypass_proxies(url, no_proxy):\n \"\"\"\n Returns whether we should bypass proxies or not.\n\n :rtype: bool\n \"\"\"\n # Prioritize lowercase environment variables over uppercase\n # to keep a consistent behaviour with other http projects (curl, wget).\n def get_proxy(key):\n return os.environ.get(key) 
or os.environ.get(key.upper())\n\n # First check whether no_proxy is defined. If it is, check that the URL\n # we're getting isn't in the no_proxy list.\n no_proxy_arg = no_proxy\n if no_proxy is None:\n no_proxy = get_proxy(\"no_proxy\")\n parsed = urlparse(url)\n\n if parsed.hostname is None:\n # URLs don't always have hostnames, e.g. file:/// urls.\n return True\n\n if no_proxy:\n # We need to check whether we match here. We need to see if we match\n # the end of the hostname, both with and without the port.\n no_proxy = (host for host in no_proxy.replace(\" \", \"\").split(\",\") if host)\n\n if is_ipv4_address(parsed.hostname):\n for proxy_ip in no_proxy:\n if is_valid_cidr(proxy_ip):\n if address_in_network(parsed.hostname, proxy_ip):\n return True\n elif parsed.hostname == proxy_ip:\n # If no_proxy ip was defined in plain IP notation instead of cidr notation &\n # matches the IP of the index\n return True\n else:\n host_with_port = parsed.hostname\n if parsed.port:\n host_with_port += f\":{parsed.port}\"\n\n for host in no_proxy:\n if parsed.hostname.endswith(host) or host_with_port.endswith(host):\n # The URL does match something in no_proxy, so we don't want\n # to apply the proxies on this URL.\n return True\n\n with set_environ(\"no_proxy\", no_proxy_arg):\n # parsed.hostname can be `None` in cases such as a file URI.\n try:\n bypass = proxy_bypass(parsed.hostname)\n except (TypeError, socket.gaierror):\n bypass = False\n\n if bypass:\n return True\n\n return False\n\n\ndef get_environ_proxies(url, no_proxy=None):\n \"\"\"\n Return a dict of environment proxies.\n\n :rtype: dict\n \"\"\"\n if should_bypass_proxies(url, no_proxy=no_proxy):\n return {}\n else:\n return getproxies()\n\n\ndef select_proxy(url, proxies):\n \"\"\"Select a proxy for the url, if applicable.\n\n :param url: The url being for the request\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n \"\"\"\n proxies = proxies or {}\n urlparts = urlparse(url)\n if urlparts.hostname is None:\n return proxies.get(urlparts.scheme, proxies.get(\"all\"))\n\n proxy_keys = [\n urlparts.scheme + \"://\" + urlparts.hostname,\n urlparts.scheme,\n \"all://\" + urlparts.hostname,\n \"all\",\n ]\n proxy = None\n for proxy_key in proxy_keys:\n if proxy_key in proxies:\n proxy = proxies[proxy_key]\n break\n\n return proxy\n\n\ndef resolve_proxies(request, proxies, trust_env=True):\n \"\"\"This method takes proxy information from a request and configuration\n input to resolve a mapping of target proxies. 
This will consider settings\n such a NO_PROXY to strip proxy configurations.\n\n :param request: Request or PreparedRequest\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n :param trust_env: Boolean declaring whether to trust environment configs\n\n :rtype: dict\n \"\"\"\n proxies = proxies if proxies is not None else {}\n url = request.url\n scheme = urlparse(url).scheme\n no_proxy = proxies.get(\"no_proxy\")\n new_proxies = proxies.copy()\n\n if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):\n environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n\n proxy = environ_proxies.get(scheme, environ_proxies.get(\"all\"))\n\n if proxy:\n new_proxies.setdefault(scheme, proxy)\n return new_proxies\n\n\ndef default_user_agent(name=\"python-requests\"):\n \"\"\"\n Return a string representing the default user agent.\n\n :rtype: str\n \"\"\"\n return f\"{name}/{__version__}\"\n\n\ndef default_headers():\n \"\"\"\n :rtype: requests.structures.CaseInsensitiveDict\n \"\"\"\n return CaseInsensitiveDict(\n {\n \"User-Agent\": default_user_agent(),\n \"Accept-Encoding\": DEFAULT_ACCEPT_ENCODING,\n \"Accept\": \"*/*\",\n \"Connection\": \"keep-alive\",\n }\n )\n\n\ndef parse_header_links(value):\n \"\"\"Return a list of parsed link headers proxies.\n\n i.e. Link: <http:/.../front.jpeg>; rel=front; type=\"image/jpeg\",<http://.../back.jpeg>; rel=back;type=\"image/jpeg\"\n\n :rtype: list\n \"\"\"\n\n links = []\n\n replace_chars = \" '\\\"\"\n\n value = value.strip(replace_chars)\n if not value:\n return links\n\n for val in re.split(\", *<\", value):\n try:\n url, params = val.split(\";\", 1)\n except ValueError:\n url, params = val, \"\"\n\n link = {\"url\": url.strip(\"<> '\\\"\")}\n\n for param in params.split(\";\"):\n try:\n key, value = param.split(\"=\")\n except ValueError:\n break\n\n link[key.strip(replace_chars)] = value.strip(replace_chars)\n\n links.append(link)\n\n return links\n\n\n# Null bytes; no need to recreate these on each call to guess_json_utf\n_null = \"\\x00\".encode(\"ascii\") # encoding to ASCII for Python 3\n_null2 = _null * 2\n_null3 = _null * 3\n\n\ndef guess_json_utf(data):\n \"\"\"\n :rtype: str\n \"\"\"\n # JSON always starts with two ASCII characters, so detection is as\n # easy as counting the nulls and from their location and count\n # determine the encoding. 
Also detect a BOM, if present.\n sample = data[:4]\n if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):\n return \"utf-32\" # BOM included\n if sample[:3] == codecs.BOM_UTF8:\n return \"utf-8-sig\" # BOM included, MS style (discouraged)\n if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):\n return \"utf-16\" # BOM included\n nullcount = sample.count(_null)\n if nullcount == 0:\n return \"utf-8\"\n if nullcount == 2:\n if sample[::2] == _null2: # 1st and 3rd are null\n return \"utf-16-be\"\n if sample[1::2] == _null2: # 2nd and 4th are null\n return \"utf-16-le\"\n # Did not detect 2 valid UTF-16 ascii-range characters\n if nullcount == 3:\n if sample[:3] == _null3:\n return \"utf-32-be\"\n if sample[1:] == _null3:\n return \"utf-32-le\"\n # Did not detect a valid UTF-32 ascii-range character\n return None\n\n\ndef prepend_scheme_if_needed(url, new_scheme):\n \"\"\"Given a URL that may or may not have a scheme, prepend the given scheme.\n Does not replace a present scheme with the one provided as an argument.\n\n :rtype: str\n \"\"\"\n parsed = parse_url(url)\n scheme, auth, host, port, path, query, fragment = parsed\n\n # A defect in urlparse determines that there isn't a netloc present in some\n # urls. We previously assumed parsing was overly cautious, and swapped the\n # netloc and path. Due to a lack of tests on the original defect, this is\n # maintained with parse_url for backwards compatibility.\n netloc = parsed.netloc\n if not netloc:\n netloc, path = path, netloc\n\n if auth:\n # parse_url doesn't provide the netloc with auth\n # so we'll add it ourselves.\n netloc = \"@\".join([auth, netloc])\n if scheme is None:\n scheme = new_scheme\n if path is None:\n path = \"\"\n\n return urlunparse((scheme, netloc, path, \"\", query, fragment))\n\n\ndef get_auth_from_url(url):\n \"\"\"Given a url with authentication components, extract them into a tuple of\n username,password.\n\n :rtype: (str,str)\n \"\"\"\n parsed = urlparse(url)\n\n try:\n auth = (unquote(parsed.username), unquote(parsed.password))\n except (AttributeError, TypeError):\n auth = (\"\", \"\")\n\n return auth\n\n\ndef check_header_validity(header):\n \"\"\"Verifies that header parts don't contain leading whitespace\n reserved characters, or return characters.\n\n :param header: tuple, in the format (name, value).\n \"\"\"\n name, value = header\n\n for part in header:\n if type(part) not in HEADER_VALIDATORS:\n raise InvalidHeader(\n f\"Header part ({part!r}) from {{{name!r}: {value!r}}} must be \"\n f\"of type str or bytes, not {type(part)}\"\n )\n\n _validate_header_part(name, \"name\", HEADER_VALIDATORS[type(name)][0])\n _validate_header_part(value, \"value\", HEADER_VALIDATORS[type(value)][1])\n\n\ndef _validate_header_part(header_part, header_kind, validator):\n if not validator.match(header_part):\n raise InvalidHeader(\n f\"Invalid leading whitespace, reserved character(s), or return\"\n f\"character(s) in header {header_kind}: {header_part!r}\"\n )\n\n\ndef urldefragauth(url):\n \"\"\"\n Given a url remove the fragment and the authentication part.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url)\n\n # see func:`prepend_scheme_if_needed`\n if not netloc:\n netloc, path = path, netloc\n\n netloc = netloc.rsplit(\"@\", 1)[-1]\n\n return urlunparse((scheme, netloc, path, params, query, \"\"))\n\n\ndef rewind_body(prepared_request):\n \"\"\"Move file pointer back to its recorded starting position\n so it can be read again on redirect.\n \"\"\"\n body_seek = 
getattr(prepared_request.body, \"seek\", None)\n if body_seek is not None and isinstance(\n prepared_request._body_position, integer_types\n ):\n try:\n body_seek(prepared_request._body_position)\n except OSError:\n raise UnrewindableBodyError(\n \"An error occurred when rewinding request body for redirect.\"\n )\n else:\n raise UnrewindableBodyError(\"Unable to rewind request body for redirect.\")\n",
"path": "requests/utils.py"
},
{
"content": "\"\"\"Tests for Requests.\"\"\"\n\nimport collections\nimport contextlib\nimport io\nimport json\nimport os\nimport pickle\nimport re\nimport warnings\n\nimport pytest\nimport urllib3\nfrom urllib3.util import Timeout as Urllib3Timeout\n\nimport requests\nfrom requests.adapters import HTTPAdapter\nfrom requests.auth import HTTPDigestAuth, _basic_auth_str\nfrom requests.compat import (\n JSONDecodeError,\n Morsel,\n MutableMapping,\n builtin_str,\n cookielib,\n getproxies,\n urlparse,\n)\nfrom requests.cookies import cookiejar_from_dict, morsel_to_cookie\nfrom requests.exceptions import (\n ChunkedEncodingError,\n ConnectionError,\n ConnectTimeout,\n ContentDecodingError,\n InvalidHeader,\n InvalidProxyURL,\n InvalidSchema,\n InvalidURL,\n MissingSchema,\n ProxyError,\n ReadTimeout,\n RequestException,\n RetryError,\n)\nfrom requests.exceptions import SSLError as RequestsSSLError\nfrom requests.exceptions import Timeout, TooManyRedirects, UnrewindableBodyError\nfrom requests.hooks import default_hooks\nfrom requests.models import PreparedRequest, urlencode\nfrom requests.sessions import SessionRedirectMixin\nfrom requests.structures import CaseInsensitiveDict\n\nfrom . import SNIMissingWarning\nfrom .compat import StringIO\nfrom .utils import override_environ\n\n# Requests to this URL should always fail with a connection timeout (nothing\n# listening on that port)\nTARPIT = \"http://10.255.255.1\"\n\n# This is to avoid waiting the timeout of using TARPIT\nINVALID_PROXY = \"http://localhost:1\"\n\ntry:\n from ssl import SSLContext\n\n del SSLContext\n HAS_MODERN_SSL = True\nexcept ImportError:\n HAS_MODERN_SSL = False\n\ntry:\n requests.pyopenssl\n HAS_PYOPENSSL = True\nexcept AttributeError:\n HAS_PYOPENSSL = False\n\n\nclass TestRequests:\n\n digest_auth_algo = (\"MD5\", \"SHA-256\", \"SHA-512\")\n\n def test_entry_points(self):\n\n requests.session\n requests.session().get\n requests.session().head\n requests.get\n requests.head\n requests.put\n requests.patch\n requests.post\n # Not really an entry point, but people rely on it.\n from requests.packages.urllib3.poolmanager import PoolManager # noqa:F401\n\n @pytest.mark.parametrize(\n \"exception, url\",\n (\n (MissingSchema, \"hiwpefhipowhefopw\"),\n (InvalidSchema, \"localhost:3128\"),\n (InvalidSchema, \"localhost.localdomain:3128/\"),\n (InvalidSchema, \"10.122.1.1:3128/\"),\n (InvalidURL, \"http://\"),\n (InvalidURL, \"http://*example.com\"),\n (InvalidURL, \"http://.example.com\"),\n ),\n )\n def test_invalid_url(self, exception, url):\n with pytest.raises(exception):\n requests.get(url)\n\n def test_basic_building(self):\n req = requests.Request()\n req.url = \"http://kennethreitz.org/\"\n req.data = {\"life\": \"42\"}\n\n pr = req.prepare()\n assert pr.url == req.url\n assert pr.body == \"life=42\"\n\n @pytest.mark.parametrize(\"method\", (\"GET\", \"HEAD\"))\n def test_no_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert \"Content-Length\" not in req.headers\n\n @pytest.mark.parametrize(\"method\", (\"POST\", \"PUT\", \"PATCH\", \"OPTIONS\"))\n def test_no_body_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert req.headers[\"Content-Length\"] == \"0\"\n\n @pytest.mark.parametrize(\"method\", (\"POST\", \"PUT\", \"PATCH\", \"OPTIONS\"))\n def test_empty_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower()), data=\"\").prepare()\n assert 
req.headers[\"Content-Length\"] == \"0\"\n\n def test_override_content_length(self, httpbin):\n headers = {\"Content-Length\": \"not zero\"}\n r = requests.Request(\"POST\", httpbin(\"post\"), headers=headers).prepare()\n assert \"Content-Length\" in r.headers\n assert r.headers[\"Content-Length\"] == \"not zero\"\n\n def test_path_is_not_double_encoded(self):\n request = requests.Request(\"GET\", \"http://0.0.0.0/get/test case\").prepare()\n\n assert request.path_url == \"/get/test%20case\"\n\n @pytest.mark.parametrize(\n \"url, expected\",\n (\n (\n \"http://example.com/path#fragment\",\n \"http://example.com/path?a=b#fragment\",\n ),\n (\n \"http://example.com/path?key=value#fragment\",\n \"http://example.com/path?key=value&a=b#fragment\",\n ),\n ),\n )\n def test_params_are_added_before_fragment(self, url, expected):\n request = requests.Request(\"GET\", url, params={\"a\": \"b\"}).prepare()\n assert request.url == expected\n\n def test_params_original_order_is_preserved_by_default(self):\n param_ordered_dict = collections.OrderedDict(\n ((\"z\", 1), (\"a\", 1), (\"k\", 1), (\"d\", 1))\n )\n session = requests.Session()\n request = requests.Request(\n \"GET\", \"http://example.com/\", params=param_ordered_dict\n )\n prep = session.prepare_request(request)\n assert prep.url == \"http://example.com/?z=1&a=1&k=1&d=1\"\n\n def test_params_bytes_are_encoded(self):\n request = requests.Request(\n \"GET\", \"http://example.com\", params=b\"test=foo\"\n ).prepare()\n assert request.url == \"http://example.com/?test=foo\"\n\n def test_binary_put(self):\n request = requests.Request(\n \"PUT\", \"http://example.com\", data=\"ööö\".encode()\n ).prepare()\n assert isinstance(request.body, bytes)\n\n def test_whitespaces_are_removed_from_url(self):\n # Test for issue #3696\n request = requests.Request(\"GET\", \" http://example.com\").prepare()\n assert request.url == \"http://example.com/\"\n\n @pytest.mark.parametrize(\"scheme\", (\"http://\", \"HTTP://\", \"hTTp://\", \"HttP://\"))\n def test_mixed_case_scheme_acceptable(self, httpbin, scheme):\n s = requests.Session()\n s.proxies = getproxies()\n parts = urlparse(httpbin(\"get\"))\n url = scheme + parts.netloc + parts.path\n r = requests.Request(\"GET\", url)\n r = s.send(r.prepare())\n assert r.status_code == 200, f\"failed for scheme {scheme}\"\n\n def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):\n r = requests.Request(\"GET\", httpbin(\"get\"))\n s = requests.Session()\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n\n assert r.status_code == 200\n\n def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):\n r = requests.get(httpbin(\"redirect\", \"1\"))\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin):\n r = requests.post(\n httpbin(\"redirect-to\"),\n data=\"test\",\n params={\"url\": \"post\", \"status_code\": 307},\n )\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()[\"data\"] == \"test\"\n\n def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin):\n byte_str = b\"test\"\n r = requests.post(\n httpbin(\"redirect-to\"),\n data=io.BytesIO(byte_str),\n params={\"url\": \"post\", \"status_code\": 307},\n )\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()[\"data\"] == byte_str.decode(\"utf-8\")\n\n def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin):\n try:\n 
requests.get(httpbin(\"relative-redirect\", \"50\"))\n except TooManyRedirects as e:\n url = httpbin(\"relative-redirect\", \"20\")\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 30\n else:\n pytest.fail(\"Expected redirect to raise TooManyRedirects but it did not\")\n\n def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin):\n s = requests.session()\n s.max_redirects = 5\n try:\n s.get(httpbin(\"relative-redirect\", \"50\"))\n except TooManyRedirects as e:\n url = httpbin(\"relative-redirect\", \"45\")\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 5\n else:\n pytest.fail(\n \"Expected custom max number of redirects to be respected but was not\"\n )\n\n def test_http_301_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin(\"status\", \"301\"))\n assert r.status_code == 200\n assert r.request.method == \"GET\"\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_301_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin(\"status\", \"301\"), allow_redirects=True)\n print(r.content)\n assert r.status_code == 200\n assert r.request.method == \"HEAD\"\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_302_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin(\"status\", \"302\"))\n assert r.status_code == 200\n assert r.request.method == \"GET\"\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_302_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin(\"status\", \"302\"), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == \"HEAD\"\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_303_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin(\"status\", \"303\"))\n assert r.status_code == 200\n assert r.request.method == \"GET\"\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_http_303_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin(\"status\", \"303\"), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == \"HEAD\"\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_header_and_body_removal_on_redirect(self, httpbin):\n purged_headers = (\"Content-Length\", \"Content-Type\")\n ses = requests.Session()\n req = requests.Request(\"POST\", httpbin(\"post\"), data={\"test\": \"data\"})\n prep = ses.prepare_request(req)\n resp = ses.send(prep)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers[\"location\"] = \"get\"\n\n # Run request through resolve_redirects\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_transfer_enc_removal_on_redirect(self, httpbin):\n purged_headers = (\"Transfer-Encoding\", \"Content-Type\")\n ses = requests.Session()\n req = requests.Request(\"POST\", httpbin(\"post\"), data=(b\"x\" for x in range(1)))\n prep = ses.prepare_request(req)\n assert \"Transfer-Encoding\" in prep.headers\n\n # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33\n resp = requests.Response()\n resp.raw = io.BytesIO(b\"the content\")\n resp.request = prep\n setattr(resp.raw, \"release_conn\", lambda *args: args)\n\n # Mimic a redirect 
response\n resp.status_code = 302\n resp.headers[\"location\"] = httpbin(\"get\")\n\n # Run request through resolve_redirect\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_fragment_maintained_on_redirect(self, httpbin):\n fragment = \"#view=edit&token=hunter2\"\n r = requests.get(httpbin(\"redirect-to?url=get\") + fragment)\n\n assert len(r.history) > 0\n assert r.history[0].request.url == httpbin(\"redirect-to?url=get\") + fragment\n assert r.url == httpbin(\"get\") + fragment\n\n def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):\n heads = {\"User-agent\": \"Mozilla/5.0\"}\n\n r = requests.get(httpbin(\"user-agent\"), headers=heads)\n\n assert heads[\"User-agent\"] in r.text\n assert r.status_code == 200\n\n def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):\n heads = {\"User-agent\": \"Mozilla/5.0\"}\n\n r = requests.get(\n httpbin(\"get\") + \"?test=true\", params={\"q\": \"test\"}, headers=heads\n )\n assert r.status_code == 200\n\n def test_set_cookie_on_301(self, httpbin):\n s = requests.session()\n url = httpbin(\"cookies/set?foo=bar\")\n s.get(url)\n assert s.cookies[\"foo\"] == \"bar\"\n\n def test_cookie_sent_on_redirect(self, httpbin):\n s = requests.session()\n s.get(httpbin(\"cookies/set?foo=bar\"))\n r = s.get(httpbin(\"redirect/1\")) # redirects to httpbin('get')\n assert \"Cookie\" in r.json()[\"headers\"]\n\n def test_cookie_removed_on_expire(self, httpbin):\n s = requests.session()\n s.get(httpbin(\"cookies/set?foo=bar\"))\n assert s.cookies[\"foo\"] == \"bar\"\n s.get(\n httpbin(\"response-headers\"),\n params={\"Set-Cookie\": \"foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT\"},\n )\n assert \"foo\" not in s.cookies\n\n def test_cookie_quote_wrapped(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=\"bar:baz\"'))\n assert s.cookies[\"foo\"] == '\"bar:baz\"'\n\n def test_cookie_persists_via_api(self, httpbin):\n s = requests.session()\n r = s.get(httpbin(\"redirect/1\"), cookies={\"foo\": \"bar\"})\n assert \"foo\" in r.request.headers[\"Cookie\"]\n assert \"foo\" in r.history[0].request.headers[\"Cookie\"]\n\n def test_request_cookie_overrides_session_cookie(self, httpbin):\n s = requests.session()\n s.cookies[\"foo\"] = \"bar\"\n r = s.get(httpbin(\"cookies\"), cookies={\"foo\": \"baz\"})\n assert r.json()[\"cookies\"][\"foo\"] == \"baz\"\n # Session cookie should not be modified\n assert s.cookies[\"foo\"] == \"bar\"\n\n def test_request_cookies_not_persisted(self, httpbin):\n s = requests.session()\n s.get(httpbin(\"cookies\"), cookies={\"foo\": \"baz\"})\n # Sending a request with cookies should not add cookies to the session\n assert not s.cookies\n\n def test_generic_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({\"foo\": \"bar\"}, cj)\n s = requests.session()\n s.cookies = cj\n r = s.get(httpbin(\"cookies\"))\n # Make sure the cookie was sent\n assert r.json()[\"cookies\"][\"foo\"] == \"bar\"\n # Make sure the session cj is still the custom one\n assert s.cookies is cj\n\n def test_param_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({\"foo\": \"bar\"}, cj)\n s = requests.session()\n r = s.get(httpbin(\"cookies\"), cookies=cj)\n # Make sure the cookie was sent\n assert r.json()[\"cookies\"][\"foo\"] == \"bar\"\n\n def test_cookielib_cookiejar_on_redirect(self, httpbin):\n \"\"\"Tests resolve_redirect doesn't fail 
when merging cookies\n with non-RequestsCookieJar cookiejar.\n\n See GH #3579\n \"\"\"\n cj = cookiejar_from_dict({\"foo\": \"bar\"}, cookielib.CookieJar())\n s = requests.Session()\n s.cookies = cookiejar_from_dict({\"cookie\": \"tasty\"})\n\n # Prepare request without using Session\n req = requests.Request(\"GET\", httpbin(\"headers\"), cookies=cj)\n prep_req = req.prepare()\n\n # Send request and simulate redirect\n resp = s.send(prep_req)\n resp.status_code = 302\n resp.headers[\"location\"] = httpbin(\"get\")\n redirects = s.resolve_redirects(resp, prep_req)\n resp = next(redirects)\n\n # Verify CookieJar isn't being converted to RequestsCookieJar\n assert isinstance(prep_req._cookies, cookielib.CookieJar)\n assert isinstance(resp.request._cookies, cookielib.CookieJar)\n assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar)\n\n cookies = {}\n for c in resp.request._cookies:\n cookies[c.name] = c.value\n assert cookies[\"foo\"] == \"bar\"\n assert cookies[\"cookie\"] == \"tasty\"\n\n def test_requests_in_history_are_not_overridden(self, httpbin):\n resp = requests.get(httpbin(\"redirect/3\"))\n urls = [r.url for r in resp.history]\n req_urls = [r.request.url for r in resp.history]\n assert urls == req_urls\n\n def test_history_is_always_a_list(self, httpbin):\n \"\"\"Show that even with redirects, Response.history is always a list.\"\"\"\n resp = requests.get(httpbin(\"get\"))\n assert isinstance(resp.history, list)\n resp = requests.get(httpbin(\"redirect/1\"))\n assert isinstance(resp.history, list)\n assert not isinstance(resp.history, tuple)\n\n def test_headers_on_session_with_None_are_not_sent(self, httpbin):\n \"\"\"Do not send headers in Session.headers with None values.\"\"\"\n ses = requests.Session()\n ses.headers[\"Accept-Encoding\"] = None\n req = requests.Request(\"GET\", httpbin(\"get\"))\n prep = ses.prepare_request(req)\n assert \"Accept-Encoding\" not in prep.headers\n\n def test_headers_preserve_order(self, httpbin):\n \"\"\"Preserve order when headers provided as OrderedDict.\"\"\"\n ses = requests.Session()\n ses.headers = collections.OrderedDict()\n ses.headers[\"Accept-Encoding\"] = \"identity\"\n ses.headers[\"First\"] = \"1\"\n ses.headers[\"Second\"] = \"2\"\n headers = collections.OrderedDict([(\"Third\", \"3\"), (\"Fourth\", \"4\")])\n headers[\"Fifth\"] = \"5\"\n headers[\"Second\"] = \"222\"\n req = requests.Request(\"GET\", httpbin(\"get\"), headers=headers)\n prep = ses.prepare_request(req)\n items = list(prep.headers.items())\n assert items[0] == (\"Accept-Encoding\", \"identity\")\n assert items[1] == (\"First\", \"1\")\n assert items[2] == (\"Second\", \"222\")\n assert items[3] == (\"Third\", \"3\")\n assert items[4] == (\"Fourth\", \"4\")\n assert items[5] == (\"Fifth\", \"5\")\n\n @pytest.mark.parametrize(\"key\", (\"User-agent\", \"user-agent\"))\n def test_user_agent_transfers(self, httpbin, key):\n\n heads = {key: \"Mozilla/5.0 (github.com/psf/requests)\"}\n\n r = requests.get(httpbin(\"user-agent\"), headers=heads)\n assert heads[key] in r.text\n\n def test_HTTP_200_OK_HEAD(self, httpbin):\n r = requests.head(httpbin(\"get\"))\n assert r.status_code == 200\n\n def test_HTTP_200_OK_PUT(self, httpbin):\n r = requests.put(httpbin(\"put\"))\n assert r.status_code == 200\n\n def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):\n auth = (\"user\", \"pass\")\n url = httpbin(\"basic-auth\", \"user\", \"pass\")\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert 
r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n \"username, password\",\n (\n (\"user\", \"pass\"),\n (\"имя\".encode(), \"пароль\".encode()),\n (42, 42),\n (None, None),\n ),\n )\n def test_set_basicauth(self, httpbin, username, password):\n auth = (username, password)\n url = httpbin(\"get\")\n\n r = requests.Request(\"GET\", url, auth=auth)\n p = r.prepare()\n\n assert p.headers[\"Authorization\"] == _basic_auth_str(username, password)\n\n def test_basicauth_encodes_byte_strings(self):\n \"\"\"Ensure b'test' formats as the byte string \"test\" rather\n than the unicode string \"b'test'\" in Python 3.\n \"\"\"\n auth = (b\"\\xc5\\xafsername\", b\"test\\xc6\\xb6\")\n r = requests.Request(\"GET\", \"http://localhost\", auth=auth)\n p = r.prepare()\n\n assert p.headers[\"Authorization\"] == \"Basic xa9zZXJuYW1lOnRlc3TGtg==\"\n\n @pytest.mark.parametrize(\n \"url, exception\",\n (\n # Connecting to an unknown domain should raise a ConnectionError\n (\"http://doesnotexist.google.com\", ConnectionError),\n # Connecting to an invalid port should raise a ConnectionError\n (\"http://localhost:1\", ConnectionError),\n # Inputing a URL that cannot be parsed should raise an InvalidURL error\n (\"http://fe80::5054:ff:fe5a:fc0\", InvalidURL),\n ),\n )\n def test_errors(self, url, exception):\n with pytest.raises(exception):\n requests.get(url, timeout=1)\n\n def test_proxy_error(self):\n # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError\n with pytest.raises(ProxyError):\n requests.get(\n \"http://localhost:1\", proxies={\"http\": \"non-resolvable-address\"}\n )\n\n def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure):\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={\"https\": \"http:/badproxyurl:3128\"})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={\"http\": \"http://:8080\"})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={\"https\": \"https://\"})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={\"http\": \"http:///example.com:8080\"})\n\n def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request(\"GET\", httpbin())\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request(\"GET\", httpbin())\n prepared = session.prepare_request(request)\n session.send(prepared)\n\n def test_respect_proxy_env_on_send_with_redirects(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n url = httpbin(\"redirect/1\")\n print(url)\n request = requests.Request(\"GET\", url)\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_get(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.get(httpbin())\n\n def test_respect_proxy_env_on_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.request(method=\"GET\", 
url=httpbin())\n\n def test_proxy_authorization_preserved_on_request(self, httpbin):\n proxy_auth_value = \"Bearer XXX\"\n session = requests.Session()\n session.headers.update({\"Proxy-Authorization\": proxy_auth_value})\n resp = session.request(method=\"GET\", url=httpbin(\"get\"))\n sent_headers = resp.json().get(\"headers\", {})\n\n assert sent_headers.get(\"Proxy-Authorization\") == proxy_auth_value\n\n def test_basicauth_with_netrc(self, httpbin):\n auth = (\"user\", \"pass\")\n wrong_auth = (\"wronguser\", \"wrongpass\")\n url = httpbin(\"basic-auth\", \"user\", \"pass\")\n\n old_auth = requests.sessions.get_netrc_auth\n\n try:\n\n def get_netrc_auth_mock(url):\n return auth\n\n requests.sessions.get_netrc_auth = get_netrc_auth_mock\n\n # Should use netrc and work.\n r = requests.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n r = requests.get(url, auth=wrong_auth)\n assert r.status_code == 401\n\n s = requests.session()\n\n # Should use netrc and work.\n r = s.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n s.auth = wrong_auth\n r = s.get(url)\n assert r.status_code == 401\n finally:\n requests.sessions.get_netrc_auth = old_auth\n\n def test_DIGEST_HTTP_200_OK_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth(\"user\", \"pass\")\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype, \"never\")\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n print(r.headers[\"WWW-Authenticate\"])\n\n s = requests.session()\n s.auth = HTTPDigestAuth(\"user\", \"pass\")\n r = s.get(url)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype)\n auth = HTTPDigestAuth(\"user\", \"pass\")\n r = requests.get(url)\n assert r.cookies[\"fake\"] == \"fake_value\"\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype)\n auth = HTTPDigestAuth(\"user\", \"pass\")\n s = requests.Session()\n s.get(url, auth=auth)\n assert s.cookies[\"fake\"] == \"fake_value\"\n\n def test_DIGEST_STREAM(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth(\"user\", \"pass\")\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype)\n\n r = requests.get(url, auth=auth, stream=True)\n assert r.raw.read() != b\"\"\n\n r = requests.get(url, auth=auth, stream=False)\n assert r.raw.read() == b\"\"\n\n def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth(\"user\", \"wrongpass\")\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype)\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 401\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 401\n\n def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth(\"user\", \"pass\")\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype)\n\n r = requests.get(url, auth=auth)\n assert '\"auth\"' in r.request.headers[\"Authorization\"]\n\n def 
test_POSTBIN_GET_POST_FILES(self, httpbin):\n\n url = httpbin(\"post\")\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={\"some\": \"data\"})\n assert post1.status_code == 200\n\n with open(\"requirements-dev.txt\") as f:\n post2 = requests.post(url, files={\"some\": f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=[\"bad file data\"])\n\n def test_invalid_files_input(self, httpbin):\n\n url = httpbin(\"post\")\n post = requests.post(url, files={\"random-file-1\": None, \"random-file-2\": 1})\n assert b'name=\"random-file-1\"' not in post.request.body\n assert b'name=\"random-file-2\"' in post.request.body\n\n def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin):\n class TestStream:\n def __init__(self, data):\n self.data = data.encode()\n self.length = len(self.data)\n self.index = 0\n\n def __len__(self):\n return self.length\n\n def read(self, size=None):\n if size:\n ret = self.data[self.index : self.index + size]\n self.index += size\n else:\n ret = self.data[self.index :]\n self.index = self.length\n return ret\n\n def tell(self):\n return self.index\n\n def seek(self, offset, where=0):\n if where == 0:\n self.index = offset\n elif where == 1:\n self.index += offset\n elif where == 2:\n self.index = self.length + offset\n\n test = TestStream(\"test\")\n post1 = requests.post(httpbin(\"post\"), data=test)\n assert post1.status_code == 200\n assert post1.json()[\"data\"] == \"test\"\n\n test = TestStream(\"test\")\n test.seek(2)\n post2 = requests.post(httpbin(\"post\"), data=test)\n assert post2.status_code == 200\n assert post2.json()[\"data\"] == \"st\"\n\n def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):\n\n url = httpbin(\"post\")\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={\"some\": \"data\"})\n assert post1.status_code == 200\n\n with open(\"requirements-dev.txt\") as f:\n post2 = requests.post(url, data={\"some\": \"data\"}, files={\"some\": f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=[\"bad file data\"])\n\n def test_post_with_custom_mapping(self, httpbin):\n class CustomMapping(MutableMapping):\n def __init__(self, *args, **kwargs):\n self.data = dict(*args, **kwargs)\n\n def __delitem__(self, key):\n del self.data[key]\n\n def __getitem__(self, key):\n return self.data[key]\n\n def __setitem__(self, key, value):\n self.data[key] = value\n\n def __iter__(self):\n return iter(self.data)\n\n def __len__(self):\n return len(self.data)\n\n data = CustomMapping({\"some\": \"data\"})\n url = httpbin(\"post\")\n found_json = requests.post(url, data=data).json().get(\"form\")\n assert found_json == {\"some\": \"data\"}\n\n def test_conflicting_post_params(self, httpbin):\n url = httpbin(\"post\")\n with open(\"requirements-dev.txt\") as f:\n with pytest.raises(ValueError):\n requests.post(url, data='[{\"some\": \"data\"}]', files={\"some\": f})\n\n def test_request_ok_set(self, httpbin):\n r = requests.get(httpbin(\"status\", \"404\"))\n assert not r.ok\n\n def test_status_raising(self, httpbin):\n r = requests.get(httpbin(\"status\", \"404\"))\n with pytest.raises(requests.exceptions.HTTPError):\n r.raise_for_status()\n\n r = requests.get(httpbin(\"status\", \"500\"))\n assert not r.ok\n\n def 
test_decompress_gzip(self, httpbin):\n r = requests.get(httpbin(\"gzip\"))\n r.content.decode(\"ascii\")\n\n @pytest.mark.parametrize(\n \"url, params\",\n (\n (\"/get\", {\"foo\": \"føø\"}),\n (\"/get\", {\"føø\": \"føø\"}),\n (\"/get\", {\"føø\": \"føø\"}),\n (\"/get\", {\"foo\": \"foo\"}),\n (\"ø\", {\"foo\": \"foo\"}),\n ),\n )\n def test_unicode_get(self, httpbin, url, params):\n requests.get(httpbin(url), params=params)\n\n def test_unicode_header_name(self, httpbin):\n requests.put(\n httpbin(\"put\"),\n headers={\"Content-Type\": \"application/octet-stream\"},\n data=\"\\xff\",\n ) # compat.str is unicode.\n\n def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle):\n requests.get(httpbin_secure(\"status\", \"301\"), verify=httpbin_ca_bundle)\n\n def test_invalid_ca_certificate_path(self, httpbin_secure):\n INVALID_PATH = \"/garbage\"\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), verify=INVALID_PATH)\n assert str(\n e.value\n ) == \"Could not find a suitable TLS CA certificate bundle, invalid path: {}\".format(\n INVALID_PATH\n )\n\n def test_invalid_ssl_certificate_files(self, httpbin_secure):\n INVALID_PATH = \"/garbage\"\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=INVALID_PATH)\n assert str(\n e.value\n ) == \"Could not find the TLS certificate file, invalid path: {}\".format(\n INVALID_PATH\n )\n\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=(\".\", INVALID_PATH))\n assert str(e.value) == (\n f\"Could not find the TLS key file, invalid path: {INVALID_PATH}\"\n )\n\n @pytest.mark.parametrize(\n \"env, expected\",\n (\n ({}, True),\n ({\"REQUESTS_CA_BUNDLE\": \"/some/path\"}, \"/some/path\"),\n ({\"REQUESTS_CA_BUNDLE\": \"\"}, True),\n ({\"CURL_CA_BUNDLE\": \"/some/path\"}, \"/some/path\"),\n ({\"CURL_CA_BUNDLE\": \"\"}, True),\n ({\"REQUESTS_CA_BUNDLE\": \"\", \"CURL_CA_BUNDLE\": \"\"}, True),\n (\n {\n \"REQUESTS_CA_BUNDLE\": \"/some/path\",\n \"CURL_CA_BUNDLE\": \"/curl/path\",\n },\n \"/some/path\",\n ),\n (\n {\n \"REQUESTS_CA_BUNDLE\": \"\",\n \"CURL_CA_BUNDLE\": \"/curl/path\",\n },\n \"/curl/path\",\n ),\n ),\n )\n def test_env_cert_bundles(self, httpbin, mocker, env, expected):\n s = requests.Session()\n mocker.patch(\"os.environ\", env)\n settings = s.merge_environment_settings(\n url=httpbin(\"get\"), proxies={}, stream=False, verify=True, cert=None\n )\n assert settings[\"verify\"] == expected\n\n def test_http_with_certificate(self, httpbin):\n r = requests.get(httpbin(), cert=\".\")\n assert r.status_code == 200\n\n @pytest.mark.skipif(\n SNIMissingWarning is None,\n reason=\"urllib3 2.0 removed that warning and errors out instead\",\n )\n def test_https_warnings(self, nosan_server):\n \"\"\"warnings are emitted with requests.get\"\"\"\n host, port, ca_bundle = nosan_server\n if HAS_MODERN_SSL or HAS_PYOPENSSL:\n warnings_expected = (\"SubjectAltNameWarning\",)\n else:\n warnings_expected = (\n \"SNIMissingWarning\",\n \"InsecurePlatformWarning\",\n \"SubjectAltNameWarning\",\n )\n\n with pytest.warns(None) as warning_records:\n warnings.simplefilter(\"always\")\n requests.get(f\"https://localhost:{port}/\", verify=ca_bundle)\n\n warning_records = [\n item\n for item in warning_records\n if item.category.__name__ != \"ResourceWarning\"\n ]\n\n warnings_category = tuple(item.category.__name__ for item in warning_records)\n assert warnings_category == warnings_expected\n\n def test_certificate_failure(self, httpbin_secure):\n \"\"\"\n When underlying SSL problems occur, an 
SSLError is raised.\n \"\"\"\n with pytest.raises(RequestsSSLError):\n # Our local httpbin does not have a trusted CA, so this call will\n # fail if we use our default trust bundle.\n requests.get(httpbin_secure(\"status\", \"200\"))\n\n def test_urlencoded_get_query_multivalued_param(self, httpbin):\n\n r = requests.get(httpbin(\"get\"), params={\"test\": [\"foo\", \"baz\"]})\n assert r.status_code == 200\n assert r.url == httpbin(\"get?test=foo&test=baz\")\n\n def test_form_encoded_post_query_multivalued_element(self, httpbin):\n r = requests.Request(\n method=\"POST\", url=httpbin(\"post\"), data=dict(test=[\"foo\", \"baz\"])\n )\n prep = r.prepare()\n assert prep.body == \"test=foo&test=baz\"\n\n def test_different_encodings_dont_break_post(self, httpbin):\n with open(__file__, \"rb\") as f:\n r = requests.post(\n httpbin(\"post\"),\n data={\"stuff\": json.dumps({\"a\": 123})},\n params={\"blah\": \"asdf1234\"},\n files={\"file\": (\"test_requests.py\", f)},\n )\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n \"data\",\n (\n {\"stuff\": \"ëlïxr\"},\n {\"stuff\": \"ëlïxr\".encode()},\n {\"stuff\": \"elixr\"},\n {\"stuff\": b\"elixr\"},\n ),\n )\n def test_unicode_multipart_post(self, httpbin, data):\n with open(__file__, \"rb\") as f:\n r = requests.post(\n httpbin(\"post\"),\n data=data,\n files={\"file\": (\"test_requests.py\", f)},\n )\n assert r.status_code == 200\n\n def test_unicode_multipart_post_fieldnames(self, httpbin):\n filename = os.path.splitext(__file__)[0] + \".py\"\n with open(filename, \"rb\") as f:\n r = requests.Request(\n method=\"POST\",\n url=httpbin(\"post\"),\n data={b\"stuff\": \"elixr\"},\n files={\"file\": (\"test_requests.py\", f)},\n )\n prep = r.prepare()\n\n assert b'name=\"stuff\"' in prep.body\n assert b\"name=\\\"b'stuff'\\\"\" not in prep.body\n\n def test_unicode_method_name(self, httpbin):\n with open(__file__, \"rb\") as f:\n files = {\"file\": f}\n r = requests.request(\n method=\"POST\",\n url=httpbin(\"post\"),\n files=files,\n )\n assert r.status_code == 200\n\n def test_unicode_method_name_with_request_object(self, httpbin):\n s = requests.Session()\n with open(__file__, \"rb\") as f:\n files = {\"file\": f}\n req = requests.Request(\"POST\", httpbin(\"post\"), files=files)\n prep = s.prepare_request(req)\n assert isinstance(prep.method, builtin_str)\n assert prep.method == \"POST\"\n\n resp = s.send(prep)\n assert resp.status_code == 200\n\n def test_non_prepared_request_error(self):\n s = requests.Session()\n req = requests.Request(\"POST\", \"/\")\n\n with pytest.raises(ValueError) as e:\n s.send(req)\n assert str(e.value) == \"You can only send PreparedRequests.\"\n\n def test_custom_content_type(self, httpbin):\n with open(__file__, \"rb\") as f1:\n with open(__file__, \"rb\") as f2:\n data = {\"stuff\": json.dumps({\"a\": 123})}\n files = {\n \"file1\": (\"test_requests.py\", f1),\n \"file2\": (\"test_requests\", f2, \"text/py-content-type\"),\n }\n r = requests.post(httpbin(\"post\"), data=data, files=files)\n assert r.status_code == 200\n assert b\"text/py-content-type\" in r.request.body\n\n def test_hook_receives_request_arguments(self, httpbin):\n def hook(resp, **kwargs):\n assert resp is not None\n assert kwargs != {}\n\n s = requests.Session()\n r = requests.Request(\"GET\", httpbin(), hooks={\"response\": hook})\n prep = s.prepare_request(r)\n s.send(prep)\n\n def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):\n def hook(*args, **kwargs):\n pass\n\n s = requests.Session()\n 
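# Hooks attached to the session (rather than to an individual request) are\n # expected to be merged into the prepared request by prepare_request(); the\n # request below supplies no hooks of its own, so the session hook applies.\n 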
s.hooks[\"response\"].append(hook)\n r = requests.Request(\"GET\", httpbin())\n prep = s.prepare_request(r)\n assert prep.hooks[\"response\"] != []\n assert prep.hooks[\"response\"] == [hook]\n\n def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):\n def hook1(*args, **kwargs):\n pass\n\n def hook2(*args, **kwargs):\n pass\n\n assert hook1 is not hook2\n s = requests.Session()\n s.hooks[\"response\"].append(hook2)\n r = requests.Request(\"GET\", httpbin(), hooks={\"response\": [hook1]})\n prep = s.prepare_request(r)\n assert prep.hooks[\"response\"] == [hook1]\n\n def test_prepared_request_hook(self, httpbin):\n def hook(resp, **kwargs):\n resp.hook_working = True\n return resp\n\n req = requests.Request(\"GET\", httpbin(), hooks={\"response\": hook})\n prep = req.prepare()\n\n s = requests.Session()\n s.proxies = getproxies()\n resp = s.send(prep)\n\n assert hasattr(resp, \"hook_working\")\n\n def test_prepared_from_session(self, httpbin):\n class DummyAuth(requests.auth.AuthBase):\n def __call__(self, r):\n r.headers[\"Dummy-Auth-Test\"] = \"dummy-auth-test-ok\"\n return r\n\n req = requests.Request(\"GET\", httpbin(\"headers\"))\n assert not req.auth\n\n s = requests.Session()\n s.auth = DummyAuth()\n\n prep = s.prepare_request(req)\n resp = s.send(prep)\n\n assert resp.json()[\"headers\"][\"Dummy-Auth-Test\"] == \"dummy-auth-test-ok\"\n\n def test_prepare_request_with_bytestring_url(self):\n req = requests.Request(\"GET\", b\"https://httpbin.org/\")\n s = requests.Session()\n prep = s.prepare_request(req)\n assert prep.url == \"https://httpbin.org/\"\n\n def test_request_with_bytestring_host(self, httpbin):\n s = requests.Session()\n resp = s.request(\n \"GET\",\n httpbin(\"cookies/set?cookie=value\"),\n allow_redirects=False,\n headers={\"Host\": b\"httpbin.org\"},\n )\n assert resp.cookies.get(\"cookie\") == \"value\"\n\n def test_links(self):\n r = requests.Response()\n r.headers = {\n \"cache-control\": \"public, max-age=60, s-maxage=60\",\n \"connection\": \"keep-alive\",\n \"content-encoding\": \"gzip\",\n \"content-type\": \"application/json; charset=utf-8\",\n \"date\": \"Sat, 26 Jan 2013 16:47:56 GMT\",\n \"etag\": '\"6ff6a73c0e446c1f61614769e3ceb778\"',\n \"last-modified\": \"Sat, 26 Jan 2013 16:22:39 GMT\",\n \"link\": (\n \"<https://api.github.com/users/kennethreitz/repos?\"\n 'page=2&per_page=10>; rel=\"next\", <https://api.github.'\n \"com/users/kennethreitz/repos?page=7&per_page=10>; \"\n ' rel=\"last\"'\n ),\n \"server\": \"GitHub.com\",\n \"status\": \"200 OK\",\n \"vary\": \"Accept\",\n \"x-content-type-options\": \"nosniff\",\n \"x-github-media-type\": \"github.beta\",\n \"x-ratelimit-limit\": \"60\",\n \"x-ratelimit-remaining\": \"57\",\n }\n assert r.links[\"next\"][\"rel\"] == \"next\"\n\n def test_cookie_parameters(self):\n key = \"some_cookie\"\n value = \"some_value\"\n secure = True\n domain = \"test.com\"\n rest = {\"HttpOnly\": True}\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, secure=secure, domain=domain, rest=rest)\n\n assert len(jar) == 1\n assert \"some_cookie\" in jar\n\n cookie = list(jar)[0]\n assert cookie.secure == secure\n assert cookie.domain == domain\n assert cookie._rest[\"HttpOnly\"] == rest[\"HttpOnly\"]\n\n def test_cookie_as_dict_keeps_len(self):\n key = \"some_cookie\"\n value = \"some_value\"\n\n key1 = \"some_cookie1\"\n value1 = \"some_value1\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = 
dict(jar.items())\n\n assert len(jar) == 2\n assert len(d1) == 2\n assert len(d2) == 2\n assert len(d3) == 2\n\n def test_cookie_as_dict_keeps_items(self):\n key = \"some_cookie\"\n value = \"some_value\"\n\n key1 = \"some_cookie1\"\n value1 = \"some_value1\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert d1[\"some_cookie\"] == \"some_value\"\n assert d2[\"some_cookie\"] == \"some_value\"\n assert d3[\"some_cookie1\"] == \"some_value1\"\n\n def test_cookie_as_dict_keys(self):\n key = \"some_cookie\"\n value = \"some_value\"\n\n key1 = \"some_cookie1\"\n value1 = \"some_value1\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n keys = jar.keys()\n assert keys == list(keys)\n # make sure one can use keys multiple times\n assert list(keys) == list(keys)\n\n def test_cookie_as_dict_values(self):\n key = \"some_cookie\"\n value = \"some_value\"\n\n key1 = \"some_cookie1\"\n value1 = \"some_value1\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n values = jar.values()\n assert values == list(values)\n # make sure one can use values multiple times\n assert list(values) == list(values)\n\n def test_cookie_as_dict_items(self):\n key = \"some_cookie\"\n value = \"some_value\"\n\n key1 = \"some_cookie1\"\n value1 = \"some_value1\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n items = jar.items()\n assert items == list(items)\n # make sure one can use items multiple times\n assert list(items) == list(items)\n\n def test_cookie_duplicate_names_different_domains(self):\n key = \"some_cookie\"\n value = \"some_value\"\n domain1 = \"test1.com\"\n domain2 = \"test2.com\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, domain=domain1)\n jar.set(key, value, domain=domain2)\n assert key in jar\n items = jar.items()\n assert len(items) == 2\n\n # Verify that CookieConflictError is raised if domain is not specified\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n # Verify that CookieConflictError is not raised if domain is specified\n cookie = jar.get(key, domain=domain1)\n assert cookie == value\n\n def test_cookie_duplicate_names_raises_cookie_conflict_error(self):\n key = \"some_cookie\"\n value = \"some_value\"\n path = \"some_path\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, path=path)\n jar.set(key, value)\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n def test_cookie_policy_copy(self):\n class MyCookiePolicy(cookielib.DefaultCookiePolicy):\n pass\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set_policy(MyCookiePolicy())\n assert isinstance(jar.copy().get_policy(), MyCookiePolicy)\n\n def test_time_elapsed_blank(self, httpbin):\n r = requests.get(httpbin(\"get\"))\n td = r.elapsed\n total_seconds = (\n td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6\n ) / 10**6\n assert total_seconds > 0.0\n\n def test_empty_response_has_content_none(self):\n r = requests.Response()\n assert r.content is None\n\n def test_response_is_iterable(self):\n r = requests.Response()\n io = StringIO.StringIO(\"abc\")\n read_ = io.read\n\n def read_mock(amt, decode_content=None):\n return read_(amt)\n\n setattr(io, \"read\", read_mock)\n r.raw = io\n assert next(iter(r))\n io.close()\n\n def test_response_decode_unicode(self):\n 
\"\"\"When called with decode_unicode, Response.iter_content should always\n return unicode.\n \"\"\"\n r = requests.Response()\n r._content_consumed = True\n r._content = b\"the content\"\n r.encoding = \"ascii\"\n\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n # also for streaming\n r = requests.Response()\n r.raw = io.BytesIO(b\"the content\")\n r.encoding = \"ascii\"\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n def test_response_reason_unicode(self):\n # check for unicode HTTP status\n r = requests.Response()\n r.url = \"unicode URL\"\n r.reason = \"Komponenttia ei löydy\".encode()\n r.status_code = 404\n r.encoding = None\n assert not r.ok # old behaviour - crashes here\n\n def test_response_reason_unicode_fallback(self):\n # check raise_status falls back to ISO-8859-1\n r = requests.Response()\n r.url = \"some url\"\n reason = \"Komponenttia ei löydy\"\n r.reason = reason.encode(\"latin-1\")\n r.status_code = 500\n r.encoding = None\n with pytest.raises(requests.exceptions.HTTPError) as e:\n r.raise_for_status()\n assert reason in e.value.args[0]\n\n def test_response_chunk_size_type(self):\n \"\"\"Ensure that chunk_size is passed as None or an integer, otherwise\n raise a TypeError.\n \"\"\"\n r = requests.Response()\n r.raw = io.BytesIO(b\"the content\")\n chunks = r.iter_content(1)\n assert all(len(chunk) == 1 for chunk in chunks)\n\n r = requests.Response()\n r.raw = io.BytesIO(b\"the content\")\n chunks = r.iter_content(None)\n assert list(chunks) == [b\"the content\"]\n\n r = requests.Response()\n r.raw = io.BytesIO(b\"the content\")\n with pytest.raises(TypeError):\n chunks = r.iter_content(\"1024\")\n\n @pytest.mark.parametrize(\n \"exception, args, expected\",\n (\n (urllib3.exceptions.ProtocolError, tuple(), ChunkedEncodingError),\n (urllib3.exceptions.DecodeError, tuple(), ContentDecodingError),\n (urllib3.exceptions.ReadTimeoutError, (None, \"\", \"\"), ConnectionError),\n (urllib3.exceptions.SSLError, tuple(), RequestsSSLError),\n ),\n )\n def test_iter_content_wraps_exceptions(\n self, httpbin, mocker, exception, args, expected\n ):\n r = requests.Response()\n r.raw = mocker.Mock()\n # ReadTimeoutError can't be initialized by mock\n # so we'll manually create the instance with args\n r.raw.stream.side_effect = exception(*args)\n\n with pytest.raises(expected):\n next(r.iter_content(1024))\n\n def test_request_and_response_are_pickleable(self, httpbin):\n r = requests.get(httpbin(\"get\"))\n\n # verify we can pickle the original request\n assert pickle.loads(pickle.dumps(r.request))\n\n # verify we can pickle the response and that we have access to\n # the original request.\n pr = pickle.loads(pickle.dumps(r))\n assert r.request.url == pr.request.url\n assert r.request.headers == pr.request.headers\n\n def test_prepared_request_is_pickleable(self, httpbin):\n p = requests.Request(\"GET\", httpbin(\"get\")).prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_file_is_pickleable(self, httpbin):\n with open(__file__, \"rb\") as f:\n r = requests.Request(\"POST\", httpbin(\"post\"), files={\"file\": f})\n p = r.prepare()\n\n # Verify PreparedRequest can be 
pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_hook_is_pickleable(self, httpbin):\n r = requests.Request(\"GET\", httpbin(\"get\"), hooks=default_hooks())\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n assert r.hooks == p.hooks\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_cannot_send_unprepared_requests(self, httpbin):\n r = requests.Request(url=httpbin())\n with pytest.raises(ValueError):\n requests.Session().send(r)\n\n def test_http_error(self):\n error = requests.exceptions.HTTPError()\n assert not error.response\n response = requests.Response()\n error = requests.exceptions.HTTPError(response=response)\n assert error.response == response\n error = requests.exceptions.HTTPError(\"message\", response=response)\n assert str(error) == \"message\"\n assert error.response == response\n\n def test_session_pickling(self, httpbin):\n r = requests.Request(\"GET\", httpbin(\"get\"))\n s = requests.Session()\n\n s = pickle.loads(pickle.dumps(s))\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n assert r.status_code == 200\n\n def test_fixes_1329(self, httpbin):\n \"\"\"Ensure that header updates are done case-insensitively.\"\"\"\n s = requests.Session()\n s.headers.update({\"ACCEPT\": \"BOGUS\"})\n s.headers.update({\"accept\": \"application/json\"})\n r = s.get(httpbin(\"get\"))\n headers = r.request.headers\n assert headers[\"accept\"] == \"application/json\"\n assert headers[\"Accept\"] == \"application/json\"\n assert headers[\"ACCEPT\"] == \"application/json\"\n\n def test_uppercase_scheme_redirect(self, httpbin):\n parts = urlparse(httpbin(\"html\"))\n url = \"HTTP://\" + parts.netloc + parts.path\n r = requests.get(httpbin(\"redirect-to\"), params={\"url\": url})\n assert r.status_code == 200\n assert r.url.lower() == url.lower()\n\n def test_transport_adapter_ordering(self):\n s = requests.Session()\n order = [\"https://\", \"http://\"]\n assert order == list(s.adapters)\n s.mount(\"http://git\", HTTPAdapter())\n s.mount(\"http://github\", HTTPAdapter())\n s.mount(\"http://github.com\", HTTPAdapter())\n s.mount(\"http://github.com/about/\", HTTPAdapter())\n order = [\n \"http://github.com/about/\",\n \"http://github.com\",\n \"http://github\",\n \"http://git\",\n \"https://\",\n \"http://\",\n ]\n assert order == list(s.adapters)\n s.mount(\"http://gittip\", HTTPAdapter())\n s.mount(\"http://gittip.com\", HTTPAdapter())\n s.mount(\"http://gittip.com/about/\", HTTPAdapter())\n order = [\n \"http://github.com/about/\",\n \"http://gittip.com/about/\",\n \"http://github.com\",\n \"http://gittip.com\",\n \"http://github\",\n \"http://gittip\",\n \"http://git\",\n \"https://\",\n \"http://\",\n ]\n assert order == list(s.adapters)\n s2 = requests.Session()\n s2.adapters = {\"http://\": HTTPAdapter()}\n s2.mount(\"https://\", HTTPAdapter())\n assert \"http://\" in s2.adapters\n assert \"https://\" in s2.adapters\n\n def test_session_get_adapter_prefix_matching(self):\n prefix = \"https://example.com\"\n more_specific_prefix = prefix + \"/some/path\"\n\n url_matching_only_prefix = prefix + 
\"/another/path\"\n url_matching_more_specific_prefix = more_specific_prefix + \"/longer/path\"\n url_not_matching_prefix = \"https://another.example.com/\"\n\n s = requests.Session()\n prefix_adapter = HTTPAdapter()\n more_specific_prefix_adapter = HTTPAdapter()\n s.mount(prefix, prefix_adapter)\n s.mount(more_specific_prefix, more_specific_prefix_adapter)\n\n assert s.get_adapter(url_matching_only_prefix) is prefix_adapter\n assert (\n s.get_adapter(url_matching_more_specific_prefix)\n is more_specific_prefix_adapter\n )\n assert s.get_adapter(url_not_matching_prefix) not in (\n prefix_adapter,\n more_specific_prefix_adapter,\n )\n\n def test_session_get_adapter_prefix_matching_mixed_case(self):\n mixed_case_prefix = \"hTtPs://eXamPle.CoM/MixEd_CAse_PREfix\"\n url_matching_prefix = mixed_case_prefix + \"/full_url\"\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix) is my_adapter\n\n def test_session_get_adapter_prefix_matching_is_case_insensitive(self):\n mixed_case_prefix = \"hTtPs://eXamPle.CoM/MixEd_CAse_PREfix\"\n url_matching_prefix_with_different_case = (\n \"HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url\"\n )\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter\n\n def test_header_remove_is_case_insensitive(self, httpbin):\n # From issue #1321\n s = requests.Session()\n s.headers[\"foo\"] = \"bar\"\n r = s.get(httpbin(\"get\"), headers={\"FOO\": None})\n assert \"foo\" not in r.request.headers\n\n def test_params_are_merged_case_sensitive(self, httpbin):\n s = requests.Session()\n s.params[\"foo\"] = \"bar\"\n r = s.get(httpbin(\"get\"), params={\"FOO\": \"bar\"})\n assert r.json()[\"args\"] == {\"foo\": \"bar\", \"FOO\": \"bar\"}\n\n def test_long_authinfo_in_url(self):\n url = \"http://{}:{}@{}:9000/path?query#frag\".format(\n \"E8A3BE87-9E3F-4620-8858-95478E385B5B\",\n \"EA770032-DA4D-4D84-8CE9-29C6D910BF1E\",\n \"exactly-------------sixty-----------three------------characters\",\n )\n r = requests.Request(\"GET\", url).prepare()\n assert r.url == url\n\n def test_header_keys_are_native(self, httpbin):\n headers = {\"unicode\": \"blah\", b\"byte\": \"blah\"}\n r = requests.Request(\"GET\", httpbin(\"get\"), headers=headers)\n p = r.prepare()\n\n # This is testing that they are builtin strings. 
A bit weird, but there\n # we go.\n assert \"unicode\" in p.headers.keys()\n assert \"byte\" in p.headers.keys()\n\n def test_header_validation(self, httpbin):\n \"\"\"Ensure prepare_headers regex isn't flagging valid header contents.\"\"\"\n valid_headers = {\n \"foo\": \"bar baz qux\",\n \"bar\": b\"fbbq\",\n \"baz\": \"\",\n \"qux\": \"1\",\n }\n r = requests.get(httpbin(\"get\"), headers=valid_headers)\n for key in valid_headers.keys():\n assert valid_headers[key] == r.request.headers[key]\n\n @pytest.mark.parametrize(\n \"invalid_header, key\",\n (\n ({\"foo\": 3}, \"foo\"),\n ({\"bar\": {\"foo\": \"bar\"}}, \"bar\"),\n ({\"baz\": [\"foo\", \"bar\"]}, \"baz\"),\n ),\n )\n def test_header_value_not_str(self, httpbin, invalid_header, key):\n \"\"\"Ensure the header value is of type string or bytes as\n per discussion in GH issue #3386\n \"\"\"\n with pytest.raises(InvalidHeader) as excinfo:\n requests.get(httpbin(\"get\"), headers=invalid_header)\n assert key in str(excinfo.value)\n\n @pytest.mark.parametrize(\n \"invalid_header\",\n (\n {\"foo\": \"bar\\r\\nbaz: qux\"},\n {\"foo\": \"bar\\n\\rbaz: qux\"},\n {\"foo\": \"bar\\nbaz: qux\"},\n {\"foo\": \"bar\\rbaz: qux\"},\n {\"fo\\ro\": \"bar\"},\n {\"fo\\r\\no\": \"bar\"},\n {\"fo\\n\\ro\": \"bar\"},\n {\"fo\\no\": \"bar\"},\n ),\n )\n def test_header_no_return_chars(self, httpbin, invalid_header):\n \"\"\"Ensure that a header containing return character sequences raises an\n exception. Otherwise, multiple headers are created from a single string.\n \"\"\"\n with pytest.raises(InvalidHeader):\n requests.get(httpbin(\"get\"), headers=invalid_header)\n\n @pytest.mark.parametrize(\n \"invalid_header\",\n (\n {\" foo\": \"bar\"},\n {\"\\tfoo\": \"bar\"},\n {\" foo\": \"bar\"},\n {\"foo\": \" bar\"},\n {\"foo\": \" bar\"},\n {\"foo\": \"\\tbar\"},\n {\" \": \"bar\"},\n ),\n )\n def test_header_no_leading_space(self, httpbin, invalid_header):\n \"\"\"Ensure headers containing leading whitespace raise an\n InvalidHeader error before sending.\n \"\"\"\n with pytest.raises(InvalidHeader):\n requests.get(httpbin(\"get\"), headers=invalid_header)\n\n @pytest.mark.parametrize(\"files\", (\"foo\", b\"foo\", bytearray(b\"foo\")))\n def test_can_send_objects_with_files(self, httpbin, files):\n data = {\"a\": \"this is a string\"}\n files = {\"b\": files}\n r = requests.Request(\"POST\", httpbin(\"post\"), data=data, files=files)\n p = r.prepare()\n assert \"multipart/form-data\" in p.headers[\"Content-Type\"]\n\n def test_can_send_file_object_with_non_string_filename(self, httpbin):\n f = io.BytesIO()\n f.name = 2\n r = requests.Request(\"POST\", httpbin(\"post\"), files={\"f\": f})\n p = r.prepare()\n\n assert \"multipart/form-data\" in p.headers[\"Content-Type\"]\n\n def test_autoset_header_values_are_native(self, httpbin):\n data = \"this is a string\"\n length = \"16\"\n req = requests.Request(\"POST\", httpbin(\"post\"), data=data)\n p = req.prepare()\n\n assert p.headers[\"Content-Length\"] == length\n\n def test_nonhttp_schemes_dont_check_URLs(self):\n test_urls = (\n \"data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==\",\n \"file:///etc/passwd\",\n \"magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431\",\n )\n for test_url in test_urls:\n req = requests.Request(\"GET\", test_url)\n preq = req.prepare()\n assert test_url == preq.url\n\n def test_auth_is_stripped_on_http_downgrade(\n self, httpbin, httpbin_secure, httpbin_ca_bundle\n ):\n r = requests.get(\n httpbin_secure(\"redirect-to\"),\n params={\"url\": 
httpbin(\"get\")},\n auth=(\"user\", \"pass\"),\n verify=httpbin_ca_bundle,\n )\n assert r.history[0].request.headers[\"Authorization\"]\n assert \"Authorization\" not in r.request.headers\n\n def test_auth_is_retained_for_redirect_on_host(self, httpbin):\n r = requests.get(httpbin(\"redirect/1\"), auth=(\"user\", \"pass\"))\n h1 = r.history[0].request.headers[\"Authorization\"]\n h2 = r.request.headers[\"Authorization\"]\n\n assert h1 == h2\n\n def test_should_strip_auth_host_change(self):\n s = requests.Session()\n assert s.should_strip_auth(\n \"http://example.com/foo\", \"http://another.example.com/\"\n )\n\n def test_should_strip_auth_http_downgrade(self):\n s = requests.Session()\n assert s.should_strip_auth(\"https://example.com/foo\", \"http://example.com/bar\")\n\n def test_should_strip_auth_https_upgrade(self):\n s = requests.Session()\n assert not s.should_strip_auth(\n \"http://example.com/foo\", \"https://example.com/bar\"\n )\n assert not s.should_strip_auth(\n \"http://example.com:80/foo\", \"https://example.com/bar\"\n )\n assert not s.should_strip_auth(\n \"http://example.com/foo\", \"https://example.com:443/bar\"\n )\n # Non-standard ports should trigger stripping\n assert s.should_strip_auth(\n \"http://example.com:8080/foo\", \"https://example.com/bar\"\n )\n assert s.should_strip_auth(\n \"http://example.com/foo\", \"https://example.com:8443/bar\"\n )\n\n def test_should_strip_auth_port_change(self):\n s = requests.Session()\n assert s.should_strip_auth(\n \"http://example.com:1234/foo\", \"https://example.com:4321/bar\"\n )\n\n @pytest.mark.parametrize(\n \"old_uri, new_uri\",\n (\n (\"https://example.com:443/foo\", \"https://example.com/bar\"),\n (\"http://example.com:80/foo\", \"http://example.com/bar\"),\n (\"https://example.com/foo\", \"https://example.com:443/bar\"),\n (\"http://example.com/foo\", \"http://example.com:80/bar\"),\n ),\n )\n def test_should_strip_auth_default_port(self, old_uri, new_uri):\n s = requests.Session()\n assert not s.should_strip_auth(old_uri, new_uri)\n\n def test_manual_redirect_with_partial_body_read(self, httpbin):\n s = requests.Session()\n r1 = s.get(httpbin(\"redirect/2\"), allow_redirects=False, stream=True)\n assert r1.is_redirect\n rg = s.resolve_redirects(r1, r1.request, stream=True)\n\n # read only the first eight bytes of the response body,\n # then follow the redirect\n r1.iter_content(8)\n r2 = next(rg)\n assert r2.is_redirect\n\n # read all of the response via iter_content,\n # then follow the redirect\n for _ in r2.iter_content():\n pass\n r3 = next(rg)\n assert not r3.is_redirect\n\n def test_prepare_body_position_non_stream(self):\n data = b\"the data\"\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position is None\n\n def test_rewind_body(self):\n data = io.BytesIO(b\"the data\")\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position == 0\n assert prep.body.read() == b\"the data\"\n\n # the data has all been read\n assert prep.body.read() == b\"\"\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b\"the data\"\n\n def test_rewind_partially_read_body(self):\n data = io.BytesIO(b\"the data\")\n data.read(4) # read some data\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position == 4\n assert prep.body.read() == b\"data\"\n\n # the data has all been read\n assert prep.body.read() == b\"\"\n\n # rewind it back\n 
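# rewind_body() seeks the stream back to the _body_position recorded at\n # prepare time (4 here, not 0), so re-reading yields b\"data\" again.\n 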
requests.utils.rewind_body(prep)\n assert prep.body.read() == b\"data\"\n\n def test_rewind_body_no_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def __iter__(self):\n return\n\n data = BadFileObj(\"the data\")\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert \"Unable to rewind request body\" in str(e)\n\n def test_rewind_body_failed_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def seek(self, pos, whence=0):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj(\"the data\")\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert \"error occurred when rewinding request body\" in str(e)\n\n def test_rewind_body_failed_tell(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj(\"the data\")\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position is not None\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert \"Unable to rewind request body\" in str(e)\n\n def _patch_adapter_gzipped_redirect(self, session, url):\n adapter = session.get_adapter(url=url)\n org_build_response = adapter.build_response\n self._patched_response = False\n\n def build_response(*args, **kwargs):\n resp = org_build_response(*args, **kwargs)\n if not self._patched_response:\n resp.raw.headers[\"content-encoding\"] = \"gzip\"\n self._patched_response = True\n return resp\n\n adapter.build_response = build_response\n\n def test_redirect_with_wrong_gzipped_header(self, httpbin):\n s = requests.Session()\n url = httpbin(\"redirect/1\")\n self._patch_adapter_gzipped_redirect(s, url)\n s.get(url)\n\n @pytest.mark.parametrize(\n \"username, password, auth_str\",\n (\n (\"test\", \"test\", \"Basic dGVzdDp0ZXN0\"),\n (\n \"имя\".encode(),\n \"пароль\".encode(),\n \"Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA==\",\n ),\n ),\n )\n def test_basic_auth_str_is_always_native(self, username, password, auth_str):\n s = _basic_auth_str(username, password)\n assert isinstance(s, builtin_str)\n assert s == auth_str\n\n def test_requests_history_is_saved(self, httpbin):\n r = requests.get(httpbin(\"redirect/5\"))\n total = r.history[-1].history\n i = 0\n for item in r.history:\n assert item.history == total[0:i]\n i += 1\n\n def test_json_param_post_content_type_works(self, httpbin):\n r = requests.post(httpbin(\"post\"), json={\"life\": 42})\n assert r.status_code == 200\n assert \"application/json\" in r.request.headers[\"Content-Type\"]\n assert {\"life\": 42} == r.json()[\"json\"]\n\n def test_json_param_post_should_not_override_data_param(self, httpbin):\n r = requests.Request(\n method=\"POST\",\n url=httpbin(\"post\"),\n data={\"stuff\": \"elixr\"},\n json={\"music\": \"flute\"},\n )\n prep = r.prepare()\n assert \"stuff=elixr\" == prep.body\n\n def test_response_iter_lines(self, httpbin):\n r = requests.get(httpbin(\"stream/4\"), stream=True)\n assert r.status_code == 200\n\n it = r.iter_lines()\n next(it)\n assert len(list(it)) == 3\n\n def 
test_response_context_manager(self, httpbin):\n with requests.get(httpbin(\"stream/4\"), stream=True) as response:\n assert isinstance(response, requests.Response)\n\n assert response.raw.closed\n\n def test_unconsumed_session_response_closes_connection(self, httpbin):\n s = requests.session()\n\n with contextlib.closing(s.get(httpbin(\"stream/4\"), stream=True)) as response:\n pass\n\n assert response._content_consumed is False\n assert response.raw.closed\n\n @pytest.mark.xfail\n def test_response_iter_lines_reentrant(self, httpbin):\n \"\"\"Response.iter_lines() is not reentrant safe\"\"\"\n r = requests.get(httpbin(\"stream/4\"), stream=True)\n assert r.status_code == 200\n\n next(r.iter_lines())\n assert len(list(r.iter_lines())) == 3\n\n def test_session_close_proxy_clear(self, mocker):\n proxies = {\n \"one\": mocker.Mock(),\n \"two\": mocker.Mock(),\n }\n session = requests.Session()\n mocker.patch.dict(session.adapters[\"http://\"].proxy_manager, proxies)\n session.close()\n proxies[\"one\"].clear.assert_called_once_with()\n proxies[\"two\"].clear.assert_called_once_with()\n\n def test_proxy_auth(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:pass@httpbin.org\")\n assert headers == {\"Proxy-Authorization\": \"Basic dXNlcjpwYXNz\"}\n\n def test_proxy_auth_empty_pass(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:@httpbin.org\")\n assert headers == {\"Proxy-Authorization\": \"Basic dXNlcjo=\"}\n\n def test_response_json_when_content_is_None(self, httpbin):\n r = requests.get(httpbin(\"/status/204\"))\n # Make sure r.content is None\n r.status_code = 0\n r._content = False\n r._content_consumed = False\n\n assert r.content is None\n with pytest.raises(ValueError):\n r.json()\n\n def test_response_without_release_conn(self):\n \"\"\"Test `close` call for non-urllib3-like raw objects.\n Should work when `release_conn` attr doesn't exist on `response.raw`.\n \"\"\"\n resp = requests.Response()\n resp.raw = StringIO.StringIO(\"test\")\n assert not resp.raw.closed\n resp.close()\n assert resp.raw.closed\n\n def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = (\"user\", \"pass\")\n url = httpbin(\"post\")\n file_obj = io.BytesIO(b\"\")\n r = requests.Request(\"POST\", url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert \"Transfer-Encoding\" in prepared_request.headers\n assert \"Content-Length\" not in prepared_request.headers\n\n def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size > 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = (\"user\", \"pass\")\n url = httpbin(\"post\")\n file_obj = io.BytesIO(b\"test data\")\n r = requests.Request(\"POST\", url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert \"Transfer-Encoding\" not in prepared_request.headers\n assert \"Content-Length\" in prepared_request.headers\n\n def test_chunked_upload_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that requests with a generator body stream use\n Transfer-Encoding: chunked, not a Content-Length header.\n \"\"\"\n data = (i for i in [b\"a\", b\"b\", b\"c\"])\n url = httpbin(\"post\")\n r = requests.Request(\"POST\", url, data=data)\n prepared_request = r.prepare()\n assert \"Transfer-Encoding\" in 
prepared_request.headers\n assert \"Content-Length\" not in prepared_request.headers\n\n def test_custom_redirect_mixin(self, httpbin):\n \"\"\"Tests a custom mixin to overwrite ``get_redirect_target``.\n\n Ensures a subclassed ``requests.Session`` can handle a certain type of\n malformed redirect responses.\n\n 1. original request receives a proper response: 302 redirect\n 2. following the redirect, a malformed response is given:\n status code = HTTP 200\n location = alternate url\n 3. the custom session catches the edge case and follows the redirect\n \"\"\"\n url_final = httpbin(\"html\")\n querystring_malformed = urlencode({\"location\": url_final})\n url_redirect_malformed = httpbin(\"response-headers?%s\" % querystring_malformed)\n querystring_redirect = urlencode({\"url\": url_redirect_malformed})\n url_redirect = httpbin(\"redirect-to?%s\" % querystring_redirect)\n urls_test = [\n url_redirect,\n url_redirect_malformed,\n url_final,\n ]\n\n class CustomRedirectSession(requests.Session):\n def get_redirect_target(self, resp):\n # default behavior\n if resp.is_redirect:\n return resp.headers[\"location\"]\n # edge case - check to see if 'location' is in headers anyways\n location = resp.headers.get(\"location\")\n if location and (location != resp.url):\n return location\n return None\n\n session = CustomRedirectSession()\n r = session.get(urls_test[0])\n assert len(r.history) == 2\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n assert r.history[1].status_code == 200\n assert not r.history[1].is_redirect\n assert r.url == urls_test[2]\n\n\nclass TestCaseInsensitiveDict:\n @pytest.mark.parametrize(\n \"cid\",\n (\n CaseInsensitiveDict({\"Foo\": \"foo\", \"BAr\": \"bar\"}),\n CaseInsensitiveDict([(\"Foo\", \"foo\"), (\"BAr\", \"bar\")]),\n CaseInsensitiveDict(FOO=\"foo\", BAr=\"bar\"),\n ),\n )\n def test_init(self, cid):\n assert len(cid) == 2\n assert \"foo\" in cid\n assert \"bar\" in cid\n\n def test_docstring_example(self):\n cid = CaseInsensitiveDict()\n cid[\"Accept\"] = \"application/json\"\n assert cid[\"aCCEPT\"] == \"application/json\"\n assert list(cid) == [\"Accept\"]\n\n def test_len(self):\n cid = CaseInsensitiveDict({\"a\": \"a\", \"b\": \"b\"})\n cid[\"A\"] = \"a\"\n assert len(cid) == 2\n\n def test_getitem(self):\n cid = CaseInsensitiveDict({\"Spam\": \"blueval\"})\n assert cid[\"spam\"] == \"blueval\"\n assert cid[\"SPAM\"] == \"blueval\"\n\n def test_fixes_649(self):\n \"\"\"__setitem__ should behave case-insensitively.\"\"\"\n cid = CaseInsensitiveDict()\n cid[\"spam\"] = \"oneval\"\n cid[\"Spam\"] = \"twoval\"\n cid[\"sPAM\"] = \"redval\"\n cid[\"SPAM\"] = \"blueval\"\n assert cid[\"spam\"] == \"blueval\"\n assert cid[\"SPAM\"] == \"blueval\"\n assert list(cid.keys()) == [\"SPAM\"]\n\n def test_delitem(self):\n cid = CaseInsensitiveDict()\n cid[\"Spam\"] = \"someval\"\n del cid[\"sPam\"]\n assert \"spam\" not in cid\n assert len(cid) == 0\n\n def test_contains(self):\n cid = CaseInsensitiveDict()\n cid[\"Spam\"] = \"someval\"\n assert \"Spam\" in cid\n assert \"spam\" in cid\n assert \"SPAM\" in cid\n assert \"sPam\" in cid\n assert \"notspam\" not in cid\n\n def test_get(self):\n cid = CaseInsensitiveDict()\n cid[\"spam\"] = \"oneval\"\n cid[\"SPAM\"] = \"blueval\"\n assert cid.get(\"spam\") == \"blueval\"\n assert cid.get(\"SPAM\") == \"blueval\"\n assert cid.get(\"sPam\") == \"blueval\"\n assert cid.get(\"notspam\", \"default\") == \"default\"\n\n def test_update(self):\n cid = CaseInsensitiveDict()\n 
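# update() must match keys case-insensitively: updating with \"sPam\" below\n # should replace the value stored under \"spam\" rather than add a new key.\n 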
cid[\"spam\"] = \"blueval\"\n cid.update({\"sPam\": \"notblueval\"})\n assert cid[\"spam\"] == \"notblueval\"\n cid = CaseInsensitiveDict({\"Foo\": \"foo\", \"BAr\": \"bar\"})\n cid.update({\"fOO\": \"anotherfoo\", \"bAR\": \"anotherbar\"})\n assert len(cid) == 2\n assert cid[\"foo\"] == \"anotherfoo\"\n assert cid[\"bar\"] == \"anotherbar\"\n\n def test_update_retains_unchanged(self):\n cid = CaseInsensitiveDict({\"foo\": \"foo\", \"bar\": \"bar\"})\n cid.update({\"foo\": \"newfoo\"})\n assert cid[\"bar\"] == \"bar\"\n\n def test_iter(self):\n cid = CaseInsensitiveDict({\"Spam\": \"spam\", \"Eggs\": \"eggs\"})\n keys = frozenset([\"Spam\", \"Eggs\"])\n assert frozenset(iter(cid)) == keys\n\n def test_equality(self):\n cid = CaseInsensitiveDict({\"SPAM\": \"blueval\", \"Eggs\": \"redval\"})\n othercid = CaseInsensitiveDict({\"spam\": \"blueval\", \"eggs\": \"redval\"})\n assert cid == othercid\n del othercid[\"spam\"]\n assert cid != othercid\n assert cid == {\"spam\": \"blueval\", \"eggs\": \"redval\"}\n assert cid != object()\n\n def test_setdefault(self):\n cid = CaseInsensitiveDict({\"Spam\": \"blueval\"})\n assert cid.setdefault(\"spam\", \"notblueval\") == \"blueval\"\n assert cid.setdefault(\"notspam\", \"notblueval\") == \"notblueval\"\n\n def test_lower_items(self):\n cid = CaseInsensitiveDict(\n {\n \"Accept\": \"application/json\",\n \"user-Agent\": \"requests\",\n }\n )\n keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())\n lowerkeyset = frozenset([\"accept\", \"user-agent\"])\n assert keyset == lowerkeyset\n\n def test_preserve_key_case(self):\n cid = CaseInsensitiveDict(\n {\n \"Accept\": \"application/json\",\n \"user-Agent\": \"requests\",\n }\n )\n keyset = frozenset([\"Accept\", \"user-Agent\"])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_preserve_last_key_case(self):\n cid = CaseInsensitiveDict(\n {\n \"Accept\": \"application/json\",\n \"user-Agent\": \"requests\",\n }\n )\n cid.update({\"ACCEPT\": \"application/json\"})\n cid[\"USER-AGENT\"] = \"requests\"\n keyset = frozenset([\"ACCEPT\", \"USER-AGENT\"])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_copy(self):\n cid = CaseInsensitiveDict(\n {\n \"Accept\": \"application/json\",\n \"user-Agent\": \"requests\",\n }\n )\n cid_copy = cid.copy()\n assert cid == cid_copy\n cid[\"changed\"] = True\n assert cid != cid_copy\n\n\nclass TestMorselToCookieExpires:\n \"\"\"Tests for morsel_to_cookie when morsel contains expires.\"\"\"\n\n def test_expires_valid_str(self):\n \"\"\"Test case where we convert expires from string time.\"\"\"\n\n morsel = Morsel()\n morsel[\"expires\"] = \"Thu, 01-Jan-1970 00:00:01 GMT\"\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires == 1\n\n @pytest.mark.parametrize(\n \"value, exception\",\n (\n (100, TypeError),\n (\"woops\", ValueError),\n ),\n )\n def test_expires_invalid_int(self, value, exception):\n \"\"\"Test case where an invalid type is passed for expires.\"\"\"\n morsel = Morsel()\n morsel[\"expires\"] = value\n with pytest.raises(exception):\n morsel_to_cookie(morsel)\n\n def test_expires_none(self):\n \"\"\"Test case where expires is None.\"\"\"\n\n morsel = Morsel()\n morsel[\"expires\"] = None\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires is None\n\n\nclass TestMorselToCookieMaxAge:\n\n \"\"\"Tests for morsel_to_cookie when morsel contains 
max-age.\"\"\"\n\n def test_max_age_valid_int(self):\n \"\"\"Test case where a valid max age in seconds is passed.\"\"\"\n\n morsel = Morsel()\n morsel[\"max-age\"] = 60\n cookie = morsel_to_cookie(morsel)\n assert isinstance(cookie.expires, int)\n\n def test_max_age_invalid_str(self):\n \"\"\"Test case where a invalid max age is passed.\"\"\"\n\n morsel = Morsel()\n morsel[\"max-age\"] = \"woops\"\n with pytest.raises(TypeError):\n morsel_to_cookie(morsel)\n\n\nclass TestTimeout:\n def test_stream_timeout(self, httpbin):\n try:\n requests.get(httpbin(\"delay/10\"), timeout=2.0)\n except requests.exceptions.Timeout as e:\n assert \"Read timed out\" in e.args[0].args[0]\n\n @pytest.mark.parametrize(\n \"timeout, error_text\",\n (\n ((3, 4, 5), \"(connect, read)\"),\n (\"foo\", \"must be an int, float or None\"),\n ),\n )\n def test_invalid_timeout(self, httpbin, timeout, error_text):\n with pytest.raises(ValueError) as e:\n requests.get(httpbin(\"get\"), timeout=timeout)\n assert error_text in str(e)\n\n @pytest.mark.parametrize(\"timeout\", (None, Urllib3Timeout(connect=None, read=None)))\n def test_none_timeout(self, httpbin, timeout):\n \"\"\"Check that you can set None as a valid timeout value.\n\n To actually test this behavior, we'd want to check that setting the\n timeout to None actually lets the request block past the system default\n timeout. However, this would make the test suite unbearably slow.\n Instead we verify that setting the timeout to None does not prevent the\n request from succeeding.\n \"\"\"\n r = requests.get(httpbin(\"get\"), timeout=timeout)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n \"timeout\", ((None, 0.1), Urllib3Timeout(connect=None, read=0.1))\n )\n def test_read_timeout(self, httpbin, timeout):\n try:\n requests.get(httpbin(\"delay/10\"), timeout=timeout)\n pytest.fail(\"The recv() request should time out.\")\n except ReadTimeout:\n pass\n\n @pytest.mark.parametrize(\n \"timeout\", ((0.1, None), Urllib3Timeout(connect=0.1, read=None))\n )\n def test_connect_timeout(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail(\"The connect() request should time out.\")\n except ConnectTimeout as e:\n assert isinstance(e, ConnectionError)\n assert isinstance(e, Timeout)\n\n @pytest.mark.parametrize(\n \"timeout\", ((0.1, 0.1), Urllib3Timeout(connect=0.1, read=0.1))\n )\n def test_total_timeout_connect(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail(\"The connect() request should time out.\")\n except ConnectTimeout:\n pass\n\n def test_encoded_methods(self, httpbin):\n \"\"\"See: https://github.com/psf/requests/issues/2316\"\"\"\n r = requests.request(b\"GET\", httpbin(\"get\"))\n assert r.ok\n\n\nSendCall = collections.namedtuple(\"SendCall\", (\"args\", \"kwargs\"))\n\n\nclass RedirectSession(SessionRedirectMixin):\n def __init__(self, order_of_redirects):\n self.redirects = order_of_redirects\n self.calls = []\n self.max_redirects = 30\n self.cookies = {}\n self.trust_env = False\n\n def send(self, *args, **kwargs):\n self.calls.append(SendCall(args, kwargs))\n return self.build_response()\n\n def build_response(self):\n request = self.calls[-1].args[0]\n r = requests.Response()\n\n try:\n r.status_code = int(self.redirects.pop(0))\n except IndexError:\n r.status_code = 200\n\n r.headers = CaseInsensitiveDict({\"Location\": \"/\"})\n r.raw = self._build_raw()\n r.request = request\n return r\n\n def _build_raw(self):\n string = StringIO.StringIO(\"\")\n setattr(string, \"release_conn\", 
lambda *args: args)\n return string\n\n\ndef test_json_encodes_as_bytes():\n # urllib3 expects bodies as bytes-like objects\n body = {\"key\": \"value\"}\n p = PreparedRequest()\n p.prepare(method=\"GET\", url=\"https://www.example.com/\", json=body)\n assert isinstance(p.body, bytes)\n\n\ndef test_requests_are_updated_each_time(httpbin):\n session = RedirectSession([303, 307])\n prep = requests.Request(\"POST\", httpbin(\"post\")).prepare()\n r0 = session.send(prep)\n assert r0.request.method == \"POST\"\n assert session.calls[-1] == SendCall((r0.request,), {})\n redirect_generator = session.resolve_redirects(r0, prep)\n default_keyword_args = {\n \"stream\": False,\n \"verify\": True,\n \"cert\": None,\n \"timeout\": None,\n \"allow_redirects\": False,\n \"proxies\": {},\n }\n for response in redirect_generator:\n assert response.request.method == \"GET\"\n send_call = SendCall((response.request,), default_keyword_args)\n assert session.calls[-1] == send_call\n\n\n@pytest.mark.parametrize(\n \"var,url,proxy\",\n [\n (\"http_proxy\", \"http://example.com\", \"socks5://proxy.com:9876\"),\n (\"https_proxy\", \"https://example.com\", \"socks5://proxy.com:9876\"),\n (\"all_proxy\", \"http://example.com\", \"socks5://proxy.com:9876\"),\n (\"all_proxy\", \"https://example.com\", \"socks5://proxy.com:9876\"),\n ],\n)\ndef test_proxy_env_vars_override_default(var, url, proxy):\n session = requests.Session()\n prep = PreparedRequest()\n prep.prepare(method=\"GET\", url=url)\n\n kwargs = {var: proxy}\n scheme = urlparse(url).scheme\n with override_environ(**kwargs):\n proxies = session.rebuild_proxies(prep, {})\n assert scheme in proxies\n assert proxies[scheme] == proxy\n\n\n@pytest.mark.parametrize(\n \"data\",\n (\n ((\"a\", \"b\"), (\"c\", \"d\")),\n ((\"c\", \"d\"), (\"a\", \"b\")),\n ((\"a\", \"b\"), (\"c\", \"d\"), (\"e\", \"f\")),\n ),\n)\ndef test_data_argument_accepts_tuples(data):\n \"\"\"Ensure that the data argument will accept tuples of strings\n and properly encode them.\n \"\"\"\n p = PreparedRequest()\n p.prepare(\n method=\"GET\", url=\"http://www.example.com\", data=data, hooks=default_hooks()\n )\n assert p.body == urlencode(data)\n\n\n@pytest.mark.parametrize(\n \"kwargs\",\n (\n None,\n {\n \"method\": \"GET\",\n \"url\": \"http://www.example.com\",\n \"data\": \"foo=bar\",\n \"hooks\": default_hooks(),\n },\n {\n \"method\": \"GET\",\n \"url\": \"http://www.example.com\",\n \"data\": \"foo=bar\",\n \"hooks\": default_hooks(),\n \"cookies\": {\"foo\": \"bar\"},\n },\n {\"method\": \"GET\", \"url\": \"http://www.example.com/üniçø∂é\"},\n ),\n)\ndef test_prepared_copy(kwargs):\n p = PreparedRequest()\n if kwargs:\n p.prepare(**kwargs)\n copy = p.copy()\n for attr in (\"method\", \"url\", \"headers\", \"_cookies\", \"body\", \"hooks\"):\n assert getattr(p, attr) == getattr(copy, attr)\n\n\ndef test_urllib3_retries(httpbin):\n from urllib3.util import Retry\n\n s = requests.Session()\n s.mount(\"http://\", HTTPAdapter(max_retries=Retry(total=2, status_forcelist=[500])))\n\n with pytest.raises(RetryError):\n s.get(httpbin(\"status/500\"))\n\n\ndef test_urllib3_pool_connection_closed(httpbin):\n s = requests.Session()\n s.mount(\"http://\", HTTPAdapter(pool_connections=0, pool_maxsize=0))\n\n try:\n s.get(httpbin(\"status/200\"))\n except ConnectionError as e:\n assert \"Pool is closed.\" in str(e)\n\n\nclass TestPreparingURLs:\n @pytest.mark.parametrize(\n \"url,expected\",\n (\n (\"http://google.com\", \"http://google.com/\"),\n (\"http://ジェーピーニック.jp\", 
\"http://xn--hckqz9bzb1cyrb.jp/\"),\n (\"http://xn--n3h.net/\", \"http://xn--n3h.net/\"),\n (\"http://ジェーピーニック.jp\".encode(), \"http://xn--hckqz9bzb1cyrb.jp/\"),\n (\"http://straße.de/straße\", \"http://xn--strae-oqa.de/stra%C3%9Fe\"),\n (\n \"http://straße.de/straße\".encode(),\n \"http://xn--strae-oqa.de/stra%C3%9Fe\",\n ),\n (\n \"http://Königsgäßchen.de/straße\",\n \"http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe\",\n ),\n (\n \"http://Königsgäßchen.de/straße\".encode(),\n \"http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe\",\n ),\n (b\"http://xn--n3h.net/\", \"http://xn--n3h.net/\"),\n (\n b\"http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/\",\n \"http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/\",\n ),\n (\n \"http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/\",\n \"http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/\",\n ),\n ),\n )\n def test_preparing_url(self, url, expected):\n def normalize_percent_encode(x):\n # Helper function that normalizes equivalent\n # percent-encoded bytes before comparisons\n for c in re.findall(r\"%[a-fA-F0-9]{2}\", x):\n x = x.replace(c, c.upper())\n return x\n\n r = requests.Request(\"GET\", url=url)\n p = r.prepare()\n assert normalize_percent_encode(p.url) == expected\n\n @pytest.mark.parametrize(\n \"url\",\n (\n b\"http://*.google.com\",\n b\"http://*\",\n \"http://*.google.com\",\n \"http://*\",\n \"http://☃.net/\",\n ),\n )\n def test_preparing_bad_url(self, url):\n r = requests.Request(\"GET\", url=url)\n with pytest.raises(requests.exceptions.InvalidURL):\n r.prepare()\n\n @pytest.mark.parametrize(\"url, exception\", ((\"http://localhost:-1\", InvalidURL),))\n def test_redirecting_to_bad_url(self, httpbin, url, exception):\n with pytest.raises(exception):\n requests.get(httpbin(\"redirect-to\"), params={\"url\": url})\n\n @pytest.mark.parametrize(\n \"input, expected\",\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n \"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n \"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n \"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n b\"mailto:user@example.org\",\n \"mailto:user@example.org\",\n ),\n (\n \"mailto:user@example.org\",\n \"mailto:user@example.org\",\n ),\n (\n b\"data:SSDimaUgUHl0aG9uIQ==\",\n \"data:SSDimaUgUHl0aG9uIQ==\",\n ),\n ),\n )\n def test_url_mutation(self, input, expected):\n \"\"\"\n This test validates that we correctly exclude some URLs from\n preparation, and that we handle others. 
Specifically, it tests that\n any URL whose scheme doesn't begin with \"http\" is left alone, and\n those whose scheme *does* begin with \"http\" are mutated.\n \"\"\"\n r = requests.Request(\"GET\", url=input)\n p = r.prepare()\n assert p.url == expected\n\n @pytest.mark.parametrize(\n \"input, params, expected\",\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n \"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n \"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n \"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n b\"mailto:user@example.org\",\n {\"key\": \"value\"},\n \"mailto:user@example.org\",\n ),\n (\n \"mailto:user@example.org\",\n {\"key\": \"value\"},\n \"mailto:user@example.org\",\n ),\n ),\n )\n def test_parameters_for_nonstandard_schemes(self, input, params, expected):\n \"\"\"\n Setting parameters for nonstandard schemes is allowed if those schemes\n begin with \"http\", and is forbidden otherwise.\n \"\"\"\n r = requests.Request(\"GET\", url=input, params=params)\n p = r.prepare()\n assert p.url == expected\n\n def test_post_json_nan(self, httpbin):\n data = {\"foo\": float(\"nan\")}\n with pytest.raises(requests.exceptions.InvalidJSONError):\n requests.post(httpbin(\"post\"), json=data)\n\n def test_json_decode_compatibility(self, httpbin):\n r = requests.get(httpbin(\"bytes/20\"))\n with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:\n r.json()\n assert isinstance(excinfo.value, RequestException)\n assert isinstance(excinfo.value, JSONDecodeError)\n assert r.text not in str(excinfo.value)\n\n def test_json_decode_persists_doc_attr(self, httpbin):\n r = requests.get(httpbin(\"bytes/20\"))\n with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:\n r.json()\n assert excinfo.value.doc == r.text\n",
"path": "tests/test_requests.py"
}
] | 13_0 | python | import sys
import pytest
import requests
# Requests to this URL should always fail with a connection timeout (nothing
# listening on that port)
TARPIT = "http://10.255.255.1"
# This is to avoid waiting out the connection timeout that using TARPIT incurs
INVALID_PROXY = "http://localhost:1"
class TestRequests:
try:
from ssl import SSLContext
del SSLContext
HAS_MODERN_SSL = True
except ImportError:
HAS_MODERN_SSL = False
try:
requests.pyopenssl
HAS_PYOPENSSL = True
except AttributeError:
HAS_PYOPENSSL = False
try:
from http.server import HTTPServer, SimpleHTTPRequestHandler
except ImportError:
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
@staticmethod
def prepare_url(value):
from requests.compat import urljoin
# Issue #1483: Make sure the URL always has a trailing slash
httpbin_url = value.url.rstrip("/") + "/"
def inner(*suffix):
return urljoin(httpbin_url, "/".join(suffix))
return inner
@pytest.fixture
def httpbin(self, httpbin):
return self.prepare_url(httpbin)
@pytest.fixture
def httpbin_secure(self, httpbin_secure):
return self.prepare_url(httpbin_secure)
@pytest.fixture
def nosan_server(self, tmp_path_factory):
# delay importing until the fixture in order to make it possible
# to deselect the test via command-line when trustme is not available
import trustme
import ssl
import threading
tmpdir = tmp_path_factory.mktemp("certs")
ca = trustme.CA()
# only commonName, no subjectAltName
server_cert = ca.issue_cert(common_name="localhost")
ca_bundle = str(tmpdir / "ca.pem")
ca.cert_pem.write_to_path(ca_bundle)
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
server_cert.configure_cert(context)
server = self.HTTPServer(("localhost", 0), self.SimpleHTTPRequestHandler)
server.socket = context.wrap_socket(server.socket, server_side=True)
server_thread = threading.Thread(target=server.serve_forever)
server_thread.start()
yield "localhost", server.server_address[1], ca_bundle
server.shutdown()
server_thread.join()
def test_header_with_subclass_types(self, httpbin):
"""If the subclasses does not behave *exactly* like
the base bytes/str classes, this is not supported.
This test is for backwards compatibility.
"""
class MyString(str):
pass
class MyBytes(bytes):
pass
r_str = requests.get(httpbin("get"), headers={MyString("x-custom"): "myheader"})
assert r_str.request.headers["x-custom"] == "myheader"
r_bytes = requests.get(
httpbin("get"), headers={MyBytes(b"x-custom"): b"myheader"}
)
assert r_bytes.request.headers["x-custom"] == b"myheader"
r_mixed = requests.get(
httpbin("get"), headers={MyString("x-custom"): MyBytes(b"myheader")}
)
assert r_mixed.request.headers["x-custom"] == b"myheader"
def main():
import pytest
# Run the pytest tests programmatically
exit_code = pytest.main(["-v", __file__])
# Exit with status code 1 if any test fails, otherwise 0
if exit_code != 0:
sys.exit(1)
else:
sys.exit(0)
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/requests | Your objective is to enhance the validation of HTTP headers in the requests library. The primary goal is to ensure that header names and values contain no leading whitespace, reserved characters, or return characters, any of which could lead to security vulnerabilities such as header injection. The key files to modify are `requests/_internal_utils.py` and `requests/utils.py`. The main changes are to define regular expressions for valid header names and values in `_internal_utils.py` and to update the header validation logic in `utils.py`. | 60865f2 | -e .[socks]
pytest
pytest-cov
pytest-httpbin==1.0.0
pytest-mock
httpbin==0.7.0
trustme
wheel
chardet>=3.0.2,<3.1.0
idna>=2.5,<2.8
urllib3>=1.21.1,<1.24
certifi>=2017.4.17
# Flask Stack
Flask>1.0,<2.0
markupsafe<2.1
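As a quick illustration of the risk described in the instruction above, the sketch below (illustrative only; the header name X-Injected and all values are invented for the demo) shows how an unvalidated header value containing a CRLF sequence smuggles a second header into the serialized request:

# Hypothetical header-injection demo: a value containing "\r\n" turns
# one header into two once it is written onto the wire.
value = "bar\r\nX-Injected: evil"
raw = f"X-Custom: {value}\r\n"
print(raw)
# X-Custom: bar
# X-Injected: evil

Rejecting leading whitespace and return characters at request-preparation time, as the patch below does, closes exactly this hole.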
| python3.9 | e36f3459 | diff --git a/requests/_internal_utils.py b/requests/_internal_utils.py
--- a/requests/_internal_utils.py
+++ b/requests/_internal_utils.py
@@ -5,9 +5,20 @@ requests._internal_utils
Provides utility functions that are consumed internally by Requests
which depend on extremely few external helpers (such as compat)
"""
+import re
from .compat import builtin_str
+_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$")
+_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$")
+_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$")
+_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$")
+
+HEADER_VALIDATORS = {
+ bytes: (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE),
+ str: (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR),
+}
+
def to_native_string(string, encoding="ascii"):
"""Given a string object, regardless of type, returns a representation of
diff --git a/requests/utils.py b/requests/utils.py
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -25,7 +25,7 @@ from . import certs
from .__version__ import __version__
# to_native_string is unused here, but imported here for backwards compatibility
-from ._internal_utils import to_native_string # noqa: F401
+from ._internal_utils import HEADER_VALIDATORS, to_native_string # noqa: F401
from .compat import (
Mapping,
basestring,
@@ -1024,33 +1024,30 @@ def get_auth_from_url(url):
return auth
-# Moved outside of function to avoid recompile every call
-_CLEAN_HEADER_REGEX_BYTE = re.compile(b"^\\S[^\\r\\n]*$|^$")
-_CLEAN_HEADER_REGEX_STR = re.compile(r"^\S[^\r\n]*$|^$")
-
-
def check_header_validity(header):
- """Verifies that header value is a string which doesn't contain
- leading whitespace or return characters. This prevents unintended
- header injection.
+    """Verifies that header parts don't contain leading whitespace,
+ reserved characters, or return characters.
:param header: tuple, in the format (name, value).
"""
name, value = header
- if isinstance(value, bytes):
- pat = _CLEAN_HEADER_REGEX_BYTE
- else:
- pat = _CLEAN_HEADER_REGEX_STR
- try:
- if not pat.match(value):
+ for part in header:
+ if type(part) not in HEADER_VALIDATORS:
raise InvalidHeader(
- f"Invalid return character or leading space in header: {name}"
+ f"Header part ({part!r}) from {{{name!r}: {value!r}}} must be "
+ f"of type str or bytes, not {type(part)}"
)
- except TypeError:
+
+ _validate_header_part(name, "name", HEADER_VALIDATORS[type(name)][0])
+ _validate_header_part(value, "value", HEADER_VALIDATORS[type(value)][1])
+
+
+def _validate_header_part(header_part, header_kind, validator):
+ if not validator.match(header_part):
raise InvalidHeader(
- f"Value for header {{{name}: {value}}} must be of type "
- f"str or bytes, not {type(value)}"
+            f"Invalid leading whitespace, reserved character(s), or return "
+ f"character(s) in header {header_kind}: {header_part!r}"
)
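A minimal sketch of how the validators introduced above behave; the two regexes are copied verbatim from the patch (only the variable names are shortened here), and the sample header names are made up for the demo. An empty value stays legal, while leading whitespace, return characters, and a reserved ":" inside a name are all rejected:

import re

# Copies of the patterns added to requests/_internal_utils.py above.
_NAME_RE = re.compile(r"^[^:\s][^:\r\n]*$")
_VALUE_RE = re.compile(r"^\S[^\r\n]*$|^$")

assert _NAME_RE.match("X-Custom")        # well-formed name: accepted
assert not _NAME_RE.match(" X-Custom")   # leading whitespace: rejected
assert not _NAME_RE.match("X:Custom")    # reserved ":" in a name: rejected
assert _VALUE_RE.match("")               # empty value remains legal
assert not _VALUE_RE.match("a\r\nb: c")  # return characters: rejected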
diff --git a/tests/test_requests.py b/tests/test_requests.py
--- a/tests/test_requests.py
+++ b/tests/test_requests.py
@@ -1096,7 +1096,7 @@ class TestRequests:
def test_custom_content_type(self, httpbin):
with open(__file__, "rb") as f1:
with open(__file__, "rb") as f2:
- data={"stuff": json.dumps({"a": 123})}
+ data = {"stuff": json.dumps({"a": 123})}
files = {
"file1": ("test_requests.py", f1),
"file2": ("test_requests", f2, "text/py-content-type"),
@@ -1682,68 +1682,70 @@ class TestRequests:
def test_header_validation(self, httpbin):
"""Ensure prepare_headers regex isn't flagging valid header contents."""
- headers_ok = {
+ valid_headers = {
"foo": "bar baz qux",
"bar": b"fbbq",
"baz": "",
"qux": "1",
}
- r = requests.get(httpbin("get"), headers=headers_ok)
- assert r.request.headers["foo"] == headers_ok["foo"]
+ r = requests.get(httpbin("get"), headers=valid_headers)
+ for key in valid_headers.keys():
+            assert valid_headers[key] == r.request.headers[key]
- def test_header_value_not_str(self, httpbin):
+ @pytest.mark.parametrize(
+ "invalid_header, key",
+ (
+ ({"foo": 3}, "foo"),
+ ({"bar": {"foo": "bar"}}, "bar"),
+ ({"baz": ["foo", "bar"]}, "baz"),
+ ),
+ )
+ def test_header_value_not_str(self, httpbin, invalid_header, key):
"""Ensure the header value is of type string or bytes as
per discussion in GH issue #3386
"""
- headers_int = {"foo": 3}
- headers_dict = {"bar": {"foo": "bar"}}
- headers_list = {"baz": ["foo", "bar"]}
-
- # Test for int
- with pytest.raises(InvalidHeader) as excinfo:
- requests.get(httpbin("get"), headers=headers_int)
- assert "foo" in str(excinfo.value)
- # Test for dict
with pytest.raises(InvalidHeader) as excinfo:
- requests.get(httpbin("get"), headers=headers_dict)
- assert "bar" in str(excinfo.value)
- # Test for list
- with pytest.raises(InvalidHeader) as excinfo:
- requests.get(httpbin("get"), headers=headers_list)
- assert "baz" in str(excinfo.value)
+ requests.get(httpbin("get"), headers=invalid_header)
+ assert key in str(excinfo.value)
- def test_header_no_return_chars(self, httpbin):
+ @pytest.mark.parametrize(
+ "invalid_header",
+ (
+ {"foo": "bar\r\nbaz: qux"},
+ {"foo": "bar\n\rbaz: qux"},
+ {"foo": "bar\nbaz: qux"},
+ {"foo": "bar\rbaz: qux"},
+ {"fo\ro": "bar"},
+ {"fo\r\no": "bar"},
+ {"fo\n\ro": "bar"},
+ {"fo\no": "bar"},
+ ),
+ )
+ def test_header_no_return_chars(self, httpbin, invalid_header):
"""Ensure that a header containing return character sequences raise an
exception. Otherwise, multiple headers are created from single string.
"""
- headers_ret = {"foo": "bar\r\nbaz: qux"}
- headers_lf = {"foo": "bar\nbaz: qux"}
- headers_cr = {"foo": "bar\rbaz: qux"}
-
- # Test for newline
- with pytest.raises(InvalidHeader):
- requests.get(httpbin("get"), headers=headers_ret)
- # Test for line feed
- with pytest.raises(InvalidHeader):
- requests.get(httpbin("get"), headers=headers_lf)
- # Test for carriage return
with pytest.raises(InvalidHeader):
- requests.get(httpbin("get"), headers=headers_cr)
+ requests.get(httpbin("get"), headers=invalid_header)
- def test_header_no_leading_space(self, httpbin):
+ @pytest.mark.parametrize(
+ "invalid_header",
+ (
+ {" foo": "bar"},
+ {"\tfoo": "bar"},
+            {"    foo": "bar"},
+ {"foo": " bar"},
+            {"foo": "    bar"},
+ {"foo": "\tbar"},
+ {" ": "bar"},
+ ),
+ )
+ def test_header_no_leading_space(self, httpbin, invalid_header):
"""Ensure headers containing leading whitespace raise
        an InvalidHeader error before sending.
"""
- headers_space = {"foo": " bar"}
-        headers_tab = {"foo": "\tbar"}
-
- # Test for whitespace
- with pytest.raises(InvalidHeader):
- requests.get(httpbin("get"), headers=headers_space)
-
- # Test for tab
with pytest.raises(InvalidHeader):
- requests.get(httpbin("get"), headers=headers_tab)
+ requests.get(httpbin("get"), headers=invalid_header)
@pytest.mark.parametrize("files", ("foo", b"foo", bytearray(b"foo")))
def test_can_send_objects_with_files(self, httpbin, files):
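For context, a hedged end-to-end sketch of the patched behavior from user code; the URL is a placeholder, and no connection is ever opened because InvalidHeader is raised while the request is still being prepared:

import requests
from requests.exceptions import InvalidHeader

try:
    # check_header_validity() runs during request preparation, so the
    # malformed value below is rejected before anything hits the network.
    requests.get("http://example.com", headers={"foo": "bar\r\nbaz: qux"})
except InvalidHeader as exc:
    print(exc)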
| [
{
"content": "\"\"\"\nrequests._internal_utils\n~~~~~~~~~~~~~~\n\nProvides utility functions that are consumed internally by Requests\nwhich depend on extremely few external helpers (such as compat)\n\"\"\"\n\nfrom .compat import builtin_str\n\n\ndef to_native_string(string, encoding=\"ascii\"):\n \"\"\"Given a string object, regardless of type, returns a representation of\n that string in the native string type, encoding and decoding where\n necessary. This assumes ASCII unless told otherwise.\n \"\"\"\n if isinstance(string, builtin_str):\n out = string\n else:\n out = string.decode(encoding)\n\n return out\n\n\ndef unicode_is_ascii(u_string):\n \"\"\"Determine if unicode string only contains ASCII characters.\n\n :param str u_string: unicode string to check. Must be unicode\n and not Python 2 `str`.\n :rtype: bool\n \"\"\"\n assert isinstance(u_string, str)\n try:\n u_string.encode(\"ascii\")\n return True\n except UnicodeEncodeError:\n return False\n",
"path": "requests/_internal_utils.py"
},
{
"content": "\"\"\"\nrequests.utils\n~~~~~~~~~~~~~~\n\nThis module provides utility functions that are used within Requests\nthat are also useful for external consumption.\n\"\"\"\n\nimport codecs\nimport contextlib\nimport io\nimport os\nimport re\nimport socket\nimport struct\nimport sys\nimport tempfile\nimport warnings\nimport zipfile\nfrom collections import OrderedDict\n\nfrom urllib3.util import make_headers, parse_url\n\nfrom . import certs\nfrom .__version__ import __version__\n\n# to_native_string is unused here, but imported here for backwards compatibility\nfrom ._internal_utils import to_native_string # noqa: F401\nfrom .compat import (\n Mapping,\n basestring,\n bytes,\n getproxies,\n getproxies_environment,\n integer_types,\n)\nfrom .compat import parse_http_list as _parse_list_header\nfrom .compat import (\n proxy_bypass,\n proxy_bypass_environment,\n quote,\n str,\n unquote,\n urlparse,\n urlunparse,\n)\nfrom .cookies import cookiejar_from_dict\nfrom .exceptions import (\n FileModeWarning,\n InvalidHeader,\n InvalidURL,\n UnrewindableBodyError,\n)\nfrom .structures import CaseInsensitiveDict\n\nNETRC_FILES = (\".netrc\", \"_netrc\")\n\nDEFAULT_CA_BUNDLE_PATH = certs.where()\n\nDEFAULT_PORTS = {\"http\": 80, \"https\": 443}\n\n# Ensure that ', ' is used to preserve previous delimiter behavior.\nDEFAULT_ACCEPT_ENCODING = \", \".join(\n re.split(r\",\\s*\", make_headers(accept_encoding=True)[\"accept-encoding\"])\n)\n\n\nif sys.platform == \"win32\":\n # provide a proxy_bypass version on Windows without DNS lookups\n\n def proxy_bypass_registry(host):\n try:\n import winreg\n except ImportError:\n return False\n\n try:\n internetSettings = winreg.OpenKey(\n winreg.HKEY_CURRENT_USER,\n r\"Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings\",\n )\n # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it\n proxyEnable = int(winreg.QueryValueEx(internetSettings, \"ProxyEnable\")[0])\n # ProxyOverride is almost always a string\n proxyOverride = winreg.QueryValueEx(internetSettings, \"ProxyOverride\")[0]\n except (OSError, ValueError):\n return False\n if not proxyEnable or not proxyOverride:\n return False\n\n # make a check value list from the registry entry: replace the\n # '<local>' string by the localhost entry and the corresponding\n # canonical entry.\n proxyOverride = proxyOverride.split(\";\")\n # now check if we match one of the registry values.\n for test in proxyOverride:\n if test == \"<local>\":\n if \".\" not in host:\n return True\n test = test.replace(\".\", r\"\\.\") # mask dots\n test = test.replace(\"*\", r\".*\") # change glob sequence\n test = test.replace(\"?\", r\".\") # change glob char\n if re.match(test, host, re.I):\n return True\n return False\n\n def proxy_bypass(host): # noqa\n \"\"\"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n \"\"\"\n if getproxies_environment():\n return proxy_bypass_environment(host)\n else:\n return proxy_bypass_registry(host)\n\n\ndef dict_to_sequence(d):\n \"\"\"Returns an internal sequence dictionary update.\"\"\"\n\n if hasattr(d, \"items\"):\n d = d.items()\n\n return d\n\n\ndef super_len(o):\n total_length = None\n current_position = 0\n\n if hasattr(o, \"__len__\"):\n total_length = len(o)\n\n elif hasattr(o, \"len\"):\n total_length = o.len\n\n elif hasattr(o, \"fileno\"):\n try:\n fileno = o.fileno()\n except (io.UnsupportedOperation, AttributeError):\n # AttributeError is a surprising exception, seeing as how we've 
just checked\n # that `hasattr(o, 'fileno')`. It happens for objects obtained via\n # `Tarfile.extractfile()`, per issue 5229.\n pass\n else:\n total_length = os.fstat(fileno).st_size\n\n # Having used fstat to determine the file length, we need to\n # confirm that this file was opened up in binary mode.\n if \"b\" not in o.mode:\n warnings.warn(\n (\n \"Requests has determined the content-length for this \"\n \"request using the binary size of the file: however, the \"\n \"file has been opened in text mode (i.e. without the 'b' \"\n \"flag in the mode). This may lead to an incorrect \"\n \"content-length. In Requests 3.0, support will be removed \"\n \"for files in text mode.\"\n ),\n FileModeWarning,\n )\n\n if hasattr(o, \"tell\"):\n try:\n current_position = o.tell()\n except OSError:\n # This can happen in some weird situations, such as when the file\n # is actually a special file descriptor like stdin. In this\n # instance, we don't know what the length is, so set it to zero and\n # let requests chunk it instead.\n if total_length is not None:\n current_position = total_length\n else:\n if hasattr(o, \"seek\") and total_length is None:\n # StringIO and BytesIO have seek but no usable fileno\n try:\n # seek to end of file\n o.seek(0, 2)\n total_length = o.tell()\n\n # seek back to current position to support\n # partially read file-like objects\n o.seek(current_position or 0)\n except OSError:\n total_length = 0\n\n if total_length is None:\n total_length = 0\n\n return max(0, total_length - current_position)\n\n\ndef get_netrc_auth(url, raise_errors=False):\n \"\"\"Returns the Requests tuple auth for a given url from netrc.\"\"\"\n\n netrc_file = os.environ.get(\"NETRC\")\n if netrc_file is not None:\n netrc_locations = (netrc_file,)\n else:\n netrc_locations = (f\"~/{f}\" for f in NETRC_FILES)\n\n try:\n from netrc import NetrcParseError, netrc\n\n netrc_path = None\n\n for f in netrc_locations:\n try:\n loc = os.path.expanduser(f)\n except KeyError:\n # os.path.expanduser can fail when $HOME is undefined and\n # getpwuid fails. See https://bugs.python.org/issue20164 &\n # https://github.com/psf/requests/issues/1846\n return\n\n if os.path.exists(loc):\n netrc_path = loc\n break\n\n # Abort early if there isn't one.\n if netrc_path is None:\n return\n\n ri = urlparse(url)\n\n # Strip port numbers from netloc. 
This weird `if...encode`` dance is\n # used for Python 3.2, which doesn't support unicode literals.\n splitstr = b\":\"\n if isinstance(url, str):\n splitstr = splitstr.decode(\"ascii\")\n host = ri.netloc.split(splitstr)[0]\n\n try:\n _netrc = netrc(netrc_path).authenticators(host)\n if _netrc:\n # Return with login / password\n login_i = 0 if _netrc[0] else 1\n return (_netrc[login_i], _netrc[2])\n except (NetrcParseError, OSError):\n # If there was a parsing error or a permissions issue reading the file,\n # we'll just skip netrc auth unless explicitly asked to raise errors.\n if raise_errors:\n raise\n\n # App Engine hackiness.\n except (ImportError, AttributeError):\n pass\n\n\ndef guess_filename(obj):\n \"\"\"Tries to guess the filename of the given object.\"\"\"\n name = getattr(obj, \"name\", None)\n if name and isinstance(name, basestring) and name[0] != \"<\" and name[-1] != \">\":\n return os.path.basename(name)\n\n\ndef extract_zipped_paths(path):\n \"\"\"Replace nonexistent paths that look like they refer to a member of a zip\n archive with the location of an extracted copy of the target, or else\n just return the provided path unchanged.\n \"\"\"\n if os.path.exists(path):\n # this is already a valid path, no need to do anything further\n return path\n\n # find the first valid part of the provided path and treat that as a zip archive\n # assume the rest of the path is the name of a member in the archive\n archive, member = os.path.split(path)\n while archive and not os.path.exists(archive):\n archive, prefix = os.path.split(archive)\n if not prefix:\n # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),\n # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users\n break\n member = \"/\".join([prefix, member])\n\n if not zipfile.is_zipfile(archive):\n return path\n\n zip_file = zipfile.ZipFile(archive)\n if member not in zip_file.namelist():\n return path\n\n # we have a valid zip archive and a valid member of that archive\n tmp = tempfile.gettempdir()\n extracted_path = os.path.join(tmp, member.split(\"/\")[-1])\n if not os.path.exists(extracted_path):\n # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition\n with atomic_open(extracted_path) as file_handler:\n file_handler.write(zip_file.read(member))\n return extracted_path\n\n\n@contextlib.contextmanager\ndef atomic_open(filename):\n \"\"\"Write a file to the disk in an atomic fashion\"\"\"\n tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))\n try:\n with os.fdopen(tmp_descriptor, \"wb\") as tmp_handler:\n yield tmp_handler\n os.replace(tmp_name, filename)\n except BaseException:\n os.remove(tmp_name)\n raise\n\n\ndef from_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. 
Unless it can not be represented as such, return an\n OrderedDict, e.g.,\n\n ::\n\n >>> from_key_val_list([('key', 'val')])\n OrderedDict([('key', 'val')])\n >>> from_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n >>> from_key_val_list({'key': 'val'})\n OrderedDict([('key', 'val')])\n\n :rtype: OrderedDict\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError(\"cannot encode objects that are not 2-tuples\")\n\n return OrderedDict(value)\n\n\ndef to_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. If it can be, return a list of tuples, e.g.,\n\n ::\n\n >>> to_key_val_list([('key', 'val')])\n [('key', 'val')]\n >>> to_key_val_list({'key': 'val'})\n [('key', 'val')]\n >>> to_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n\n :rtype: list\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError(\"cannot encode objects that are not 2-tuples\")\n\n if isinstance(value, Mapping):\n value = value.items()\n\n return list(value)\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_list_header(value):\n \"\"\"Parse lists as described by RFC 2068 Section 2.\n\n In particular, parse comma-separated lists where the elements of\n the list may include quoted-strings. A quoted-string could\n contain a comma. A non-quoted string could have quotes in the\n middle. Quotes are removed automatically after parsing.\n\n It basically works like :func:`parse_set_header` just that items\n may appear multiple times and case sensitivity is preserved.\n\n The return value is a standard :class:`list`:\n\n >>> parse_list_header('token, \"quoted value\"')\n ['token', 'quoted value']\n\n To create a header from the :class:`list` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a list header.\n :return: :class:`list`\n :rtype: list\n \"\"\"\n result = []\n for item in _parse_list_header(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_dict_header(value):\n \"\"\"Parse lists of key, value pairs as described by RFC 2068 Section 2 and\n convert them into a python dict:\n\n >>> d = parse_dict_header('foo=\"is a fish\", bar=\"as well\"')\n >>> type(d) is dict\n True\n >>> sorted(d.items())\n [('bar', 'as well'), ('foo', 'is a fish')]\n\n If there is no value for a key it will be `None`:\n\n >>> parse_dict_header('key_without_value')\n {'key_without_value': None}\n\n To create a header from the :class:`dict` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a dict header.\n :return: :class:`dict`\n :rtype: dict\n \"\"\"\n result = {}\n for item in _parse_list_header(value):\n if \"=\" not in item:\n result[item] = None\n continue\n name, value = item.split(\"=\", 1)\n if value[:1] == value[-1:] == '\"':\n value = unquote_header_value(value[1:-1])\n result[name] = value\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef unquote_header_value(value, is_filename=False):\n r\"\"\"Unquotes a header value. 
(Reversal of :func:`quote_header_value`).\n This does not use the real unquoting but what browsers are actually\n using for quoting.\n\n :param value: the header value to unquote.\n :rtype: str\n \"\"\"\n if value and value[0] == value[-1] == '\"':\n # this is not the real unquoting, but fixing this so that the\n # RFC is met will result in bugs with internet explorer and\n # probably some other browsers as well. IE for example is\n # uploading files with \"C:\\foo\\bar.txt\" as filename\n value = value[1:-1]\n\n # if this is a filename and the starting characters look like\n # a UNC path, then just return the value without quotes. Using the\n # replace sequence below on a UNC path has the effect of turning\n # the leading double slash into a single slash and then\n # _fix_ie_filename() doesn't work correctly. See #458.\n if not is_filename or value[:2] != \"\\\\\\\\\":\n return value.replace(\"\\\\\\\\\", \"\\\\\").replace('\\\\\"', '\"')\n return value\n\n\ndef dict_from_cookiejar(cj):\n \"\"\"Returns a key/value dictionary from a CookieJar.\n\n :param cj: CookieJar object to extract cookies from.\n :rtype: dict\n \"\"\"\n\n cookie_dict = {}\n\n for cookie in cj:\n cookie_dict[cookie.name] = cookie.value\n\n return cookie_dict\n\n\ndef add_dict_to_cookiejar(cj, cookie_dict):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cj: CookieJar to insert cookies into.\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :rtype: CookieJar\n \"\"\"\n\n return cookiejar_from_dict(cookie_dict, cj)\n\n\ndef get_encodings_from_content(content):\n \"\"\"Returns encodings from given content string.\n\n :param content: bytestring to extract encodings from.\n \"\"\"\n warnings.warn(\n (\n \"In requests 3.0, get_encodings_from_content will be removed. For \"\n \"more information, please see the discussion on issue #2266. 
(This\"\n \" warning should only appear once.)\"\n ),\n DeprecationWarning,\n )\n\n charset_re = re.compile(r'<meta.*?charset=[\"\\']*(.+?)[\"\\'>]', flags=re.I)\n pragma_re = re.compile(r'<meta.*?content=[\"\\']*;?charset=(.+?)[\"\\'>]', flags=re.I)\n xml_re = re.compile(r'^<\\?xml.*?encoding=[\"\\']*(.+?)[\"\\'>]')\n\n return (\n charset_re.findall(content)\n + pragma_re.findall(content)\n + xml_re.findall(content)\n )\n\n\ndef _parse_content_type_header(header):\n \"\"\"Returns content type and parameters from given header\n\n :param header: string\n :return: tuple containing content type and dictionary of\n parameters\n \"\"\"\n\n tokens = header.split(\";\")\n content_type, params = tokens[0].strip(), tokens[1:]\n params_dict = {}\n items_to_strip = \"\\\"' \"\n\n for param in params:\n param = param.strip()\n if param:\n key, value = param, True\n index_of_equals = param.find(\"=\")\n if index_of_equals != -1:\n key = param[:index_of_equals].strip(items_to_strip)\n value = param[index_of_equals + 1 :].strip(items_to_strip)\n params_dict[key.lower()] = value\n return content_type, params_dict\n\n\ndef get_encoding_from_headers(headers):\n \"\"\"Returns encodings from given HTTP Header Dict.\n\n :param headers: dictionary to extract encoding from.\n :rtype: str\n \"\"\"\n\n content_type = headers.get(\"content-type\")\n\n if not content_type:\n return None\n\n content_type, params = _parse_content_type_header(content_type)\n\n if \"charset\" in params:\n return params[\"charset\"].strip(\"'\\\"\")\n\n if \"text\" in content_type:\n return \"ISO-8859-1\"\n\n if \"application/json\" in content_type:\n # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset\n return \"utf-8\"\n\n\ndef stream_decode_response_unicode(iterator, r):\n \"\"\"Stream decodes an iterator.\"\"\"\n\n if r.encoding is None:\n yield from iterator\n return\n\n decoder = codecs.getincrementaldecoder(r.encoding)(errors=\"replace\")\n for chunk in iterator:\n rv = decoder.decode(chunk)\n if rv:\n yield rv\n rv = decoder.decode(b\"\", final=True)\n if rv:\n yield rv\n\n\ndef iter_slices(string, slice_length):\n \"\"\"Iterate over slices of a string.\"\"\"\n pos = 0\n if slice_length is None or slice_length <= 0:\n slice_length = len(string)\n while pos < len(string):\n yield string[pos : pos + slice_length]\n pos += slice_length\n\n\ndef get_unicode_from_response(r):\n \"\"\"Returns the requested content back in unicode.\n\n :param r: Response object to get unicode content from.\n\n Tried:\n\n 1. charset from content-type\n 2. fall back and replace all unicode characters\n\n :rtype: str\n \"\"\"\n warnings.warn(\n (\n \"In requests 3.0, get_unicode_from_response will be removed. For \"\n \"more information, please see the discussion on issue #2266. (This\"\n \" warning should only appear once.)\"\n ),\n DeprecationWarning,\n )\n\n tried_encodings = []\n\n # Try charset from content-type\n encoding = get_encoding_from_headers(r.headers)\n\n if encoding:\n try:\n return str(r.content, encoding)\n except UnicodeError:\n tried_encodings.append(encoding)\n\n # Fall back:\n try:\n return str(r.content, encoding, errors=\"replace\")\n except TypeError:\n return r.content\n\n\n# The unreserved URI characters (RFC 3986)\nUNRESERVED_SET = frozenset(\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\" + \"0123456789-._~\"\n)\n\n\ndef unquote_unreserved(uri):\n \"\"\"Un-escape any percent-escape sequences in a URI that are unreserved\n characters. 
This leaves all reserved, illegal and non-ASCII bytes encoded.\n\n :rtype: str\n \"\"\"\n parts = uri.split(\"%\")\n for i in range(1, len(parts)):\n h = parts[i][0:2]\n if len(h) == 2 and h.isalnum():\n try:\n c = chr(int(h, 16))\n except ValueError:\n raise InvalidURL(f\"Invalid percent-escape sequence: '{h}'\")\n\n if c in UNRESERVED_SET:\n parts[i] = c + parts[i][2:]\n else:\n parts[i] = f\"%{parts[i]}\"\n else:\n parts[i] = f\"%{parts[i]}\"\n return \"\".join(parts)\n\n\ndef requote_uri(uri):\n \"\"\"Re-quote the given URI.\n\n This function passes the given URI through an unquote/quote cycle to\n ensure that it is fully and consistently quoted.\n\n :rtype: str\n \"\"\"\n safe_with_percent = \"!#$%&'()*+,/:;=?@[]~\"\n safe_without_percent = \"!#$&'()*+,/:;=?@[]~\"\n try:\n # Unquote only the unreserved characters\n # Then quote only illegal characters (do not quote reserved,\n # unreserved, or '%')\n return quote(unquote_unreserved(uri), safe=safe_with_percent)\n except InvalidURL:\n # We couldn't unquote the given URI, so let's try quoting it, but\n # there may be unquoted '%'s in the URI. We need to make sure they're\n # properly quoted so they do not cause issues elsewhere.\n return quote(uri, safe=safe_without_percent)\n\n\ndef address_in_network(ip, net):\n \"\"\"This function allows you to check if an IP belongs to a network subnet\n\n Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24\n returns False if ip = 192.168.1.1 and net = 192.168.100.0/24\n\n :rtype: bool\n \"\"\"\n ipaddr = struct.unpack(\"=L\", socket.inet_aton(ip))[0]\n netaddr, bits = net.split(\"/\")\n netmask = struct.unpack(\"=L\", socket.inet_aton(dotted_netmask(int(bits))))[0]\n network = struct.unpack(\"=L\", socket.inet_aton(netaddr))[0] & netmask\n return (ipaddr & netmask) == (network & netmask)\n\n\ndef dotted_netmask(mask):\n \"\"\"Converts mask from /xx format to xxx.xxx.xxx.xxx\n\n Example: if mask is 24 function returns 255.255.255.0\n\n :rtype: str\n \"\"\"\n bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1\n return socket.inet_ntoa(struct.pack(\">I\", bits))\n\n\ndef is_ipv4_address(string_ip):\n \"\"\"\n :rtype: bool\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except OSError:\n return False\n return True\n\n\ndef is_valid_cidr(string_network):\n \"\"\"\n Very simple check of the cidr format in no_proxy variable.\n\n :rtype: bool\n \"\"\"\n if string_network.count(\"/\") == 1:\n try:\n mask = int(string_network.split(\"/\")[1])\n except ValueError:\n return False\n\n if mask < 1 or mask > 32:\n return False\n\n try:\n socket.inet_aton(string_network.split(\"/\")[0])\n except OSError:\n return False\n else:\n return False\n return True\n\n\n@contextlib.contextmanager\ndef set_environ(env_name, value):\n \"\"\"Set the environment variable 'env_name' to 'value'\n\n Save previous value, yield, and then restore the previous value stored in\n the environment variable 'env_name'.\n\n If 'value' is None, do nothing\"\"\"\n value_changed = value is not None\n if value_changed:\n old_value = os.environ.get(env_name)\n os.environ[env_name] = value\n try:\n yield\n finally:\n if value_changed:\n if old_value is None:\n del os.environ[env_name]\n else:\n os.environ[env_name] = old_value\n\n\ndef should_bypass_proxies(url, no_proxy):\n \"\"\"\n Returns whether we should bypass proxies or not.\n\n :rtype: bool\n \"\"\"\n # Prioritize lowercase environment variables over uppercase\n # to keep a consistent behaviour with other http projects (curl, wget).\n def get_proxy(key):\n return os.environ.get(key) 
or os.environ.get(key.upper())\n\n # First check whether no_proxy is defined. If it is, check that the URL\n # we're getting isn't in the no_proxy list.\n no_proxy_arg = no_proxy\n if no_proxy is None:\n no_proxy = get_proxy(\"no_proxy\")\n parsed = urlparse(url)\n\n if parsed.hostname is None:\n # URLs don't always have hostnames, e.g. file:/// urls.\n return True\n\n if no_proxy:\n # We need to check whether we match here. We need to see if we match\n # the end of the hostname, both with and without the port.\n no_proxy = (host for host in no_proxy.replace(\" \", \"\").split(\",\") if host)\n\n if is_ipv4_address(parsed.hostname):\n for proxy_ip in no_proxy:\n if is_valid_cidr(proxy_ip):\n if address_in_network(parsed.hostname, proxy_ip):\n return True\n elif parsed.hostname == proxy_ip:\n # If no_proxy ip was defined in plain IP notation instead of cidr notation &\n # matches the IP of the index\n return True\n else:\n host_with_port = parsed.hostname\n if parsed.port:\n host_with_port += f\":{parsed.port}\"\n\n for host in no_proxy:\n if parsed.hostname.endswith(host) or host_with_port.endswith(host):\n # The URL does match something in no_proxy, so we don't want\n # to apply the proxies on this URL.\n return True\n\n with set_environ(\"no_proxy\", no_proxy_arg):\n # parsed.hostname can be `None` in cases such as a file URI.\n try:\n bypass = proxy_bypass(parsed.hostname)\n except (TypeError, socket.gaierror):\n bypass = False\n\n if bypass:\n return True\n\n return False\n\n\ndef get_environ_proxies(url, no_proxy=None):\n \"\"\"\n Return a dict of environment proxies.\n\n :rtype: dict\n \"\"\"\n if should_bypass_proxies(url, no_proxy=no_proxy):\n return {}\n else:\n return getproxies()\n\n\ndef select_proxy(url, proxies):\n \"\"\"Select a proxy for the url, if applicable.\n\n :param url: The url being for the request\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n \"\"\"\n proxies = proxies or {}\n urlparts = urlparse(url)\n if urlparts.hostname is None:\n return proxies.get(urlparts.scheme, proxies.get(\"all\"))\n\n proxy_keys = [\n urlparts.scheme + \"://\" + urlparts.hostname,\n urlparts.scheme,\n \"all://\" + urlparts.hostname,\n \"all\",\n ]\n proxy = None\n for proxy_key in proxy_keys:\n if proxy_key in proxies:\n proxy = proxies[proxy_key]\n break\n\n return proxy\n\n\ndef resolve_proxies(request, proxies, trust_env=True):\n \"\"\"This method takes proxy information from a request and configuration\n input to resolve a mapping of target proxies. 
This will consider settings\n such a NO_PROXY to strip proxy configurations.\n\n :param request: Request or PreparedRequest\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n :param trust_env: Boolean declaring whether to trust environment configs\n\n :rtype: dict\n \"\"\"\n proxies = proxies if proxies is not None else {}\n url = request.url\n scheme = urlparse(url).scheme\n no_proxy = proxies.get(\"no_proxy\")\n new_proxies = proxies.copy()\n\n if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):\n environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n\n proxy = environ_proxies.get(scheme, environ_proxies.get(\"all\"))\n\n if proxy:\n new_proxies.setdefault(scheme, proxy)\n return new_proxies\n\n\ndef default_user_agent(name=\"python-requests\"):\n \"\"\"\n Return a string representing the default user agent.\n\n :rtype: str\n \"\"\"\n return f\"{name}/{__version__}\"\n\n\ndef default_headers():\n \"\"\"\n :rtype: requests.structures.CaseInsensitiveDict\n \"\"\"\n return CaseInsensitiveDict(\n {\n \"User-Agent\": default_user_agent(),\n \"Accept-Encoding\": DEFAULT_ACCEPT_ENCODING,\n \"Accept\": \"*/*\",\n \"Connection\": \"keep-alive\",\n }\n )\n\n\ndef parse_header_links(value):\n \"\"\"Return a list of parsed link headers proxies.\n\n i.e. Link: <http:/.../front.jpeg>; rel=front; type=\"image/jpeg\",<http://.../back.jpeg>; rel=back;type=\"image/jpeg\"\n\n :rtype: list\n \"\"\"\n\n links = []\n\n replace_chars = \" '\\\"\"\n\n value = value.strip(replace_chars)\n if not value:\n return links\n\n for val in re.split(\", *<\", value):\n try:\n url, params = val.split(\";\", 1)\n except ValueError:\n url, params = val, \"\"\n\n link = {\"url\": url.strip(\"<> '\\\"\")}\n\n for param in params.split(\";\"):\n try:\n key, value = param.split(\"=\")\n except ValueError:\n break\n\n link[key.strip(replace_chars)] = value.strip(replace_chars)\n\n links.append(link)\n\n return links\n\n\n# Null bytes; no need to recreate these on each call to guess_json_utf\n_null = \"\\x00\".encode(\"ascii\") # encoding to ASCII for Python 3\n_null2 = _null * 2\n_null3 = _null * 3\n\n\ndef guess_json_utf(data):\n \"\"\"\n :rtype: str\n \"\"\"\n # JSON always starts with two ASCII characters, so detection is as\n # easy as counting the nulls and from their location and count\n # determine the encoding. 
Also detect a BOM, if present.\n sample = data[:4]\n if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):\n return \"utf-32\" # BOM included\n if sample[:3] == codecs.BOM_UTF8:\n return \"utf-8-sig\" # BOM included, MS style (discouraged)\n if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):\n return \"utf-16\" # BOM included\n nullcount = sample.count(_null)\n if nullcount == 0:\n return \"utf-8\"\n if nullcount == 2:\n if sample[::2] == _null2: # 1st and 3rd are null\n return \"utf-16-be\"\n if sample[1::2] == _null2: # 2nd and 4th are null\n return \"utf-16-le\"\n # Did not detect 2 valid UTF-16 ascii-range characters\n if nullcount == 3:\n if sample[:3] == _null3:\n return \"utf-32-be\"\n if sample[1:] == _null3:\n return \"utf-32-le\"\n # Did not detect a valid UTF-32 ascii-range character\n return None\n\n\ndef prepend_scheme_if_needed(url, new_scheme):\n \"\"\"Given a URL that may or may not have a scheme, prepend the given scheme.\n Does not replace a present scheme with the one provided as an argument.\n\n :rtype: str\n \"\"\"\n parsed = parse_url(url)\n scheme, auth, host, port, path, query, fragment = parsed\n\n # A defect in urlparse determines that there isn't a netloc present in some\n # urls. We previously assumed parsing was overly cautious, and swapped the\n # netloc and path. Due to a lack of tests on the original defect, this is\n # maintained with parse_url for backwards compatibility.\n netloc = parsed.netloc\n if not netloc:\n netloc, path = path, netloc\n\n if auth:\n # parse_url doesn't provide the netloc with auth\n # so we'll add it ourselves.\n netloc = \"@\".join([auth, netloc])\n if scheme is None:\n scheme = new_scheme\n if path is None:\n path = \"\"\n\n return urlunparse((scheme, netloc, path, \"\", query, fragment))\n\n\ndef get_auth_from_url(url):\n \"\"\"Given a url with authentication components, extract them into a tuple of\n username,password.\n\n :rtype: (str,str)\n \"\"\"\n parsed = urlparse(url)\n\n try:\n auth = (unquote(parsed.username), unquote(parsed.password))\n except (AttributeError, TypeError):\n auth = (\"\", \"\")\n\n return auth\n\n\n# Moved outside of function to avoid recompile every call\n_CLEAN_HEADER_REGEX_BYTE = re.compile(b\"^\\\\S[^\\\\r\\\\n]*$|^$\")\n_CLEAN_HEADER_REGEX_STR = re.compile(r\"^\\S[^\\r\\n]*$|^$\")\n\n\ndef check_header_validity(header):\n \"\"\"Verifies that header value is a string which doesn't contain\n leading whitespace or return characters. 
This prevents unintended\n header injection.\n\n :param header: tuple, in the format (name, value).\n \"\"\"\n name, value = header\n\n if isinstance(value, bytes):\n pat = _CLEAN_HEADER_REGEX_BYTE\n else:\n pat = _CLEAN_HEADER_REGEX_STR\n try:\n if not pat.match(value):\n raise InvalidHeader(\n f\"Invalid return character or leading space in header: {name}\"\n )\n except TypeError:\n raise InvalidHeader(\n f\"Value for header {{{name}: {value}}} must be of type \"\n f\"str or bytes, not {type(value)}\"\n )\n\n\ndef urldefragauth(url):\n \"\"\"\n Given a url remove the fragment and the authentication part.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url)\n\n # see func:`prepend_scheme_if_needed`\n if not netloc:\n netloc, path = path, netloc\n\n netloc = netloc.rsplit(\"@\", 1)[-1]\n\n return urlunparse((scheme, netloc, path, params, query, \"\"))\n\n\ndef rewind_body(prepared_request):\n \"\"\"Move file pointer back to its recorded starting position\n so it can be read again on redirect.\n \"\"\"\n body_seek = getattr(prepared_request.body, \"seek\", None)\n if body_seek is not None and isinstance(\n prepared_request._body_position, integer_types\n ):\n try:\n body_seek(prepared_request._body_position)\n except OSError:\n raise UnrewindableBodyError(\n \"An error occurred when rewinding request body for redirect.\"\n )\n else:\n raise UnrewindableBodyError(\"Unable to rewind request body for redirect.\")\n",
"path": "requests/utils.py"
},
{
"content": "\"\"\"Tests for Requests.\"\"\"\n\nimport collections\nimport contextlib\nimport io\nimport json\nimport os\nimport pickle\nimport re\nimport warnings\n\nimport pytest\nimport urllib3\nfrom urllib3.util import Timeout as Urllib3Timeout\n\nimport requests\nfrom requests.adapters import HTTPAdapter\nfrom requests.auth import HTTPDigestAuth, _basic_auth_str\nfrom requests.compat import (\n JSONDecodeError,\n Morsel,\n MutableMapping,\n builtin_str,\n cookielib,\n getproxies,\n urlparse,\n)\nfrom requests.cookies import cookiejar_from_dict, morsel_to_cookie\nfrom requests.exceptions import (\n ChunkedEncodingError,\n ConnectionError,\n ConnectTimeout,\n ContentDecodingError,\n InvalidHeader,\n InvalidProxyURL,\n InvalidSchema,\n InvalidURL,\n MissingSchema,\n ProxyError,\n ReadTimeout,\n RequestException,\n RetryError,\n)\nfrom requests.exceptions import SSLError as RequestsSSLError\nfrom requests.exceptions import Timeout, TooManyRedirects, UnrewindableBodyError\nfrom requests.hooks import default_hooks\nfrom requests.models import PreparedRequest, urlencode\nfrom requests.sessions import SessionRedirectMixin\nfrom requests.structures import CaseInsensitiveDict\n\nfrom .compat import StringIO\nfrom .utils import override_environ\n\n# Requests to this URL should always fail with a connection timeout (nothing\n# listening on that port)\nTARPIT = \"http://10.255.255.1\"\n\n# This is to avoid waiting the timeout of using TARPIT\nINVALID_PROXY = \"http://localhost:1\"\n\ntry:\n from ssl import SSLContext\n\n del SSLContext\n HAS_MODERN_SSL = True\nexcept ImportError:\n HAS_MODERN_SSL = False\n\ntry:\n requests.pyopenssl\n HAS_PYOPENSSL = True\nexcept AttributeError:\n HAS_PYOPENSSL = False\n\n\nclass TestRequests:\n\n digest_auth_algo = (\"MD5\", \"SHA-256\", \"SHA-512\")\n\n def test_entry_points(self):\n\n requests.session\n requests.session().get\n requests.session().head\n requests.get\n requests.head\n requests.put\n requests.patch\n requests.post\n # Not really an entry point, but people rely on it.\n from requests.packages.urllib3.poolmanager import PoolManager # noqa:F401\n\n @pytest.mark.parametrize(\n \"exception, url\",\n (\n (MissingSchema, \"hiwpefhipowhefopw\"),\n (InvalidSchema, \"localhost:3128\"),\n (InvalidSchema, \"localhost.localdomain:3128/\"),\n (InvalidSchema, \"10.122.1.1:3128/\"),\n (InvalidURL, \"http://\"),\n (InvalidURL, \"http://*example.com\"),\n (InvalidURL, \"http://.example.com\"),\n ),\n )\n def test_invalid_url(self, exception, url):\n with pytest.raises(exception):\n requests.get(url)\n\n def test_basic_building(self):\n req = requests.Request()\n req.url = \"http://kennethreitz.org/\"\n req.data = {\"life\": \"42\"}\n\n pr = req.prepare()\n assert pr.url == req.url\n assert pr.body == \"life=42\"\n\n @pytest.mark.parametrize(\"method\", (\"GET\", \"HEAD\"))\n def test_no_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert \"Content-Length\" not in req.headers\n\n @pytest.mark.parametrize(\"method\", (\"POST\", \"PUT\", \"PATCH\", \"OPTIONS\"))\n def test_no_body_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert req.headers[\"Content-Length\"] == \"0\"\n\n @pytest.mark.parametrize(\"method\", (\"POST\", \"PUT\", \"PATCH\", \"OPTIONS\"))\n def test_empty_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower()), data=\"\").prepare()\n assert req.headers[\"Content-Length\"] == \"0\"\n\n 
def test_override_content_length(self, httpbin):\n headers = {\"Content-Length\": \"not zero\"}\n r = requests.Request(\"POST\", httpbin(\"post\"), headers=headers).prepare()\n assert \"Content-Length\" in r.headers\n assert r.headers[\"Content-Length\"] == \"not zero\"\n\n def test_path_is_not_double_encoded(self):\n request = requests.Request(\"GET\", \"http://0.0.0.0/get/test case\").prepare()\n\n assert request.path_url == \"/get/test%20case\"\n\n @pytest.mark.parametrize(\n \"url, expected\",\n (\n (\n \"http://example.com/path#fragment\",\n \"http://example.com/path?a=b#fragment\",\n ),\n (\n \"http://example.com/path?key=value#fragment\",\n \"http://example.com/path?key=value&a=b#fragment\",\n ),\n ),\n )\n def test_params_are_added_before_fragment(self, url, expected):\n request = requests.Request(\"GET\", url, params={\"a\": \"b\"}).prepare()\n assert request.url == expected\n\n def test_params_original_order_is_preserved_by_default(self):\n param_ordered_dict = collections.OrderedDict(\n ((\"z\", 1), (\"a\", 1), (\"k\", 1), (\"d\", 1))\n )\n session = requests.Session()\n request = requests.Request(\n \"GET\", \"http://example.com/\", params=param_ordered_dict\n )\n prep = session.prepare_request(request)\n assert prep.url == \"http://example.com/?z=1&a=1&k=1&d=1\"\n\n def test_params_bytes_are_encoded(self):\n request = requests.Request(\n \"GET\", \"http://example.com\", params=b\"test=foo\"\n ).prepare()\n assert request.url == \"http://example.com/?test=foo\"\n\n def test_binary_put(self):\n request = requests.Request(\n \"PUT\", \"http://example.com\", data=\"ööö\".encode()\n ).prepare()\n assert isinstance(request.body, bytes)\n\n def test_whitespaces_are_removed_from_url(self):\n # Test for issue #3696\n request = requests.Request(\"GET\", \" http://example.com\").prepare()\n assert request.url == \"http://example.com/\"\n\n @pytest.mark.parametrize(\"scheme\", (\"http://\", \"HTTP://\", \"hTTp://\", \"HttP://\"))\n def test_mixed_case_scheme_acceptable(self, httpbin, scheme):\n s = requests.Session()\n s.proxies = getproxies()\n parts = urlparse(httpbin(\"get\"))\n url = scheme + parts.netloc + parts.path\n r = requests.Request(\"GET\", url)\n r = s.send(r.prepare())\n assert r.status_code == 200, f\"failed for scheme {scheme}\"\n\n def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):\n r = requests.Request(\"GET\", httpbin(\"get\"))\n s = requests.Session()\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n\n assert r.status_code == 200\n\n def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):\n r = requests.get(httpbin(\"redirect\", \"1\"))\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin):\n r = requests.post(\n httpbin(\"redirect-to\"),\n data=\"test\",\n params={\"url\": \"post\", \"status_code\": 307},\n )\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()[\"data\"] == \"test\"\n\n def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin):\n byte_str = b\"test\"\n r = requests.post(\n httpbin(\"redirect-to\"),\n data=io.BytesIO(byte_str),\n params={\"url\": \"post\", \"status_code\": 307},\n )\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()[\"data\"] == byte_str.decode(\"utf-8\")\n\n def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin):\n try:\n requests.get(httpbin(\"relative-redirect\", \"50\"))\n 
except TooManyRedirects as e:\n url = httpbin(\"relative-redirect\", \"20\")\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 30\n else:\n pytest.fail(\"Expected redirect to raise TooManyRedirects but it did not\")\n\n def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin):\n s = requests.session()\n s.max_redirects = 5\n try:\n s.get(httpbin(\"relative-redirect\", \"50\"))\n except TooManyRedirects as e:\n url = httpbin(\"relative-redirect\", \"45\")\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 5\n else:\n pytest.fail(\n \"Expected custom max number of redirects to be respected but was not\"\n )\n\n def test_http_301_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin(\"status\", \"301\"))\n assert r.status_code == 200\n assert r.request.method == \"GET\"\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_301_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin(\"status\", \"301\"), allow_redirects=True)\n print(r.content)\n assert r.status_code == 200\n assert r.request.method == \"HEAD\"\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_302_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin(\"status\", \"302\"))\n assert r.status_code == 200\n assert r.request.method == \"GET\"\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_302_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin(\"status\", \"302\"), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == \"HEAD\"\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_303_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin(\"status\", \"303\"))\n assert r.status_code == 200\n assert r.request.method == \"GET\"\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_http_303_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin(\"status\", \"303\"), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == \"HEAD\"\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_header_and_body_removal_on_redirect(self, httpbin):\n purged_headers = (\"Content-Length\", \"Content-Type\")\n ses = requests.Session()\n req = requests.Request(\"POST\", httpbin(\"post\"), data={\"test\": \"data\"})\n prep = ses.prepare_request(req)\n resp = ses.send(prep)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers[\"location\"] = \"get\"\n\n # Run request through resolve_redirects\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_transfer_enc_removal_on_redirect(self, httpbin):\n purged_headers = (\"Transfer-Encoding\", \"Content-Type\")\n ses = requests.Session()\n req = requests.Request(\"POST\", httpbin(\"post\"), data=(b\"x\" for x in range(1)))\n prep = ses.prepare_request(req)\n assert \"Transfer-Encoding\" in prep.headers\n\n # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33\n resp = requests.Response()\n resp.raw = io.BytesIO(b\"the content\")\n resp.request = prep\n setattr(resp.raw, \"release_conn\", lambda *args: args)\n\n # Mimic a redirect response\n resp.status_code = 302\n 
resp.headers[\"location\"] = httpbin(\"get\")\n\n # Run request through resolve_redirect\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_fragment_maintained_on_redirect(self, httpbin):\n fragment = \"#view=edit&token=hunter2\"\n r = requests.get(httpbin(\"redirect-to?url=get\") + fragment)\n\n assert len(r.history) > 0\n assert r.history[0].request.url == httpbin(\"redirect-to?url=get\") + fragment\n assert r.url == httpbin(\"get\") + fragment\n\n def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):\n heads = {\"User-agent\": \"Mozilla/5.0\"}\n\n r = requests.get(httpbin(\"user-agent\"), headers=heads)\n\n assert heads[\"User-agent\"] in r.text\n assert r.status_code == 200\n\n def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):\n heads = {\"User-agent\": \"Mozilla/5.0\"}\n\n r = requests.get(\n httpbin(\"get\") + \"?test=true\", params={\"q\": \"test\"}, headers=heads\n )\n assert r.status_code == 200\n\n def test_set_cookie_on_301(self, httpbin):\n s = requests.session()\n url = httpbin(\"cookies/set?foo=bar\")\n s.get(url)\n assert s.cookies[\"foo\"] == \"bar\"\n\n def test_cookie_sent_on_redirect(self, httpbin):\n s = requests.session()\n s.get(httpbin(\"cookies/set?foo=bar\"))\n r = s.get(httpbin(\"redirect/1\")) # redirects to httpbin('get')\n assert \"Cookie\" in r.json()[\"headers\"]\n\n def test_cookie_removed_on_expire(self, httpbin):\n s = requests.session()\n s.get(httpbin(\"cookies/set?foo=bar\"))\n assert s.cookies[\"foo\"] == \"bar\"\n s.get(\n httpbin(\"response-headers\"),\n params={\"Set-Cookie\": \"foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT\"},\n )\n assert \"foo\" not in s.cookies\n\n def test_cookie_quote_wrapped(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=\"bar:baz\"'))\n assert s.cookies[\"foo\"] == '\"bar:baz\"'\n\n def test_cookie_persists_via_api(self, httpbin):\n s = requests.session()\n r = s.get(httpbin(\"redirect/1\"), cookies={\"foo\": \"bar\"})\n assert \"foo\" in r.request.headers[\"Cookie\"]\n assert \"foo\" in r.history[0].request.headers[\"Cookie\"]\n\n def test_request_cookie_overrides_session_cookie(self, httpbin):\n s = requests.session()\n s.cookies[\"foo\"] = \"bar\"\n r = s.get(httpbin(\"cookies\"), cookies={\"foo\": \"baz\"})\n assert r.json()[\"cookies\"][\"foo\"] == \"baz\"\n # Session cookie should not be modified\n assert s.cookies[\"foo\"] == \"bar\"\n\n def test_request_cookies_not_persisted(self, httpbin):\n s = requests.session()\n s.get(httpbin(\"cookies\"), cookies={\"foo\": \"baz\"})\n # Sending a request with cookies should not add cookies to the session\n assert not s.cookies\n\n def test_generic_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({\"foo\": \"bar\"}, cj)\n s = requests.session()\n s.cookies = cj\n r = s.get(httpbin(\"cookies\"))\n # Make sure the cookie was sent\n assert r.json()[\"cookies\"][\"foo\"] == \"bar\"\n # Make sure the session cj is still the custom one\n assert s.cookies is cj\n\n def test_param_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({\"foo\": \"bar\"}, cj)\n s = requests.session()\n r = s.get(httpbin(\"cookies\"), cookies=cj)\n # Make sure the cookie was sent\n assert r.json()[\"cookies\"][\"foo\"] == \"bar\"\n\n def test_cookielib_cookiejar_on_redirect(self, httpbin):\n \"\"\"Tests resolve_redirect doesn't fail when merging cookies\n with 
non-RequestsCookieJar cookiejar.\n\n See GH #3579\n \"\"\"\n cj = cookiejar_from_dict({\"foo\": \"bar\"}, cookielib.CookieJar())\n s = requests.Session()\n s.cookies = cookiejar_from_dict({\"cookie\": \"tasty\"})\n\n # Prepare request without using Session\n req = requests.Request(\"GET\", httpbin(\"headers\"), cookies=cj)\n prep_req = req.prepare()\n\n # Send request and simulate redirect\n resp = s.send(prep_req)\n resp.status_code = 302\n resp.headers[\"location\"] = httpbin(\"get\")\n redirects = s.resolve_redirects(resp, prep_req)\n resp = next(redirects)\n\n # Verify CookieJar isn't being converted to RequestsCookieJar\n assert isinstance(prep_req._cookies, cookielib.CookieJar)\n assert isinstance(resp.request._cookies, cookielib.CookieJar)\n assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar)\n\n cookies = {}\n for c in resp.request._cookies:\n cookies[c.name] = c.value\n assert cookies[\"foo\"] == \"bar\"\n assert cookies[\"cookie\"] == \"tasty\"\n\n def test_requests_in_history_are_not_overridden(self, httpbin):\n resp = requests.get(httpbin(\"redirect/3\"))\n urls = [r.url for r in resp.history]\n req_urls = [r.request.url for r in resp.history]\n assert urls == req_urls\n\n def test_history_is_always_a_list(self, httpbin):\n \"\"\"Show that even with redirects, Response.history is always a list.\"\"\"\n resp = requests.get(httpbin(\"get\"))\n assert isinstance(resp.history, list)\n resp = requests.get(httpbin(\"redirect/1\"))\n assert isinstance(resp.history, list)\n assert not isinstance(resp.history, tuple)\n\n def test_headers_on_session_with_None_are_not_sent(self, httpbin):\n \"\"\"Do not send headers in Session.headers with None values.\"\"\"\n ses = requests.Session()\n ses.headers[\"Accept-Encoding\"] = None\n req = requests.Request(\"GET\", httpbin(\"get\"))\n prep = ses.prepare_request(req)\n assert \"Accept-Encoding\" not in prep.headers\n\n def test_headers_preserve_order(self, httpbin):\n \"\"\"Preserve order when headers provided as OrderedDict.\"\"\"\n ses = requests.Session()\n ses.headers = collections.OrderedDict()\n ses.headers[\"Accept-Encoding\"] = \"identity\"\n ses.headers[\"First\"] = \"1\"\n ses.headers[\"Second\"] = \"2\"\n headers = collections.OrderedDict([(\"Third\", \"3\"), (\"Fourth\", \"4\")])\n headers[\"Fifth\"] = \"5\"\n headers[\"Second\"] = \"222\"\n req = requests.Request(\"GET\", httpbin(\"get\"), headers=headers)\n prep = ses.prepare_request(req)\n items = list(prep.headers.items())\n assert items[0] == (\"Accept-Encoding\", \"identity\")\n assert items[1] == (\"First\", \"1\")\n assert items[2] == (\"Second\", \"222\")\n assert items[3] == (\"Third\", \"3\")\n assert items[4] == (\"Fourth\", \"4\")\n assert items[5] == (\"Fifth\", \"5\")\n\n @pytest.mark.parametrize(\"key\", (\"User-agent\", \"user-agent\"))\n def test_user_agent_transfers(self, httpbin, key):\n\n heads = {key: \"Mozilla/5.0 (github.com/psf/requests)\"}\n\n r = requests.get(httpbin(\"user-agent\"), headers=heads)\n assert heads[key] in r.text\n\n def test_HTTP_200_OK_HEAD(self, httpbin):\n r = requests.head(httpbin(\"get\"))\n assert r.status_code == 200\n\n def test_HTTP_200_OK_PUT(self, httpbin):\n r = requests.put(httpbin(\"put\"))\n assert r.status_code == 200\n\n def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):\n auth = (\"user\", \"pass\")\n url = httpbin(\"basic-auth\", \"user\", \"pass\")\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = 
requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n \"username, password\",\n (\n (\"user\", \"pass\"),\n (\"имя\".encode(), \"пароль\".encode()),\n (42, 42),\n (None, None),\n ),\n )\n def test_set_basicauth(self, httpbin, username, password):\n auth = (username, password)\n url = httpbin(\"get\")\n\n r = requests.Request(\"GET\", url, auth=auth)\n p = r.prepare()\n\n assert p.headers[\"Authorization\"] == _basic_auth_str(username, password)\n\n def test_basicauth_encodes_byte_strings(self):\n \"\"\"Ensure b'test' formats as the byte string \"test\" rather\n than the unicode string \"b'test'\" in Python 3.\n \"\"\"\n auth = (b\"\\xc5\\xafsername\", b\"test\\xc6\\xb6\")\n r = requests.Request(\"GET\", \"http://localhost\", auth=auth)\n p = r.prepare()\n\n assert p.headers[\"Authorization\"] == \"Basic xa9zZXJuYW1lOnRlc3TGtg==\"\n\n @pytest.mark.parametrize(\n \"url, exception\",\n (\n # Connecting to an unknown domain should raise a ConnectionError\n (\"http://doesnotexist.google.com\", ConnectionError),\n # Connecting to an invalid port should raise a ConnectionError\n (\"http://localhost:1\", ConnectionError),\n # Inputing a URL that cannot be parsed should raise an InvalidURL error\n (\"http://fe80::5054:ff:fe5a:fc0\", InvalidURL),\n ),\n )\n def test_errors(self, url, exception):\n with pytest.raises(exception):\n requests.get(url, timeout=1)\n\n def test_proxy_error(self):\n # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError\n with pytest.raises(ProxyError):\n requests.get(\n \"http://localhost:1\", proxies={\"http\": \"non-resolvable-address\"}\n )\n\n def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure):\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={\"https\": \"http:/badproxyurl:3128\"})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={\"http\": \"http://:8080\"})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={\"https\": \"https://\"})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={\"http\": \"http:///example.com:8080\"})\n\n def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request(\"GET\", httpbin())\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request(\"GET\", httpbin())\n prepared = session.prepare_request(request)\n session.send(prepared)\n\n def test_respect_proxy_env_on_send_with_redirects(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n url = httpbin(\"redirect/1\")\n print(url)\n request = requests.Request(\"GET\", url)\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_get(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.get(httpbin())\n\n def test_respect_proxy_env_on_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.request(method=\"GET\", url=httpbin())\n\n def 
test_proxy_authorization_preserved_on_request(self, httpbin):\n proxy_auth_value = \"Bearer XXX\"\n session = requests.Session()\n session.headers.update({\"Proxy-Authorization\": proxy_auth_value})\n resp = session.request(method=\"GET\", url=httpbin(\"get\"))\n sent_headers = resp.json().get(\"headers\", {})\n\n assert sent_headers.get(\"Proxy-Authorization\") == proxy_auth_value\n\n def test_basicauth_with_netrc(self, httpbin):\n auth = (\"user\", \"pass\")\n wrong_auth = (\"wronguser\", \"wrongpass\")\n url = httpbin(\"basic-auth\", \"user\", \"pass\")\n\n old_auth = requests.sessions.get_netrc_auth\n\n try:\n\n def get_netrc_auth_mock(url):\n return auth\n\n requests.sessions.get_netrc_auth = get_netrc_auth_mock\n\n # Should use netrc and work.\n r = requests.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n r = requests.get(url, auth=wrong_auth)\n assert r.status_code == 401\n\n s = requests.session()\n\n # Should use netrc and work.\n r = s.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n s.auth = wrong_auth\n r = s.get(url)\n assert r.status_code == 401\n finally:\n requests.sessions.get_netrc_auth = old_auth\n\n def test_DIGEST_HTTP_200_OK_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth(\"user\", \"pass\")\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype, \"never\")\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n print(r.headers[\"WWW-Authenticate\"])\n\n s = requests.session()\n s.auth = HTTPDigestAuth(\"user\", \"pass\")\n r = s.get(url)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype)\n auth = HTTPDigestAuth(\"user\", \"pass\")\n r = requests.get(url)\n assert r.cookies[\"fake\"] == \"fake_value\"\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype)\n auth = HTTPDigestAuth(\"user\", \"pass\")\n s = requests.Session()\n s.get(url, auth=auth)\n assert s.cookies[\"fake\"] == \"fake_value\"\n\n def test_DIGEST_STREAM(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth(\"user\", \"pass\")\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype)\n\n r = requests.get(url, auth=auth, stream=True)\n assert r.raw.read() != b\"\"\n\n r = requests.get(url, auth=auth, stream=False)\n assert r.raw.read() == b\"\"\n\n def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth(\"user\", \"wrongpass\")\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype)\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 401\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 401\n\n def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth(\"user\", \"pass\")\n url = httpbin(\"digest-auth\", \"auth\", \"user\", \"pass\", authtype)\n\n r = requests.get(url, auth=auth)\n assert '\"auth\"' in r.request.headers[\"Authorization\"]\n\n def test_POSTBIN_GET_POST_FILES(self, 
httpbin):\n\n url = httpbin(\"post\")\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={\"some\": \"data\"})\n assert post1.status_code == 200\n\n with open(\"requirements-dev.txt\") as f:\n post2 = requests.post(url, files={\"some\": f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=[\"bad file data\"])\n\n def test_invalid_files_input(self, httpbin):\n\n url = httpbin(\"post\")\n post = requests.post(url, files={\"random-file-1\": None, \"random-file-2\": 1})\n assert b'name=\"random-file-1\"' not in post.request.body\n assert b'name=\"random-file-2\"' in post.request.body\n\n def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin):\n class TestStream:\n def __init__(self, data):\n self.data = data.encode()\n self.length = len(self.data)\n self.index = 0\n\n def __len__(self):\n return self.length\n\n def read(self, size=None):\n if size:\n ret = self.data[self.index : self.index + size]\n self.index += size\n else:\n ret = self.data[self.index :]\n self.index = self.length\n return ret\n\n def tell(self):\n return self.index\n\n def seek(self, offset, where=0):\n if where == 0:\n self.index = offset\n elif where == 1:\n self.index += offset\n elif where == 2:\n self.index = self.length + offset\n\n test = TestStream(\"test\")\n post1 = requests.post(httpbin(\"post\"), data=test)\n assert post1.status_code == 200\n assert post1.json()[\"data\"] == \"test\"\n\n test = TestStream(\"test\")\n test.seek(2)\n post2 = requests.post(httpbin(\"post\"), data=test)\n assert post2.status_code == 200\n assert post2.json()[\"data\"] == \"st\"\n\n def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):\n\n url = httpbin(\"post\")\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={\"some\": \"data\"})\n assert post1.status_code == 200\n\n with open(\"requirements-dev.txt\") as f:\n post2 = requests.post(url, data={\"some\": \"data\"}, files={\"some\": f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=[\"bad file data\"])\n\n def test_post_with_custom_mapping(self, httpbin):\n class CustomMapping(MutableMapping):\n def __init__(self, *args, **kwargs):\n self.data = dict(*args, **kwargs)\n\n def __delitem__(self, key):\n del self.data[key]\n\n def __getitem__(self, key):\n return self.data[key]\n\n def __setitem__(self, key, value):\n self.data[key] = value\n\n def __iter__(self):\n return iter(self.data)\n\n def __len__(self):\n return len(self.data)\n\n data = CustomMapping({\"some\": \"data\"})\n url = httpbin(\"post\")\n found_json = requests.post(url, data=data).json().get(\"form\")\n assert found_json == {\"some\": \"data\"}\n\n def test_conflicting_post_params(self, httpbin):\n url = httpbin(\"post\")\n with open(\"requirements-dev.txt\") as f:\n with pytest.raises(ValueError):\n requests.post(url, data='[{\"some\": \"data\"}]', files={\"some\": f})\n\n def test_request_ok_set(self, httpbin):\n r = requests.get(httpbin(\"status\", \"404\"))\n assert not r.ok\n\n def test_status_raising(self, httpbin):\n r = requests.get(httpbin(\"status\", \"404\"))\n with pytest.raises(requests.exceptions.HTTPError):\n r.raise_for_status()\n\n r = requests.get(httpbin(\"status\", \"500\"))\n assert not r.ok\n\n def test_decompress_gzip(self, httpbin):\n r = 
requests.get(httpbin(\"gzip\"))\n r.content.decode(\"ascii\")\n\n @pytest.mark.parametrize(\n \"url, params\",\n (\n (\"/get\", {\"foo\": \"føø\"}),\n (\"/get\", {\"føø\": \"føø\"}),\n (\"/get\", {\"føø\": \"føø\"}),\n (\"/get\", {\"foo\": \"foo\"}),\n (\"ø\", {\"foo\": \"foo\"}),\n ),\n )\n def test_unicode_get(self, httpbin, url, params):\n requests.get(httpbin(url), params=params)\n\n def test_unicode_header_name(self, httpbin):\n requests.put(\n httpbin(\"put\"),\n headers={\"Content-Type\": \"application/octet-stream\"},\n data=\"\\xff\",\n ) # compat.str is unicode.\n\n def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle):\n requests.get(httpbin_secure(\"status\", \"301\"), verify=httpbin_ca_bundle)\n\n def test_invalid_ca_certificate_path(self, httpbin_secure):\n INVALID_PATH = \"/garbage\"\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), verify=INVALID_PATH)\n assert str(\n e.value\n ) == \"Could not find a suitable TLS CA certificate bundle, invalid path: {}\".format(\n INVALID_PATH\n )\n\n def test_invalid_ssl_certificate_files(self, httpbin_secure):\n INVALID_PATH = \"/garbage\"\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=INVALID_PATH)\n assert str(\n e.value\n ) == \"Could not find the TLS certificate file, invalid path: {}\".format(\n INVALID_PATH\n )\n\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=(\".\", INVALID_PATH))\n assert str(e.value) == (\n f\"Could not find the TLS key file, invalid path: {INVALID_PATH}\"\n )\n\n @pytest.mark.parametrize(\n \"env, expected\",\n (\n ({}, True),\n ({\"REQUESTS_CA_BUNDLE\": \"/some/path\"}, \"/some/path\"),\n ({\"REQUESTS_CA_BUNDLE\": \"\"}, True),\n ({\"CURL_CA_BUNDLE\": \"/some/path\"}, \"/some/path\"),\n ({\"CURL_CA_BUNDLE\": \"\"}, True),\n ({\"REQUESTS_CA_BUNDLE\": \"\", \"CURL_CA_BUNDLE\": \"\"}, True),\n (\n {\n \"REQUESTS_CA_BUNDLE\": \"/some/path\",\n \"CURL_CA_BUNDLE\": \"/curl/path\",\n },\n \"/some/path\",\n ),\n (\n {\n \"REQUESTS_CA_BUNDLE\": \"\",\n \"CURL_CA_BUNDLE\": \"/curl/path\",\n },\n \"/curl/path\",\n ),\n ),\n )\n def test_env_cert_bundles(self, httpbin, mocker, env, expected):\n s = requests.Session()\n mocker.patch(\"os.environ\", env)\n settings = s.merge_environment_settings(\n url=httpbin(\"get\"), proxies={}, stream=False, verify=True, cert=None\n )\n assert settings[\"verify\"] == expected\n\n def test_http_with_certificate(self, httpbin):\n r = requests.get(httpbin(), cert=\".\")\n assert r.status_code == 200\n\n def test_https_warnings(self, nosan_server):\n \"\"\"warnings are emitted with requests.get\"\"\"\n host, port, ca_bundle = nosan_server\n if HAS_MODERN_SSL or HAS_PYOPENSSL:\n warnings_expected = (\"SubjectAltNameWarning\",)\n else:\n warnings_expected = (\n \"SNIMissingWarning\",\n \"InsecurePlatformWarning\",\n \"SubjectAltNameWarning\",\n )\n\n with pytest.warns(None) as warning_records:\n warnings.simplefilter(\"always\")\n requests.get(f\"https://localhost:{port}/\", verify=ca_bundle)\n\n warning_records = [\n item\n for item in warning_records\n if item.category.__name__ != \"ResourceWarning\"\n ]\n\n warnings_category = tuple(item.category.__name__ for item in warning_records)\n assert warnings_category == warnings_expected\n\n def test_certificate_failure(self, httpbin_secure):\n \"\"\"\n When underlying SSL problems occur, an SSLError is raised.\n \"\"\"\n with pytest.raises(RequestsSSLError):\n # Our local httpbin does not have a trusted CA, so this call will\n # fail if we use our default 
trust bundle.\n requests.get(httpbin_secure(\"status\", \"200\"))\n\n def test_urlencoded_get_query_multivalued_param(self, httpbin):\n\n r = requests.get(httpbin(\"get\"), params={\"test\": [\"foo\", \"baz\"]})\n assert r.status_code == 200\n assert r.url == httpbin(\"get?test=foo&test=baz\")\n\n def test_form_encoded_post_query_multivalued_element(self, httpbin):\n r = requests.Request(\n method=\"POST\", url=httpbin(\"post\"), data=dict(test=[\"foo\", \"baz\"])\n )\n prep = r.prepare()\n assert prep.body == \"test=foo&test=baz\"\n\n def test_different_encodings_dont_break_post(self, httpbin):\n with open(__file__, \"rb\") as f:\n r = requests.post(\n httpbin(\"post\"),\n data={\"stuff\": json.dumps({\"a\": 123})},\n params={\"blah\": \"asdf1234\"},\n files={\"file\": (\"test_requests.py\", f)},\n )\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n \"data\",\n (\n {\"stuff\": \"ëlïxr\"},\n {\"stuff\": \"ëlïxr\".encode()},\n {\"stuff\": \"elixr\"},\n {\"stuff\": b\"elixr\"},\n ),\n )\n def test_unicode_multipart_post(self, httpbin, data):\n with open(__file__, \"rb\") as f:\n r = requests.post(\n httpbin(\"post\"),\n data=data,\n files={\"file\": (\"test_requests.py\", f)},\n )\n assert r.status_code == 200\n\n def test_unicode_multipart_post_fieldnames(self, httpbin):\n filename = os.path.splitext(__file__)[0] + \".py\"\n with open(filename, \"rb\") as f:\n r = requests.Request(\n method=\"POST\",\n url=httpbin(\"post\"),\n data={b\"stuff\": \"elixr\"},\n files={\"file\": (\"test_requests.py\", f)},\n )\n prep = r.prepare()\n\n assert b'name=\"stuff\"' in prep.body\n assert b\"name=\\\"b'stuff'\\\"\" not in prep.body\n\n def test_unicode_method_name(self, httpbin):\n with open(__file__, \"rb\") as f:\n files = {\"file\": f}\n r = requests.request(\n method=\"POST\",\n url=httpbin(\"post\"),\n files=files,\n )\n assert r.status_code == 200\n\n def test_unicode_method_name_with_request_object(self, httpbin):\n s = requests.Session()\n with open(__file__, \"rb\") as f:\n files = {\"file\": f}\n req = requests.Request(\"POST\", httpbin(\"post\"), files=files)\n prep = s.prepare_request(req)\n assert isinstance(prep.method, builtin_str)\n assert prep.method == \"POST\"\n\n resp = s.send(prep)\n assert resp.status_code == 200\n\n def test_non_prepared_request_error(self):\n s = requests.Session()\n req = requests.Request(\"POST\", \"/\")\n\n with pytest.raises(ValueError) as e:\n s.send(req)\n assert str(e.value) == \"You can only send PreparedRequests.\"\n\n def test_custom_content_type(self, httpbin):\n with open(__file__, \"rb\") as f1:\n with open(__file__, \"rb\") as f2:\n data={\"stuff\": json.dumps({\"a\": 123})}\n files = {\n \"file1\": (\"test_requests.py\", f1),\n \"file2\": (\"test_requests\", f2, \"text/py-content-type\"),\n }\n r = requests.post(httpbin(\"post\"), data=data, files=files)\n assert r.status_code == 200\n assert b\"text/py-content-type\" in r.request.body\n\n def test_hook_receives_request_arguments(self, httpbin):\n def hook(resp, **kwargs):\n assert resp is not None\n assert kwargs != {}\n\n s = requests.Session()\n r = requests.Request(\"GET\", httpbin(), hooks={\"response\": hook})\n prep = s.prepare_request(r)\n s.send(prep)\n\n def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):\n def hook(*args, **kwargs):\n pass\n\n s = requests.Session()\n s.hooks[\"response\"].append(hook)\n r = requests.Request(\"GET\", httpbin())\n prep = s.prepare_request(r)\n assert prep.hooks[\"response\"] != []\n assert prep.hooks[\"response\"] == 
[hook]\n\n def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):\n def hook1(*args, **kwargs):\n pass\n\n def hook2(*args, **kwargs):\n pass\n\n assert hook1 is not hook2\n s = requests.Session()\n s.hooks[\"response\"].append(hook2)\n r = requests.Request(\"GET\", httpbin(), hooks={\"response\": [hook1]})\n prep = s.prepare_request(r)\n assert prep.hooks[\"response\"] == [hook1]\n\n def test_prepared_request_hook(self, httpbin):\n def hook(resp, **kwargs):\n resp.hook_working = True\n return resp\n\n req = requests.Request(\"GET\", httpbin(), hooks={\"response\": hook})\n prep = req.prepare()\n\n s = requests.Session()\n s.proxies = getproxies()\n resp = s.send(prep)\n\n assert hasattr(resp, \"hook_working\")\n\n def test_prepared_from_session(self, httpbin):\n class DummyAuth(requests.auth.AuthBase):\n def __call__(self, r):\n r.headers[\"Dummy-Auth-Test\"] = \"dummy-auth-test-ok\"\n return r\n\n req = requests.Request(\"GET\", httpbin(\"headers\"))\n assert not req.auth\n\n s = requests.Session()\n s.auth = DummyAuth()\n\n prep = s.prepare_request(req)\n resp = s.send(prep)\n\n assert resp.json()[\"headers\"][\"Dummy-Auth-Test\"] == \"dummy-auth-test-ok\"\n\n def test_prepare_request_with_bytestring_url(self):\n req = requests.Request(\"GET\", b\"https://httpbin.org/\")\n s = requests.Session()\n prep = s.prepare_request(req)\n assert prep.url == \"https://httpbin.org/\"\n\n def test_request_with_bytestring_host(self, httpbin):\n s = requests.Session()\n resp = s.request(\n \"GET\",\n httpbin(\"cookies/set?cookie=value\"),\n allow_redirects=False,\n headers={\"Host\": b\"httpbin.org\"},\n )\n assert resp.cookies.get(\"cookie\") == \"value\"\n\n def test_links(self):\n r = requests.Response()\n r.headers = {\n \"cache-control\": \"public, max-age=60, s-maxage=60\",\n \"connection\": \"keep-alive\",\n \"content-encoding\": \"gzip\",\n \"content-type\": \"application/json; charset=utf-8\",\n \"date\": \"Sat, 26 Jan 2013 16:47:56 GMT\",\n \"etag\": '\"6ff6a73c0e446c1f61614769e3ceb778\"',\n \"last-modified\": \"Sat, 26 Jan 2013 16:22:39 GMT\",\n \"link\": (\n \"<https://api.github.com/users/kennethreitz/repos?\"\n 'page=2&per_page=10>; rel=\"next\", <https://api.github.'\n \"com/users/kennethreitz/repos?page=7&per_page=10>; \"\n ' rel=\"last\"'\n ),\n \"server\": \"GitHub.com\",\n \"status\": \"200 OK\",\n \"vary\": \"Accept\",\n \"x-content-type-options\": \"nosniff\",\n \"x-github-media-type\": \"github.beta\",\n \"x-ratelimit-limit\": \"60\",\n \"x-ratelimit-remaining\": \"57\",\n }\n assert r.links[\"next\"][\"rel\"] == \"next\"\n\n def test_cookie_parameters(self):\n key = \"some_cookie\"\n value = \"some_value\"\n secure = True\n domain = \"test.com\"\n rest = {\"HttpOnly\": True}\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, secure=secure, domain=domain, rest=rest)\n\n assert len(jar) == 1\n assert \"some_cookie\" in jar\n\n cookie = list(jar)[0]\n assert cookie.secure == secure\n assert cookie.domain == domain\n assert cookie._rest[\"HttpOnly\"] == rest[\"HttpOnly\"]\n\n def test_cookie_as_dict_keeps_len(self):\n key = \"some_cookie\"\n value = \"some_value\"\n\n key1 = \"some_cookie1\"\n value1 = \"some_value1\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert len(jar) == 2\n assert len(d1) == 2\n assert len(d2) == 2\n assert len(d3) == 2\n\n def test_cookie_as_dict_keeps_items(self):\n key = \"some_cookie\"\n value = 
\"some_value\"\n\n key1 = \"some_cookie1\"\n value1 = \"some_value1\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert d1[\"some_cookie\"] == \"some_value\"\n assert d2[\"some_cookie\"] == \"some_value\"\n assert d3[\"some_cookie1\"] == \"some_value1\"\n\n def test_cookie_as_dict_keys(self):\n key = \"some_cookie\"\n value = \"some_value\"\n\n key1 = \"some_cookie1\"\n value1 = \"some_value1\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n keys = jar.keys()\n assert keys == list(keys)\n # make sure one can use keys multiple times\n assert list(keys) == list(keys)\n\n def test_cookie_as_dict_values(self):\n key = \"some_cookie\"\n value = \"some_value\"\n\n key1 = \"some_cookie1\"\n value1 = \"some_value1\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n values = jar.values()\n assert values == list(values)\n # make sure one can use values multiple times\n assert list(values) == list(values)\n\n def test_cookie_as_dict_items(self):\n key = \"some_cookie\"\n value = \"some_value\"\n\n key1 = \"some_cookie1\"\n value1 = \"some_value1\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n items = jar.items()\n assert items == list(items)\n # make sure one can use items multiple times\n assert list(items) == list(items)\n\n def test_cookie_duplicate_names_different_domains(self):\n key = \"some_cookie\"\n value = \"some_value\"\n domain1 = \"test1.com\"\n domain2 = \"test2.com\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, domain=domain1)\n jar.set(key, value, domain=domain2)\n assert key in jar\n items = jar.items()\n assert len(items) == 2\n\n # Verify that CookieConflictError is raised if domain is not specified\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n # Verify that CookieConflictError is not raised if domain is specified\n cookie = jar.get(key, domain=domain1)\n assert cookie == value\n\n def test_cookie_duplicate_names_raises_cookie_conflict_error(self):\n key = \"some_cookie\"\n value = \"some_value\"\n path = \"some_path\"\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, path=path)\n jar.set(key, value)\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n def test_cookie_policy_copy(self):\n class MyCookiePolicy(cookielib.DefaultCookiePolicy):\n pass\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set_policy(MyCookiePolicy())\n assert isinstance(jar.copy().get_policy(), MyCookiePolicy)\n\n def test_time_elapsed_blank(self, httpbin):\n r = requests.get(httpbin(\"get\"))\n td = r.elapsed\n total_seconds = (\n td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6\n ) / 10**6\n assert total_seconds > 0.0\n\n def test_empty_response_has_content_none(self):\n r = requests.Response()\n assert r.content is None\n\n def test_response_is_iterable(self):\n r = requests.Response()\n io = StringIO.StringIO(\"abc\")\n read_ = io.read\n\n def read_mock(amt, decode_content=None):\n return read_(amt)\n\n setattr(io, \"read\", read_mock)\n r.raw = io\n assert next(iter(r))\n io.close()\n\n def test_response_decode_unicode(self):\n \"\"\"When called with decode_unicode, Response.iter_content should always\n return unicode.\n \"\"\"\n r = requests.Response()\n r._content_consumed = True\n r._content = b\"the content\"\n 
r.encoding = \"ascii\"\n\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n # also for streaming\n r = requests.Response()\n r.raw = io.BytesIO(b\"the content\")\n r.encoding = \"ascii\"\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n def test_response_reason_unicode(self):\n # check for unicode HTTP status\n r = requests.Response()\n r.url = \"unicode URL\"\n r.reason = \"Komponenttia ei löydy\".encode()\n r.status_code = 404\n r.encoding = None\n assert not r.ok # old behaviour - crashes here\n\n def test_response_reason_unicode_fallback(self):\n # check raise_status falls back to ISO-8859-1\n r = requests.Response()\n r.url = \"some url\"\n reason = \"Komponenttia ei löydy\"\n r.reason = reason.encode(\"latin-1\")\n r.status_code = 500\n r.encoding = None\n with pytest.raises(requests.exceptions.HTTPError) as e:\n r.raise_for_status()\n assert reason in e.value.args[0]\n\n def test_response_chunk_size_type(self):\n \"\"\"Ensure that chunk_size is passed as None or an integer, otherwise\n raise a TypeError.\n \"\"\"\n r = requests.Response()\n r.raw = io.BytesIO(b\"the content\")\n chunks = r.iter_content(1)\n assert all(len(chunk) == 1 for chunk in chunks)\n\n r = requests.Response()\n r.raw = io.BytesIO(b\"the content\")\n chunks = r.iter_content(None)\n assert list(chunks) == [b\"the content\"]\n\n r = requests.Response()\n r.raw = io.BytesIO(b\"the content\")\n with pytest.raises(TypeError):\n chunks = r.iter_content(\"1024\")\n\n @pytest.mark.parametrize(\n \"exception, args, expected\",\n (\n (urllib3.exceptions.ProtocolError, tuple(), ChunkedEncodingError),\n (urllib3.exceptions.DecodeError, tuple(), ContentDecodingError),\n (urllib3.exceptions.ReadTimeoutError, (None, \"\", \"\"), ConnectionError),\n (urllib3.exceptions.SSLError, tuple(), RequestsSSLError),\n ),\n )\n def test_iter_content_wraps_exceptions(\n self, httpbin, mocker, exception, args, expected\n ):\n r = requests.Response()\n r.raw = mocker.Mock()\n # ReadTimeoutError can't be initialized by mock\n # so we'll manually create the instance with args\n r.raw.stream.side_effect = exception(*args)\n\n with pytest.raises(expected):\n next(r.iter_content(1024))\n\n def test_request_and_response_are_pickleable(self, httpbin):\n r = requests.get(httpbin(\"get\"))\n\n # verify we can pickle the original request\n assert pickle.loads(pickle.dumps(r.request))\n\n # verify we can pickle the response and that we have access to\n # the original request.\n pr = pickle.loads(pickle.dumps(r))\n assert r.request.url == pr.request.url\n assert r.request.headers == pr.request.headers\n\n def test_prepared_request_is_pickleable(self, httpbin):\n p = requests.Request(\"GET\", httpbin(\"get\")).prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_file_is_pickleable(self, httpbin):\n with open(__file__, \"rb\") as f:\n r = requests.Request(\"POST\", httpbin(\"post\"), files={\"file\": f})\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends 
properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_hook_is_pickleable(self, httpbin):\n r = requests.Request(\"GET\", httpbin(\"get\"), hooks=default_hooks())\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n assert r.hooks == p.hooks\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_cannot_send_unprepared_requests(self, httpbin):\n r = requests.Request(url=httpbin())\n with pytest.raises(ValueError):\n requests.Session().send(r)\n\n def test_http_error(self):\n error = requests.exceptions.HTTPError()\n assert not error.response\n response = requests.Response()\n error = requests.exceptions.HTTPError(response=response)\n assert error.response == response\n error = requests.exceptions.HTTPError(\"message\", response=response)\n assert str(error) == \"message\"\n assert error.response == response\n\n def test_session_pickling(self, httpbin):\n r = requests.Request(\"GET\", httpbin(\"get\"))\n s = requests.Session()\n\n s = pickle.loads(pickle.dumps(s))\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n assert r.status_code == 200\n\n def test_fixes_1329(self, httpbin):\n \"\"\"Ensure that header updates are done case-insensitively.\"\"\"\n s = requests.Session()\n s.headers.update({\"ACCEPT\": \"BOGUS\"})\n s.headers.update({\"accept\": \"application/json\"})\n r = s.get(httpbin(\"get\"))\n headers = r.request.headers\n assert headers[\"accept\"] == \"application/json\"\n assert headers[\"Accept\"] == \"application/json\"\n assert headers[\"ACCEPT\"] == \"application/json\"\n\n def test_uppercase_scheme_redirect(self, httpbin):\n parts = urlparse(httpbin(\"html\"))\n url = \"HTTP://\" + parts.netloc + parts.path\n r = requests.get(httpbin(\"redirect-to\"), params={\"url\": url})\n assert r.status_code == 200\n assert r.url.lower() == url.lower()\n\n def test_transport_adapter_ordering(self):\n s = requests.Session()\n order = [\"https://\", \"http://\"]\n assert order == list(s.adapters)\n s.mount(\"http://git\", HTTPAdapter())\n s.mount(\"http://github\", HTTPAdapter())\n s.mount(\"http://github.com\", HTTPAdapter())\n s.mount(\"http://github.com/about/\", HTTPAdapter())\n order = [\n \"http://github.com/about/\",\n \"http://github.com\",\n \"http://github\",\n \"http://git\",\n \"https://\",\n \"http://\",\n ]\n assert order == list(s.adapters)\n s.mount(\"http://gittip\", HTTPAdapter())\n s.mount(\"http://gittip.com\", HTTPAdapter())\n s.mount(\"http://gittip.com/about/\", HTTPAdapter())\n order = [\n \"http://github.com/about/\",\n \"http://gittip.com/about/\",\n \"http://github.com\",\n \"http://gittip.com\",\n \"http://github\",\n \"http://gittip\",\n \"http://git\",\n \"https://\",\n \"http://\",\n ]\n assert order == list(s.adapters)\n s2 = requests.Session()\n s2.adapters = {\"http://\": HTTPAdapter()}\n s2.mount(\"https://\", HTTPAdapter())\n assert \"http://\" in s2.adapters\n assert \"https://\" in s2.adapters\n\n def test_session_get_adapter_prefix_matching(self):\n prefix = \"https://example.com\"\n more_specific_prefix = prefix + \"/some/path\"\n\n url_matching_only_prefix = prefix + \"/another/path\"\n url_matching_more_specific_prefix = more_specific_prefix + \"/longer/path\"\n url_not_matching_prefix = \"https://another.example.com/\"\n\n s = requests.Session()\n prefix_adapter 
= HTTPAdapter()\n more_specific_prefix_adapter = HTTPAdapter()\n s.mount(prefix, prefix_adapter)\n s.mount(more_specific_prefix, more_specific_prefix_adapter)\n\n assert s.get_adapter(url_matching_only_prefix) is prefix_adapter\n assert (\n s.get_adapter(url_matching_more_specific_prefix)\n is more_specific_prefix_adapter\n )\n assert s.get_adapter(url_not_matching_prefix) not in (\n prefix_adapter,\n more_specific_prefix_adapter,\n )\n\n def test_session_get_adapter_prefix_matching_mixed_case(self):\n mixed_case_prefix = \"hTtPs://eXamPle.CoM/MixEd_CAse_PREfix\"\n url_matching_prefix = mixed_case_prefix + \"/full_url\"\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix) is my_adapter\n\n def test_session_get_adapter_prefix_matching_is_case_insensitive(self):\n mixed_case_prefix = \"hTtPs://eXamPle.CoM/MixEd_CAse_PREfix\"\n url_matching_prefix_with_different_case = (\n \"HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url\"\n )\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter\n\n def test_header_remove_is_case_insensitive(self, httpbin):\n # From issue #1321\n s = requests.Session()\n s.headers[\"foo\"] = \"bar\"\n r = s.get(httpbin(\"get\"), headers={\"FOO\": None})\n assert \"foo\" not in r.request.headers\n\n def test_params_are_merged_case_sensitive(self, httpbin):\n s = requests.Session()\n s.params[\"foo\"] = \"bar\"\n r = s.get(httpbin(\"get\"), params={\"FOO\": \"bar\"})\n assert r.json()[\"args\"] == {\"foo\": \"bar\", \"FOO\": \"bar\"}\n\n def test_long_authinfo_in_url(self):\n url = \"http://{}:{}@{}:9000/path?query#frag\".format(\n \"E8A3BE87-9E3F-4620-8858-95478E385B5B\",\n \"EA770032-DA4D-4D84-8CE9-29C6D910BF1E\",\n \"exactly-------------sixty-----------three------------characters\",\n )\n r = requests.Request(\"GET\", url).prepare()\n assert r.url == url\n\n def test_header_keys_are_native(self, httpbin):\n headers = {\"unicode\": \"blah\", b\"byte\": \"blah\"}\n r = requests.Request(\"GET\", httpbin(\"get\"), headers=headers)\n p = r.prepare()\n\n # This is testing that they are builtin strings. 
A bit weird, but there\n # we go.\n assert \"unicode\" in p.headers.keys()\n assert \"byte\" in p.headers.keys()\n\n def test_header_validation(self, httpbin):\n \"\"\"Ensure prepare_headers regex isn't flagging valid header contents.\"\"\"\n headers_ok = {\n \"foo\": \"bar baz qux\",\n \"bar\": b\"fbbq\",\n \"baz\": \"\",\n \"qux\": \"1\",\n }\n r = requests.get(httpbin(\"get\"), headers=headers_ok)\n assert r.request.headers[\"foo\"] == headers_ok[\"foo\"]\n\n def test_header_value_not_str(self, httpbin):\n \"\"\"Ensure the header value is of type string or bytes as\n per discussion in GH issue #3386\n \"\"\"\n headers_int = {\"foo\": 3}\n headers_dict = {\"bar\": {\"foo\": \"bar\"}}\n headers_list = {\"baz\": [\"foo\", \"bar\"]}\n\n # Test for int\n with pytest.raises(InvalidHeader) as excinfo:\n requests.get(httpbin(\"get\"), headers=headers_int)\n assert \"foo\" in str(excinfo.value)\n # Test for dict\n with pytest.raises(InvalidHeader) as excinfo:\n requests.get(httpbin(\"get\"), headers=headers_dict)\n assert \"bar\" in str(excinfo.value)\n # Test for list\n with pytest.raises(InvalidHeader) as excinfo:\n requests.get(httpbin(\"get\"), headers=headers_list)\n assert \"baz\" in str(excinfo.value)\n\n def test_header_no_return_chars(self, httpbin):\n \"\"\"Ensure that a header containing return character sequences raise an\n exception. Otherwise, multiple headers are created from single string.\n \"\"\"\n headers_ret = {\"foo\": \"bar\\r\\nbaz: qux\"}\n headers_lf = {\"foo\": \"bar\\nbaz: qux\"}\n headers_cr = {\"foo\": \"bar\\rbaz: qux\"}\n\n # Test for newline\n with pytest.raises(InvalidHeader):\n requests.get(httpbin(\"get\"), headers=headers_ret)\n # Test for line feed\n with pytest.raises(InvalidHeader):\n requests.get(httpbin(\"get\"), headers=headers_lf)\n # Test for carriage return\n with pytest.raises(InvalidHeader):\n requests.get(httpbin(\"get\"), headers=headers_cr)\n\n def test_header_no_leading_space(self, httpbin):\n \"\"\"Ensure headers containing leading whitespace raise\n InvalidHeader Error before sending.\n \"\"\"\n headers_space = {\"foo\": \" bar\"}\n headers_tab = {\"foo\": \"\tbar\"}\n\n # Test for whitespace\n with pytest.raises(InvalidHeader):\n requests.get(httpbin(\"get\"), headers=headers_space)\n\n # Test for tab\n with pytest.raises(InvalidHeader):\n requests.get(httpbin(\"get\"), headers=headers_tab)\n\n @pytest.mark.parametrize(\"files\", (\"foo\", b\"foo\", bytearray(b\"foo\")))\n def test_can_send_objects_with_files(self, httpbin, files):\n data = {\"a\": \"this is a string\"}\n files = {\"b\": files}\n r = requests.Request(\"POST\", httpbin(\"post\"), data=data, files=files)\n p = r.prepare()\n assert \"multipart/form-data\" in p.headers[\"Content-Type\"]\n\n def test_can_send_file_object_with_non_string_filename(self, httpbin):\n f = io.BytesIO()\n f.name = 2\n r = requests.Request(\"POST\", httpbin(\"post\"), files={\"f\": f})\n p = r.prepare()\n\n assert \"multipart/form-data\" in p.headers[\"Content-Type\"]\n\n def test_autoset_header_values_are_native(self, httpbin):\n data = \"this is a string\"\n length = \"16\"\n req = requests.Request(\"POST\", httpbin(\"post\"), data=data)\n p = req.prepare()\n\n assert p.headers[\"Content-Length\"] == length\n\n def test_nonhttp_schemes_dont_check_URLs(self):\n test_urls = (\n \"data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==\",\n \"file:///etc/passwd\",\n \"magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431\",\n )\n for test_url in test_urls:\n req = 
requests.Request(\"GET\", test_url)\n preq = req.prepare()\n assert test_url == preq.url\n\n def test_auth_is_stripped_on_http_downgrade(\n self, httpbin, httpbin_secure, httpbin_ca_bundle\n ):\n r = requests.get(\n httpbin_secure(\"redirect-to\"),\n params={\"url\": httpbin(\"get\")},\n auth=(\"user\", \"pass\"),\n verify=httpbin_ca_bundle,\n )\n assert r.history[0].request.headers[\"Authorization\"]\n assert \"Authorization\" not in r.request.headers\n\n def test_auth_is_retained_for_redirect_on_host(self, httpbin):\n r = requests.get(httpbin(\"redirect/1\"), auth=(\"user\", \"pass\"))\n h1 = r.history[0].request.headers[\"Authorization\"]\n h2 = r.request.headers[\"Authorization\"]\n\n assert h1 == h2\n\n def test_should_strip_auth_host_change(self):\n s = requests.Session()\n assert s.should_strip_auth(\n \"http://example.com/foo\", \"http://another.example.com/\"\n )\n\n def test_should_strip_auth_http_downgrade(self):\n s = requests.Session()\n assert s.should_strip_auth(\"https://example.com/foo\", \"http://example.com/bar\")\n\n def test_should_strip_auth_https_upgrade(self):\n s = requests.Session()\n assert not s.should_strip_auth(\n \"http://example.com/foo\", \"https://example.com/bar\"\n )\n assert not s.should_strip_auth(\n \"http://example.com:80/foo\", \"https://example.com/bar\"\n )\n assert not s.should_strip_auth(\n \"http://example.com/foo\", \"https://example.com:443/bar\"\n )\n # Non-standard ports should trigger stripping\n assert s.should_strip_auth(\n \"http://example.com:8080/foo\", \"https://example.com/bar\"\n )\n assert s.should_strip_auth(\n \"http://example.com/foo\", \"https://example.com:8443/bar\"\n )\n\n def test_should_strip_auth_port_change(self):\n s = requests.Session()\n assert s.should_strip_auth(\n \"http://example.com:1234/foo\", \"https://example.com:4321/bar\"\n )\n\n @pytest.mark.parametrize(\n \"old_uri, new_uri\",\n (\n (\"https://example.com:443/foo\", \"https://example.com/bar\"),\n (\"http://example.com:80/foo\", \"http://example.com/bar\"),\n (\"https://example.com/foo\", \"https://example.com:443/bar\"),\n (\"http://example.com/foo\", \"http://example.com:80/bar\"),\n ),\n )\n def test_should_strip_auth_default_port(self, old_uri, new_uri):\n s = requests.Session()\n assert not s.should_strip_auth(old_uri, new_uri)\n\n def test_manual_redirect_with_partial_body_read(self, httpbin):\n s = requests.Session()\n r1 = s.get(httpbin(\"redirect/2\"), allow_redirects=False, stream=True)\n assert r1.is_redirect\n rg = s.resolve_redirects(r1, r1.request, stream=True)\n\n # read only the first eight bytes of the response body,\n # then follow the redirect\n r1.iter_content(8)\n r2 = next(rg)\n assert r2.is_redirect\n\n # read all of the response via iter_content,\n # then follow the redirect\n for _ in r2.iter_content():\n pass\n r3 = next(rg)\n assert not r3.is_redirect\n\n def test_prepare_body_position_non_stream(self):\n data = b\"the data\"\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position is None\n\n def test_rewind_body(self):\n data = io.BytesIO(b\"the data\")\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position == 0\n assert prep.body.read() == b\"the data\"\n\n # the data has all been read\n assert prep.body.read() == b\"\"\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b\"the data\"\n\n def test_rewind_partially_read_body(self):\n data = io.BytesIO(b\"the data\")\n data.read(4) # read 
some data\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position == 4\n assert prep.body.read() == b\"data\"\n\n # the data has all been read\n assert prep.body.read() == b\"\"\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b\"data\"\n\n def test_rewind_body_no_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def __iter__(self):\n return\n\n data = BadFileObj(\"the data\")\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert \"Unable to rewind request body\" in str(e)\n\n def test_rewind_body_failed_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def seek(self, pos, whence=0):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj(\"the data\")\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert \"error occurred when rewinding request body\" in str(e)\n\n def test_rewind_body_failed_tell(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj(\"the data\")\n prep = requests.Request(\"GET\", \"http://example.com\", data=data).prepare()\n assert prep._body_position is not None\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert \"Unable to rewind request body\" in str(e)\n\n def _patch_adapter_gzipped_redirect(self, session, url):\n adapter = session.get_adapter(url=url)\n org_build_response = adapter.build_response\n self._patched_response = False\n\n def build_response(*args, **kwargs):\n resp = org_build_response(*args, **kwargs)\n if not self._patched_response:\n resp.raw.headers[\"content-encoding\"] = \"gzip\"\n self._patched_response = True\n return resp\n\n adapter.build_response = build_response\n\n def test_redirect_with_wrong_gzipped_header(self, httpbin):\n s = requests.Session()\n url = httpbin(\"redirect/1\")\n self._patch_adapter_gzipped_redirect(s, url)\n s.get(url)\n\n @pytest.mark.parametrize(\n \"username, password, auth_str\",\n (\n (\"test\", \"test\", \"Basic dGVzdDp0ZXN0\"),\n (\n \"имя\".encode(),\n \"пароль\".encode(),\n \"Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA==\",\n ),\n ),\n )\n def test_basic_auth_str_is_always_native(self, username, password, auth_str):\n s = _basic_auth_str(username, password)\n assert isinstance(s, builtin_str)\n assert s == auth_str\n\n def test_requests_history_is_saved(self, httpbin):\n r = requests.get(httpbin(\"redirect/5\"))\n total = r.history[-1].history\n i = 0\n for item in r.history:\n assert item.history == total[0:i]\n i += 1\n\n def test_json_param_post_content_type_works(self, httpbin):\n r = requests.post(httpbin(\"post\"), json={\"life\": 42})\n assert r.status_code == 200\n assert \"application/json\" in r.request.headers[\"Content-Type\"]\n assert {\"life\": 42} == r.json()[\"json\"]\n\n def test_json_param_post_should_not_override_data_param(self, httpbin):\n r = requests.Request(\n method=\"POST\",\n url=httpbin(\"post\"),\n data={\"stuff\": \"elixr\"},\n json={\"music\": \"flute\"},\n )\n prep = r.prepare()\n assert 
\"stuff=elixr\" == prep.body\n\n def test_response_iter_lines(self, httpbin):\n r = requests.get(httpbin(\"stream/4\"), stream=True)\n assert r.status_code == 200\n\n it = r.iter_lines()\n next(it)\n assert len(list(it)) == 3\n\n def test_response_context_manager(self, httpbin):\n with requests.get(httpbin(\"stream/4\"), stream=True) as response:\n assert isinstance(response, requests.Response)\n\n assert response.raw.closed\n\n def test_unconsumed_session_response_closes_connection(self, httpbin):\n s = requests.session()\n\n with contextlib.closing(s.get(httpbin(\"stream/4\"), stream=True)) as response:\n pass\n\n assert response._content_consumed is False\n assert response.raw.closed\n\n @pytest.mark.xfail\n def test_response_iter_lines_reentrant(self, httpbin):\n \"\"\"Response.iter_lines() is not reentrant safe\"\"\"\n r = requests.get(httpbin(\"stream/4\"), stream=True)\n assert r.status_code == 200\n\n next(r.iter_lines())\n assert len(list(r.iter_lines())) == 3\n\n def test_session_close_proxy_clear(self, mocker):\n proxies = {\n \"one\": mocker.Mock(),\n \"two\": mocker.Mock(),\n }\n session = requests.Session()\n mocker.patch.dict(session.adapters[\"http://\"].proxy_manager, proxies)\n session.close()\n proxies[\"one\"].clear.assert_called_once_with()\n proxies[\"two\"].clear.assert_called_once_with()\n\n def test_proxy_auth(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:pass@httpbin.org\")\n assert headers == {\"Proxy-Authorization\": \"Basic dXNlcjpwYXNz\"}\n\n def test_proxy_auth_empty_pass(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:@httpbin.org\")\n assert headers == {\"Proxy-Authorization\": \"Basic dXNlcjo=\"}\n\n def test_response_json_when_content_is_None(self, httpbin):\n r = requests.get(httpbin(\"/status/204\"))\n # Make sure r.content is None\n r.status_code = 0\n r._content = False\n r._content_consumed = False\n\n assert r.content is None\n with pytest.raises(ValueError):\n r.json()\n\n def test_response_without_release_conn(self):\n \"\"\"Test `close` call for non-urllib3-like raw objects.\n Should work when `release_conn` attr doesn't exist on `response.raw`.\n \"\"\"\n resp = requests.Response()\n resp.raw = StringIO.StringIO(\"test\")\n assert not resp.raw.closed\n resp.close()\n assert resp.raw.closed\n\n def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = (\"user\", \"pass\")\n url = httpbin(\"post\")\n file_obj = io.BytesIO(b\"\")\n r = requests.Request(\"POST\", url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert \"Transfer-Encoding\" in prepared_request.headers\n assert \"Content-Length\" not in prepared_request.headers\n\n def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size > 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = (\"user\", \"pass\")\n url = httpbin(\"post\")\n file_obj = io.BytesIO(b\"test data\")\n r = requests.Request(\"POST\", url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert \"Transfer-Encoding\" not in prepared_request.headers\n assert \"Content-Length\" in prepared_request.headers\n\n def test_chunked_upload_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that requests with a generator body stream using\n Transfer-Encoding: chunked, not a 
Content-Length header.\n \"\"\"\n data = (i for i in [b\"a\", b\"b\", b\"c\"])\n url = httpbin(\"post\")\n r = requests.Request(\"POST\", url, data=data)\n prepared_request = r.prepare()\n assert \"Transfer-Encoding\" in prepared_request.headers\n assert \"Content-Length\" not in prepared_request.headers\n\n def test_custom_redirect_mixin(self, httpbin):\n \"\"\"Tests a custom mixin to overwrite ``get_redirect_target``.\n\n Ensures a subclassed ``requests.Session`` can handle a certain type of\n malformed redirect responses.\n\n 1. original request receives a proper response: 302 redirect\n 2. following the redirect, a malformed response is given:\n status code = HTTP 200\n location = alternate url\n 3. the custom session catches the edge case and follows the redirect\n \"\"\"\n url_final = httpbin(\"html\")\n querystring_malformed = urlencode({\"location\": url_final})\n url_redirect_malformed = httpbin(\"response-headers?%s\" % querystring_malformed)\n querystring_redirect = urlencode({\"url\": url_redirect_malformed})\n url_redirect = httpbin(\"redirect-to?%s\" % querystring_redirect)\n urls_test = [\n url_redirect,\n url_redirect_malformed,\n url_final,\n ]\n\n class CustomRedirectSession(requests.Session):\n def get_redirect_target(self, resp):\n # default behavior\n if resp.is_redirect:\n return resp.headers[\"location\"]\n # edge case - check to see if 'location' is in headers anyways\n location = resp.headers.get(\"location\")\n if location and (location != resp.url):\n return location\n return None\n\n session = CustomRedirectSession()\n r = session.get(urls_test[0])\n assert len(r.history) == 2\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n assert r.history[1].status_code == 200\n assert not r.history[1].is_redirect\n assert r.url == urls_test[2]\n\n\nclass TestCaseInsensitiveDict:\n @pytest.mark.parametrize(\n \"cid\",\n (\n CaseInsensitiveDict({\"Foo\": \"foo\", \"BAr\": \"bar\"}),\n CaseInsensitiveDict([(\"Foo\", \"foo\"), (\"BAr\", \"bar\")]),\n CaseInsensitiveDict(FOO=\"foo\", BAr=\"bar\"),\n ),\n )\n def test_init(self, cid):\n assert len(cid) == 2\n assert \"foo\" in cid\n assert \"bar\" in cid\n\n def test_docstring_example(self):\n cid = CaseInsensitiveDict()\n cid[\"Accept\"] = \"application/json\"\n assert cid[\"aCCEPT\"] == \"application/json\"\n assert list(cid) == [\"Accept\"]\n\n def test_len(self):\n cid = CaseInsensitiveDict({\"a\": \"a\", \"b\": \"b\"})\n cid[\"A\"] = \"a\"\n assert len(cid) == 2\n\n def test_getitem(self):\n cid = CaseInsensitiveDict({\"Spam\": \"blueval\"})\n assert cid[\"spam\"] == \"blueval\"\n assert cid[\"SPAM\"] == \"blueval\"\n\n def test_fixes_649(self):\n \"\"\"__setitem__ should behave case-insensitively.\"\"\"\n cid = CaseInsensitiveDict()\n cid[\"spam\"] = \"oneval\"\n cid[\"Spam\"] = \"twoval\"\n cid[\"sPAM\"] = \"redval\"\n cid[\"SPAM\"] = \"blueval\"\n assert cid[\"spam\"] == \"blueval\"\n assert cid[\"SPAM\"] == \"blueval\"\n assert list(cid.keys()) == [\"SPAM\"]\n\n def test_delitem(self):\n cid = CaseInsensitiveDict()\n cid[\"Spam\"] = \"someval\"\n del cid[\"sPam\"]\n assert \"spam\" not in cid\n assert len(cid) == 0\n\n def test_contains(self):\n cid = CaseInsensitiveDict()\n cid[\"Spam\"] = \"someval\"\n assert \"Spam\" in cid\n assert \"spam\" in cid\n assert \"SPAM\" in cid\n assert \"sPam\" in cid\n assert \"notspam\" not in cid\n\n def test_get(self):\n cid = CaseInsensitiveDict()\n cid[\"spam\"] = \"oneval\"\n cid[\"SPAM\"] = \"blueval\"\n assert 
cid.get(\"spam\") == \"blueval\"\n assert cid.get(\"SPAM\") == \"blueval\"\n assert cid.get(\"sPam\") == \"blueval\"\n assert cid.get(\"notspam\", \"default\") == \"default\"\n\n def test_update(self):\n cid = CaseInsensitiveDict()\n cid[\"spam\"] = \"blueval\"\n cid.update({\"sPam\": \"notblueval\"})\n assert cid[\"spam\"] == \"notblueval\"\n cid = CaseInsensitiveDict({\"Foo\": \"foo\", \"BAr\": \"bar\"})\n cid.update({\"fOO\": \"anotherfoo\", \"bAR\": \"anotherbar\"})\n assert len(cid) == 2\n assert cid[\"foo\"] == \"anotherfoo\"\n assert cid[\"bar\"] == \"anotherbar\"\n\n def test_update_retains_unchanged(self):\n cid = CaseInsensitiveDict({\"foo\": \"foo\", \"bar\": \"bar\"})\n cid.update({\"foo\": \"newfoo\"})\n assert cid[\"bar\"] == \"bar\"\n\n def test_iter(self):\n cid = CaseInsensitiveDict({\"Spam\": \"spam\", \"Eggs\": \"eggs\"})\n keys = frozenset([\"Spam\", \"Eggs\"])\n assert frozenset(iter(cid)) == keys\n\n def test_equality(self):\n cid = CaseInsensitiveDict({\"SPAM\": \"blueval\", \"Eggs\": \"redval\"})\n othercid = CaseInsensitiveDict({\"spam\": \"blueval\", \"eggs\": \"redval\"})\n assert cid == othercid\n del othercid[\"spam\"]\n assert cid != othercid\n assert cid == {\"spam\": \"blueval\", \"eggs\": \"redval\"}\n assert cid != object()\n\n def test_setdefault(self):\n cid = CaseInsensitiveDict({\"Spam\": \"blueval\"})\n assert cid.setdefault(\"spam\", \"notblueval\") == \"blueval\"\n assert cid.setdefault(\"notspam\", \"notblueval\") == \"notblueval\"\n\n def test_lower_items(self):\n cid = CaseInsensitiveDict(\n {\n \"Accept\": \"application/json\",\n \"user-Agent\": \"requests\",\n }\n )\n keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())\n lowerkeyset = frozenset([\"accept\", \"user-agent\"])\n assert keyset == lowerkeyset\n\n def test_preserve_key_case(self):\n cid = CaseInsensitiveDict(\n {\n \"Accept\": \"application/json\",\n \"user-Agent\": \"requests\",\n }\n )\n keyset = frozenset([\"Accept\", \"user-Agent\"])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_preserve_last_key_case(self):\n cid = CaseInsensitiveDict(\n {\n \"Accept\": \"application/json\",\n \"user-Agent\": \"requests\",\n }\n )\n cid.update({\"ACCEPT\": \"application/json\"})\n cid[\"USER-AGENT\"] = \"requests\"\n keyset = frozenset([\"ACCEPT\", \"USER-AGENT\"])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_copy(self):\n cid = CaseInsensitiveDict(\n {\n \"Accept\": \"application/json\",\n \"user-Agent\": \"requests\",\n }\n )\n cid_copy = cid.copy()\n assert cid == cid_copy\n cid[\"changed\"] = True\n assert cid != cid_copy\n\n\nclass TestMorselToCookieExpires:\n \"\"\"Tests for morsel_to_cookie when morsel contains expires.\"\"\"\n\n def test_expires_valid_str(self):\n \"\"\"Test case where we convert expires from string time.\"\"\"\n\n morsel = Morsel()\n morsel[\"expires\"] = \"Thu, 01-Jan-1970 00:00:01 GMT\"\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires == 1\n\n @pytest.mark.parametrize(\n \"value, exception\",\n (\n (100, TypeError),\n (\"woops\", ValueError),\n ),\n )\n def test_expires_invalid_int(self, value, exception):\n \"\"\"Test case where an invalid type is passed for expires.\"\"\"\n morsel = Morsel()\n morsel[\"expires\"] = value\n with pytest.raises(exception):\n morsel_to_cookie(morsel)\n\n def test_expires_none(self):\n \"\"\"Test case where expires is 
None.\"\"\"\n\n morsel = Morsel()\n morsel[\"expires\"] = None\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires is None\n\n\nclass TestMorselToCookieMaxAge:\n\n \"\"\"Tests for morsel_to_cookie when morsel contains max-age.\"\"\"\n\n def test_max_age_valid_int(self):\n \"\"\"Test case where a valid max age in seconds is passed.\"\"\"\n\n morsel = Morsel()\n morsel[\"max-age\"] = 60\n cookie = morsel_to_cookie(morsel)\n assert isinstance(cookie.expires, int)\n\n def test_max_age_invalid_str(self):\n \"\"\"Test case where a invalid max age is passed.\"\"\"\n\n morsel = Morsel()\n morsel[\"max-age\"] = \"woops\"\n with pytest.raises(TypeError):\n morsel_to_cookie(morsel)\n\n\nclass TestTimeout:\n def test_stream_timeout(self, httpbin):\n try:\n requests.get(httpbin(\"delay/10\"), timeout=2.0)\n except requests.exceptions.Timeout as e:\n assert \"Read timed out\" in e.args[0].args[0]\n\n @pytest.mark.parametrize(\n \"timeout, error_text\",\n (\n ((3, 4, 5), \"(connect, read)\"),\n (\"foo\", \"must be an int, float or None\"),\n ),\n )\n def test_invalid_timeout(self, httpbin, timeout, error_text):\n with pytest.raises(ValueError) as e:\n requests.get(httpbin(\"get\"), timeout=timeout)\n assert error_text in str(e)\n\n @pytest.mark.parametrize(\"timeout\", (None, Urllib3Timeout(connect=None, read=None)))\n def test_none_timeout(self, httpbin, timeout):\n \"\"\"Check that you can set None as a valid timeout value.\n\n To actually test this behavior, we'd want to check that setting the\n timeout to None actually lets the request block past the system default\n timeout. However, this would make the test suite unbearably slow.\n Instead we verify that setting the timeout to None does not prevent the\n request from succeeding.\n \"\"\"\n r = requests.get(httpbin(\"get\"), timeout=timeout)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n \"timeout\", ((None, 0.1), Urllib3Timeout(connect=None, read=0.1))\n )\n def test_read_timeout(self, httpbin, timeout):\n try:\n requests.get(httpbin(\"delay/10\"), timeout=timeout)\n pytest.fail(\"The recv() request should time out.\")\n except ReadTimeout:\n pass\n\n @pytest.mark.parametrize(\n \"timeout\", ((0.1, None), Urllib3Timeout(connect=0.1, read=None))\n )\n def test_connect_timeout(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail(\"The connect() request should time out.\")\n except ConnectTimeout as e:\n assert isinstance(e, ConnectionError)\n assert isinstance(e, Timeout)\n\n @pytest.mark.parametrize(\n \"timeout\", ((0.1, 0.1), Urllib3Timeout(connect=0.1, read=0.1))\n )\n def test_total_timeout_connect(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail(\"The connect() request should time out.\")\n except ConnectTimeout:\n pass\n\n def test_encoded_methods(self, httpbin):\n \"\"\"See: https://github.com/psf/requests/issues/2316\"\"\"\n r = requests.request(b\"GET\", httpbin(\"get\"))\n assert r.ok\n\n\nSendCall = collections.namedtuple(\"SendCall\", (\"args\", \"kwargs\"))\n\n\nclass RedirectSession(SessionRedirectMixin):\n def __init__(self, order_of_redirects):\n self.redirects = order_of_redirects\n self.calls = []\n self.max_redirects = 30\n self.cookies = {}\n self.trust_env = False\n\n def send(self, *args, **kwargs):\n self.calls.append(SendCall(args, kwargs))\n return self.build_response()\n\n def build_response(self):\n request = self.calls[-1].args[0]\n r = requests.Response()\n\n try:\n r.status_code = int(self.redirects.pop(0))\n except IndexError:\n r.status_code = 
200\n\n r.headers = CaseInsensitiveDict({\"Location\": \"/\"})\n r.raw = self._build_raw()\n r.request = request\n return r\n\n def _build_raw(self):\n string = StringIO.StringIO(\"\")\n setattr(string, \"release_conn\", lambda *args: args)\n return string\n\n\ndef test_json_encodes_as_bytes():\n # urllib3 expects bodies as bytes-like objects\n body = {\"key\": \"value\"}\n p = PreparedRequest()\n p.prepare(method=\"GET\", url=\"https://www.example.com/\", json=body)\n assert isinstance(p.body, bytes)\n\n\ndef test_requests_are_updated_each_time(httpbin):\n session = RedirectSession([303, 307])\n prep = requests.Request(\"POST\", httpbin(\"post\")).prepare()\n r0 = session.send(prep)\n assert r0.request.method == \"POST\"\n assert session.calls[-1] == SendCall((r0.request,), {})\n redirect_generator = session.resolve_redirects(r0, prep)\n default_keyword_args = {\n \"stream\": False,\n \"verify\": True,\n \"cert\": None,\n \"timeout\": None,\n \"allow_redirects\": False,\n \"proxies\": {},\n }\n for response in redirect_generator:\n assert response.request.method == \"GET\"\n send_call = SendCall((response.request,), default_keyword_args)\n assert session.calls[-1] == send_call\n\n\n@pytest.mark.parametrize(\n \"var,url,proxy\",\n [\n (\"http_proxy\", \"http://example.com\", \"socks5://proxy.com:9876\"),\n (\"https_proxy\", \"https://example.com\", \"socks5://proxy.com:9876\"),\n (\"all_proxy\", \"http://example.com\", \"socks5://proxy.com:9876\"),\n (\"all_proxy\", \"https://example.com\", \"socks5://proxy.com:9876\"),\n ],\n)\ndef test_proxy_env_vars_override_default(var, url, proxy):\n session = requests.Session()\n prep = PreparedRequest()\n prep.prepare(method=\"GET\", url=url)\n\n kwargs = {var: proxy}\n scheme = urlparse(url).scheme\n with override_environ(**kwargs):\n proxies = session.rebuild_proxies(prep, {})\n assert scheme in proxies\n assert proxies[scheme] == proxy\n\n\n@pytest.mark.parametrize(\n \"data\",\n (\n ((\"a\", \"b\"), (\"c\", \"d\")),\n ((\"c\", \"d\"), (\"a\", \"b\")),\n ((\"a\", \"b\"), (\"c\", \"d\"), (\"e\", \"f\")),\n ),\n)\ndef test_data_argument_accepts_tuples(data):\n \"\"\"Ensure that the data argument will accept tuples of strings\n and properly encode them.\n \"\"\"\n p = PreparedRequest()\n p.prepare(\n method=\"GET\", url=\"http://www.example.com\", data=data, hooks=default_hooks()\n )\n assert p.body == urlencode(data)\n\n\n@pytest.mark.parametrize(\n \"kwargs\",\n (\n None,\n {\n \"method\": \"GET\",\n \"url\": \"http://www.example.com\",\n \"data\": \"foo=bar\",\n \"hooks\": default_hooks(),\n },\n {\n \"method\": \"GET\",\n \"url\": \"http://www.example.com\",\n \"data\": \"foo=bar\",\n \"hooks\": default_hooks(),\n \"cookies\": {\"foo\": \"bar\"},\n },\n {\"method\": \"GET\", \"url\": \"http://www.example.com/üniçø∂é\"},\n ),\n)\ndef test_prepared_copy(kwargs):\n p = PreparedRequest()\n if kwargs:\n p.prepare(**kwargs)\n copy = p.copy()\n for attr in (\"method\", \"url\", \"headers\", \"_cookies\", \"body\", \"hooks\"):\n assert getattr(p, attr) == getattr(copy, attr)\n\n\ndef test_urllib3_retries(httpbin):\n from urllib3.util import Retry\n\n s = requests.Session()\n s.mount(\"http://\", HTTPAdapter(max_retries=Retry(total=2, status_forcelist=[500])))\n\n with pytest.raises(RetryError):\n s.get(httpbin(\"status/500\"))\n\n\ndef test_urllib3_pool_connection_closed(httpbin):\n s = requests.Session()\n s.mount(\"http://\", HTTPAdapter(pool_connections=0, pool_maxsize=0))\n\n try:\n s.get(httpbin(\"status/200\"))\n except ConnectionError as e:\n 
assert \"Pool is closed.\" in str(e)\n\n\nclass TestPreparingURLs:\n @pytest.mark.parametrize(\n \"url,expected\",\n (\n (\"http://google.com\", \"http://google.com/\"),\n (\"http://ジェーピーニック.jp\", \"http://xn--hckqz9bzb1cyrb.jp/\"),\n (\"http://xn--n3h.net/\", \"http://xn--n3h.net/\"),\n (\"http://ジェーピーニック.jp\".encode(), \"http://xn--hckqz9bzb1cyrb.jp/\"),\n (\"http://straße.de/straße\", \"http://xn--strae-oqa.de/stra%C3%9Fe\"),\n (\n \"http://straße.de/straße\".encode(),\n \"http://xn--strae-oqa.de/stra%C3%9Fe\",\n ),\n (\n \"http://Königsgäßchen.de/straße\",\n \"http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe\",\n ),\n (\n \"http://Königsgäßchen.de/straße\".encode(),\n \"http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe\",\n ),\n (b\"http://xn--n3h.net/\", \"http://xn--n3h.net/\"),\n (\n b\"http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/\",\n \"http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/\",\n ),\n (\n \"http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/\",\n \"http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/\",\n ),\n ),\n )\n def test_preparing_url(self, url, expected):\n def normalize_percent_encode(x):\n # Helper function that normalizes equivalent\n # percent-encoded bytes before comparisons\n for c in re.findall(r\"%[a-fA-F0-9]{2}\", x):\n x = x.replace(c, c.upper())\n return x\n\n r = requests.Request(\"GET\", url=url)\n p = r.prepare()\n assert normalize_percent_encode(p.url) == expected\n\n @pytest.mark.parametrize(\n \"url\",\n (\n b\"http://*.google.com\",\n b\"http://*\",\n \"http://*.google.com\",\n \"http://*\",\n \"http://☃.net/\",\n ),\n )\n def test_preparing_bad_url(self, url):\n r = requests.Request(\"GET\", url=url)\n with pytest.raises(requests.exceptions.InvalidURL):\n r.prepare()\n\n @pytest.mark.parametrize(\"url, exception\", ((\"http://localhost:-1\", InvalidURL),))\n def test_redirecting_to_bad_url(self, httpbin, url, exception):\n with pytest.raises(exception):\n requests.get(httpbin(\"redirect-to\"), params={\"url\": url})\n\n @pytest.mark.parametrize(\n \"input, expected\",\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n \"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n \"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n \"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n b\"mailto:user@example.org\",\n \"mailto:user@example.org\",\n ),\n (\n \"mailto:user@example.org\",\n \"mailto:user@example.org\",\n ),\n (\n b\"data:SSDimaUgUHl0aG9uIQ==\",\n \"data:SSDimaUgUHl0aG9uIQ==\",\n ),\n ),\n )\n def test_url_mutation(self, input, expected):\n \"\"\"\n This test validates that we correctly exclude some URLs from\n preparation, and that we handle others. 
Specifically, it tests that\n any URL whose scheme doesn't begin with \"http\" is left alone, and\n those whose scheme *does* begin with \"http\" are mutated.\n \"\"\"\n r = requests.Request(\"GET\", url=input)\n p = r.prepare()\n assert p.url == expected\n\n @pytest.mark.parametrize(\n \"input, params, expected\",\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n \"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n \"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n \"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n b\"mailto:user@example.org\",\n {\"key\": \"value\"},\n \"mailto:user@example.org\",\n ),\n (\n \"mailto:user@example.org\",\n {\"key\": \"value\"},\n \"mailto:user@example.org\",\n ),\n ),\n )\n def test_parameters_for_nonstandard_schemes(self, input, params, expected):\n \"\"\"\n Setting parameters for nonstandard schemes is allowed if those schemes\n begin with \"http\", and is forbidden otherwise.\n \"\"\"\n r = requests.Request(\"GET\", url=input, params=params)\n p = r.prepare()\n assert p.url == expected\n\n def test_post_json_nan(self, httpbin):\n data = {\"foo\": float(\"nan\")}\n with pytest.raises(requests.exceptions.InvalidJSONError):\n requests.post(httpbin(\"post\"), json=data)\n\n def test_json_decode_compatibility(self, httpbin):\n r = requests.get(httpbin(\"bytes/20\"))\n with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:\n r.json()\n assert isinstance(excinfo.value, RequestException)\n assert isinstance(excinfo.value, JSONDecodeError)\n assert r.text not in str(excinfo.value)\n\n def test_json_decode_persists_doc_attr(self, httpbin):\n r = requests.get(httpbin(\"bytes/20\"))\n with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:\n r.json()\n assert excinfo.value.doc == r.text\n",
"path": "tests/test_requests.py"
}
] | 13_1 | python | import sys
import pytest
# Requests to this URL should always fail with a connection timeout (nothing
# listening on that port)
TARPIT = "http://10.255.255.1"
# This fails fast (connection refused) instead of waiting out TARPIT's connect timeout
INVALID_PROXY = "http://localhost:1"
class TestRequests:
import requests
from requests.exceptions import (
InvalidHeader
)
try:
from ssl import SSLContext
del SSLContext
HAS_MODERN_SSL = True
except ImportError:
HAS_MODERN_SSL = False
try:
requests.pyopenssl
HAS_PYOPENSSL = True
except AttributeError:
HAS_PYOPENSSL = False
try:
from http.server import HTTPServer, SimpleHTTPRequestHandler
except ImportError:
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
@staticmethod
def prepare_url(value):
from requests.compat import urljoin
# Issue #1483: Make sure the URL always has a trailing slash
httpbin_url = value.url.rstrip("/") + "/"
def inner(*suffix):
return urljoin(httpbin_url, "/".join(suffix))
return inner
@pytest.fixture
def httpbin(self, httpbin):
return self.prepare_url(httpbin)
@pytest.fixture
def httpbin_secure(self, httpbin_secure):
return self.prepare_url(httpbin_secure)
@pytest.fixture
def nosan_server(self, tmp_path_factory):
# delay importing until the fixture in order to make it possible
# to deselect the test via command-line when trustme is not available
import trustme
import ssl
import threading
tmpdir = tmp_path_factory.mktemp("certs")
ca = trustme.CA()
# only commonName, no subjectAltName
server_cert = ca.issue_cert(common_name="localhost")
ca_bundle = str(tmpdir / "ca.pem")
ca.cert_pem.write_to_path(ca_bundle)
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
server_cert.configure_cert(context)
server = self.HTTPServer(("localhost", 0), self.SimpleHTTPRequestHandler)
server.socket = context.wrap_socket(server.socket, server_side=True)
server_thread = threading.Thread(target=server.serve_forever)
server_thread.start()
yield "localhost", server.server_address[1], ca_bundle
server.shutdown()
server_thread.join()
@pytest.mark.parametrize(
"invalid_header",
(
{" foo": "bar"},
{"\tfoo": "bar"},
{" foo": "bar"},
{"foo": " bar"},
{"foo": " bar"},
{"foo": "\tbar"},
{" ": "bar"},
),
)
def test_header_no_leading_space(self, httpbin, invalid_header):
        """Ensure headers containing leading whitespace raise
        InvalidHeader before sending.
        """
        import requests
        from requests.exceptions import InvalidHeader
with pytest.raises(InvalidHeader):
requests.get(httpbin("get"), headers=invalid_header)
@pytest.mark.parametrize(
"invalid_header",
(
{"foo": "bar\r\nbaz: qux"},
{"foo": "bar\n\rbaz: qux"},
{"foo": "bar\nbaz: qux"},
{"foo": "bar\rbaz: qux"},
{"fo\ro": "bar"},
{"fo\r\no": "bar"},
{"fo\n\ro": "bar"},
{"fo\no": "bar"},
),
)
def test_header_no_return_chars(self, httpbin, invalid_header):
        """Ensure that a header containing return character sequences raises an
        exception; otherwise, multiple headers could be created from a single
        string.
        """
        import requests
        from requests.exceptions import InvalidHeader
with pytest.raises(InvalidHeader):
requests.get(httpbin("get"), headers=invalid_header)
def main():
    # Run the pytest tests in this file programmatically and mirror the
    # result in the process exit code: 1 if any test failed, 0 otherwise.
    exit_code = pytest.main(["-v", __file__])
    sys.exit(1 if exit_code != 0 else 0)
if __name__ == '__main__':
main()
|
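The test script above asserts that malformed headers are rejected with `InvalidHeader` before any bytes are sent. A minimal, network-free sketch of the behaviour it expects, assuming (as the tests do) that header validation runs while the request is being prepared; the example.com URL is just a placeholder:

```python
import requests
from requests.exceptions import InvalidHeader

# Preparing the request triggers header validation, so no server is
# needed to observe the rejection of a header-injection attempt.
req = requests.Request(
    "GET", "http://example.com", headers={"foo": "bar\r\nbaz: qux"}
)
try:
    req.prepare()
except InvalidHeader as exc:
    print("rejected before sending:", exc)
```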
https://github.com/teamqurrent/requests | Your objective is to improve exception handling in the requests library, specifically its handling of SSL errors: ensure that SSL errors raised by urllib3 are wrapped and re-raised as `requests.exceptions.SSLError`. This keeps exception handling consistent across the library and gives users clearer, more specific error messages. The primary file to modify is `requests/models.py`, focusing on the content-streaming paths such as `iter_content` (a minimal sketch of the resulting behaviour follows the solution patch below). | 7ae3887 | -e .[socks]
pytest
pytest-cov
pytest-httpbin==1.0.0
pytest-mock
httpbin==0.7.0
trustme
wheel
chardet>=3.0.2,<3.1.0
idna>=2.5,<2.8
urllib3>=1.21.1,<1.24
certifi>=2017.4.17
# Flask Stack
Flask>1.0,<2.0
markupsafe<2.1
| python3.9 | 95f45673 | diff --git a/HISTORY.md b/HISTORY.md
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -6,6 +6,11 @@ dev
- \[Short description of non-trivial change.\]
+**Bugfixes**
+
+- Fixed urllib3 exception leak, wrapping `urllib3.exceptions.SSLError` with
+ `requests.exceptions.SSLError` for `content` and `iter_content`.
+
2.27.1 (2022-01-05)
-------------------
diff --git a/requests/models.py b/requests/models.py
--- a/requests/models.py
+++ b/requests/models.py
@@ -19,7 +19,12 @@ from urllib3.fields import RequestField
from urllib3.filepost import encode_multipart_formdata
from urllib3.util import parse_url
from urllib3.exceptions import (
- DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
+ DecodeError,
+ LocationParseError,
+ ProtocolError,
+ ReadTimeoutError,
+ SSLError,
+)
from io import UnsupportedOperation
from .hooks import default_hooks
@@ -32,6 +37,7 @@ from .exceptions import (
ContentDecodingError, ConnectionError, StreamConsumedError,
InvalidJSONError)
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
+from .exceptions import SSLError as RequestsSSLError
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
@@ -765,6 +771,8 @@ class Response(object):
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
+ except SSLError as e:
+ raise RequestsSSLError(e)
else:
# Standard file-like object.
while True:
diff --git a/tests/test_requests.py b/tests/test_requests.py
--- a/tests/test_requests.py
+++ b/tests/test_requests.py
@@ -14,6 +14,7 @@ import re
import io
import requests
import pytest
+import urllib3
from requests.adapters import HTTPAdapter
from requests.auth import HTTPDigestAuth, _basic_auth_str
from requests.compat import (
@@ -22,9 +23,25 @@ from requests.compat import (
from requests.cookies import (
cookiejar_from_dict, morsel_to_cookie)
from requests.exceptions import (
- ConnectionError, ConnectTimeout, InvalidSchema, InvalidURL,
- MissingSchema, ReadTimeout, Timeout, RetryError, RequestException, TooManyRedirects,
- ProxyError, InvalidHeader, UnrewindableBodyError, SSLError, InvalidProxyURL, InvalidJSONError)
+ ChunkedEncodingError,
+ ConnectionError,
+ ConnectTimeout,
+ ContentDecodingError,
+ InvalidHeader,
+ InvalidJSONError,
+ InvalidProxyURL,
+ InvalidSchema,
+ InvalidURL,
+ MissingSchema,
+ ProxyError,
+ ReadTimeout,
+ RequestException,
+ RetryError,
+ Timeout,
+ TooManyRedirects,
+ UnrewindableBodyError,
+)
+from requests.exceptions import SSLError as RequestsSSLError
from requests.models import PreparedRequest
from requests.structures import CaseInsensitiveDict
from requests.sessions import SessionRedirectMixin
@@ -910,7 +927,7 @@ class TestRequests:
"""
When underlying SSL problems occur, an SSLError is raised.
"""
- with pytest.raises(SSLError):
+ with pytest.raises(RequestsSSLError):
# Our local httpbin does not have a trusted CA, so this call will
# fail if we use our default trust bundle.
requests.get(httpbin_secure('status', '200'))
@@ -1320,6 +1337,26 @@ class TestRequests:
with pytest.raises(TypeError):
chunks = r.iter_content("1024")
+ @pytest.mark.parametrize(
+ 'exception, args, expected', (
+ (urllib3.exceptions.ProtocolError, tuple(), ChunkedEncodingError),
+ (urllib3.exceptions.DecodeError, tuple(), ContentDecodingError),
+ (urllib3.exceptions.ReadTimeoutError, (None, '', ''), ConnectionError),
+ (urllib3.exceptions.SSLError, tuple(), RequestsSSLError),
+ )
+ )
+ def test_iter_content_wraps_exceptions(
+ self, httpbin, mocker, exception, args, expected
+ ):
+ r = requests.Response()
+ r.raw = mocker.Mock()
+ # ReadTimeoutError can't be initialized by mock
+ # so we'll manually create the instance with args
+ r.raw.stream.side_effect = exception(*args)
+
+ with pytest.raises(expected):
+ next(r.iter_content(1024))
+
def test_request_and_response_are_pickleable(self, httpbin):
r = requests.get(httpbin('get'))
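Read as a usage change, the patch means an `urllib3.exceptions.SSLError` raised lazily while the body is streamed now surfaces as requests' own `SSLError`. Below is a minimal, network-free sketch of that behaviour, stubbing the raw stream much as the new test does; the `_FailingRaw` helper is hypothetical, purely for illustration:

```python
import urllib3
import requests
from requests.exceptions import SSLError as RequestsSSLError

class _FailingRaw:
    # Stand-in for urllib3's raw response: fails as soon as the body is
    # streamed, the way a late TLS error would during a chunked read.
    def stream(self, chunk_size, decode_content=True):
        raise urllib3.exceptions.SSLError("bad handshake mid-stream")

resp = requests.Response()
resp.raw = _FailingRaw()

try:
    next(resp.iter_content(1024))
except RequestsSSLError as exc:
    # With the patch applied, the urllib3 error arrives wrapped.
    print("wrapped as requests.exceptions.SSLError:", exc)
```

Without the patch, the same call leaks the raw `urllib3.exceptions.SSLError`, which is exactly the inconsistency the change removes.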
| [
{
"content": "Release History\n===============\n\ndev\n---\n\n- \\[Short description of non-trivial change.\\]\n\n2.27.1 (2022-01-05)\n-------------------\n\n**Bugfixes**\n\n- Fixed parsing issue that resulted in the `auth` component being\n dropped from proxy URLs. (#6028)\n\n2.27.0 (2022-01-03)\n-------------------\n\n**Improvements**\n\n- Officially added support for Python 3.10. (#5928)\n\n- Added a `requests.exceptions.JSONDecodeError` to unify JSON exceptions between\n Python 2 and 3. This gets raised in the `response.json()` method, and is\n backwards compatible as it inherits from previously thrown exceptions.\n Can be caught from `requests.exceptions.RequestException` as well. (#5856)\n\n- Improved error text for misnamed `InvalidSchema` and `MissingSchema`\n exceptions. This is a temporary fix until exceptions can be renamed\n (Schema->Scheme). (#6017)\n\n- Improved proxy parsing for proxy URLs missing a scheme. This will address\n recent changes to `urlparse` in Python 3.9+. (#5917)\n\n**Bugfixes**\n\n- Fixed defect in `extract_zipped_paths` which could result in an infinite loop\n for some paths. (#5851)\n\n- Fixed handling for `AttributeError` when calculating length of files obtained\n by `Tarfile.extractfile()`. (#5239)\n\n- Fixed urllib3 exception leak, wrapping `urllib3.exceptions.InvalidHeader` with\n `requests.exceptions.InvalidHeader`. (#5914)\n\n- Fixed bug where two Host headers were sent for chunked requests. (#5391)\n\n- Fixed regression in Requests 2.26.0 where `Proxy-Authorization` was\n incorrectly stripped from all requests sent with `Session.send`. (#5924)\n\n- Fixed performance regression in 2.26.0 for hosts with a large number of\n proxies available in the environment. (#5924)\n\n- Fixed idna exception leak, wrapping `UnicodeError` with\n `requests.exceptions.InvalidURL` for URLs with a leading dot (.) in the\n domain. (#5414)\n\n**Deprecations**\n\n- Requests support for Python 2.7 and 3.6 will be ending in 2022. While we\n don't have exact dates, Requests 2.27.x is likely to be the last release\n series providing support.\n\n2.26.0 (2021-07-13)\n-------------------\n\n**Improvements**\n\n- Requests now supports Brotli compression, if either the `brotli` or\n `brotlicffi` package is installed. (#5783)\n\n- `Session.send` now correctly resolves proxy configurations from both\n the Session and Request. Behavior now matches `Session.request`. (#5681)\n\n**Bugfixes**\n\n- Fixed a race condition in zip extraction when using Requests in parallel\n from zip archive. (#5707)\n\n**Dependencies**\n\n- Instead of `chardet`, use the MIT-licensed `charset_normalizer` for Python3\n to remove license ambiguity for projects bundling requests. If `chardet`\n is already installed on your machine it will be used instead of `charset_normalizer`\n to keep backwards compatibility. (#5797)\n\n You can also install `chardet` while installing requests by\n specifying `[use_chardet_on_py3]` extra as follows:\n\n ```shell\n pip install \"requests[use_chardet_on_py3]\"\n ```\n\n Python2 still depends upon the `chardet` module.\n\n- Requests now supports `idna` 3.x on Python 3. `idna` 2.x will continue to\n be used on Python 2 installations. (#5711)\n\n**Deprecations**\n\n- The `requests[security]` extra has been converted to a no-op install.\n PyOpenSSL is no longer the recommended secure option for Requests. (#5867)\n\n- Requests has officially dropped support for Python 3.5. 
(#5867)\n\n2.25.1 (2020-12-16)\n-------------------\n\n**Bugfixes**\n\n- Requests now treats `application/json` as `utf8` by default. Resolving\n inconsistencies between `r.text` and `r.json` output. (#5673)\n\n**Dependencies**\n\n- Requests now supports chardet v4.x.\n\n2.25.0 (2020-11-11)\n-------------------\n\n**Improvements**\n\n- Added support for NETRC environment variable. (#5643)\n\n**Dependencies**\n\n- Requests now supports urllib3 v1.26.\n\n**Deprecations**\n\n- Requests v2.25.x will be the last release series with support for Python 3.5.\n- The `requests[security]` extra is officially deprecated and will be removed\n in Requests v2.26.0.\n\n2.24.0 (2020-06-17)\n-------------------\n\n**Improvements**\n\n- pyOpenSSL TLS implementation is now only used if Python\n either doesn't have an `ssl` module or doesn't support\n SNI. Previously pyOpenSSL was unconditionally used if available.\n This applies even if pyOpenSSL is installed via the\n `requests[security]` extra (#5443)\n\n- Redirect resolution should now only occur when\n `allow_redirects` is True. (#5492)\n\n- No longer perform unnecessary Content-Length calculation for\n requests that won't use it. (#5496)\n\n2.23.0 (2020-02-19)\n-------------------\n\n**Improvements**\n\n- Remove defunct reference to `prefetch` in Session `__attrs__` (#5110)\n\n**Bugfixes**\n\n- Requests no longer outputs password in basic auth usage warning. (#5099)\n\n**Dependencies**\n\n- Pinning for `chardet` and `idna` now uses major version instead of minor.\n This hopefully reduces the need for releases every time a dependency is updated.\n\n2.22.0 (2019-05-15)\n-------------------\n\n**Dependencies**\n\n- Requests now supports urllib3 v1.25.2.\n (note: 1.25.0 and 1.25.1 are incompatible)\n\n**Deprecations**\n\n- Requests has officially stopped support for Python 3.4.\n\n2.21.0 (2018-12-10)\n-------------------\n\n**Dependencies**\n\n- Requests now supports idna v2.8.\n\n2.20.1 (2018-11-08)\n-------------------\n\n**Bugfixes**\n\n- Fixed bug with unintended Authorization header stripping for\n redirects using default ports (http/80, https/443).\n\n2.20.0 (2018-10-18)\n-------------------\n\n**Bugfixes**\n\n- Content-Type header parsing is now case-insensitive (e.g.\n charset=utf8 v Charset=utf8).\n- Fixed exception leak where certain redirect urls would raise\n uncaught urllib3 exceptions.\n- Requests removes Authorization header from requests redirected\n from https to http on the same hostname. (CVE-2018-18074)\n- `should_bypass_proxies` now handles URIs without hostnames (e.g.\n files).\n\n**Dependencies**\n\n- Requests now supports urllib3 v1.24.\n\n**Deprecations**\n\n- Requests has officially stopped support for Python 2.6.\n\n2.19.1 (2018-06-14)\n-------------------\n\n**Bugfixes**\n\n- Fixed issue where status\\_codes.py's `init` function failed trying\n to append to a `__doc__` value of `None`.\n\n2.19.0 (2018-06-12)\n-------------------\n\n**Improvements**\n\n- Warn user about possible slowdown when using cryptography version\n < 1.3.4\n- Check for invalid host in proxy URL, before forwarding request to\n adapter.\n- Fragments are now properly maintained across redirects. 
(RFC7231\n 7.1.2)\n- Removed use of cgi module to expedite library load time.\n- Added support for SHA-256 and SHA-512 digest auth algorithms.\n- Minor performance improvement to `Request.content`.\n- Migrate to using collections.abc for 3.7 compatibility.\n\n**Bugfixes**\n\n- Parsing empty `Link` headers with `parse_header_links()` no longer\n return one bogus entry.\n- Fixed issue where loading the default certificate bundle from a zip\n archive would raise an `IOError`.\n- Fixed issue with unexpected `ImportError` on windows system which do\n not support `winreg` module.\n- DNS resolution in proxy bypass no longer includes the username and\n password in the request. This also fixes the issue of DNS queries\n failing on macOS.\n- Properly normalize adapter prefixes for url comparison.\n- Passing `None` as a file pointer to the `files` param no longer\n raises an exception.\n- Calling `copy` on a `RequestsCookieJar` will now preserve the cookie\n policy correctly.\n\n**Dependencies**\n\n- We now support idna v2.7.\n- We now support urllib3 v1.23.\n\n2.18.4 (2017-08-15)\n-------------------\n\n**Improvements**\n\n- Error messages for invalid headers now include the header name for\n easier debugging\n\n**Dependencies**\n\n- We now support idna v2.6.\n\n2.18.3 (2017-08-02)\n-------------------\n\n**Improvements**\n\n- Running `$ python -m requests.help` now includes the installed\n version of idna.\n\n**Bugfixes**\n\n- Fixed issue where Requests would raise `ConnectionError` instead of\n `SSLError` when encountering SSL problems when using urllib3 v1.22.\n\n2.18.2 (2017-07-25)\n-------------------\n\n**Bugfixes**\n\n- `requests.help` no longer fails on Python 2.6 due to the absence of\n `ssl.OPENSSL_VERSION_NUMBER`.\n\n**Dependencies**\n\n- We now support urllib3 v1.22.\n\n2.18.1 (2017-06-14)\n-------------------\n\n**Bugfixes**\n\n- Fix an error in the packaging whereby the `*.whl` contained\n incorrect data that regressed the fix in v2.17.3.\n\n2.18.0 (2017-06-14)\n-------------------\n\n**Improvements**\n\n- `Response` is now a context manager, so can be used directly in a\n `with` statement without first having to be wrapped by\n `contextlib.closing()`.\n\n**Bugfixes**\n\n- Resolve installation failure if multiprocessing is not available\n- Resolve tests crash if multiprocessing is not able to determine the\n number of CPU cores\n- Resolve error swallowing in utils set\\_environ generator\n\n2.17.3 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Improved `packages` namespace identity support, for monkeypatching\n libraries.\n\n2.17.2 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Improved `packages` namespace identity support, for monkeypatching\n libraries.\n\n2.17.1 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Improved `packages` namespace identity support, for monkeypatching\n libraries.\n\n2.17.0 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Removal of the 301 redirect cache. 
This improves thread-safety.\n\n2.16.5 (2017-05-28)\n-------------------\n\n- Improvements to `$ python -m requests.help`.\n\n2.16.4 (2017-05-27)\n-------------------\n\n- Introduction of the `$ python -m requests.help` command, for\n debugging with maintainers!\n\n2.16.3 (2017-05-27)\n-------------------\n\n- Further restored the `requests.packages` namespace for compatibility\n reasons.\n\n2.16.2 (2017-05-27)\n-------------------\n\n- Further restored the `requests.packages` namespace for compatibility\n reasons.\n\nNo code modification (noted below) should be necessary any longer.\n\n2.16.1 (2017-05-27)\n-------------------\n\n- Restored the `requests.packages` namespace for compatibility\n reasons.\n- Bugfix for `urllib3` version parsing.\n\n**Note**: code that was written to import against the\n`requests.packages` namespace previously will have to import code that\nrests at this module-level now.\n\nFor example:\n\n from requests.packages.urllib3.poolmanager import PoolManager\n\nWill need to be re-written to be:\n\n from requests.packages import urllib3\n urllib3.poolmanager.PoolManager\n\nOr, even better:\n\n from urllib3.poolmanager import PoolManager\n\n2.16.0 (2017-05-26)\n-------------------\n\n- Unvendor ALL the things!\n\n2.15.1 (2017-05-26)\n-------------------\n\n- Everyone makes mistakes.\n\n2.15.0 (2017-05-26)\n-------------------\n\n**Improvements**\n\n- Introduction of the `Response.next` property, for getting the next\n `PreparedResponse` from a redirect chain (when\n `allow_redirects=False`).\n- Internal refactoring of `__version__` module.\n\n**Bugfixes**\n\n- Restored once-optional parameter for\n `requests.utils.get_environ_proxies()`.\n\n2.14.2 (2017-05-10)\n-------------------\n\n**Bugfixes**\n\n- Changed a less-than to an equal-to and an or in the dependency\n markers to widen compatibility with older setuptools releases.\n\n2.14.1 (2017-05-09)\n-------------------\n\n**Bugfixes**\n\n- Changed the dependency markers to widen compatibility with older pip\n releases.\n\n2.14.0 (2017-05-09)\n-------------------\n\n**Improvements**\n\n- It is now possible to pass `no_proxy` as a key to the `proxies`\n dictionary to provide handling similar to the `NO_PROXY` environment\n variable.\n- When users provide invalid paths to certificate bundle files or\n directories Requests now raises `IOError`, rather than failing at\n the time of the HTTPS request with a fairly inscrutable certificate\n validation error.\n- The behavior of `SessionRedirectMixin` was slightly altered.\n `resolve_redirects` will now detect a redirect by calling\n `get_redirect_target(response)` instead of directly querying\n `Response.is_redirect` and `Response.headers['location']`. Advanced\n users will be able to process malformed redirects more easily.\n- Changed the internal calculation of elapsed request time to have\n higher resolution on Windows.\n- Added `win_inet_pton` as conditional dependency for the `[socks]`\n extra on Windows with Python 2.7.\n- Changed the proxy bypass implementation on Windows: the proxy bypass\n check doesn't use forward and reverse DNS requests anymore\n- URLs with schemes that begin with `http` but are not `http` or\n `https` no longer have their host parts forced to lowercase.\n\n**Bugfixes**\n\n- Much improved handling of non-ASCII `Location` header values in\n redirects. 
Fewer `UnicodeDecodeErrors` are encountered on Python 2,\n and Python 3 now correctly understands that Latin-1 is unlikely to\n be the correct encoding.\n- If an attempt to `seek` file to find out its length fails, we now\n appropriately handle that by aborting our content-length\n calculations.\n- Restricted `HTTPDigestAuth` to only respond to auth challenges made\n on 4XX responses, rather than to all auth challenges.\n- Fixed some code that was firing `DeprecationWarning` on Python 3.6.\n- The dismayed person emoticon (`/o\\\\`) no longer has a big head. I'm\n sure this is what you were all worrying about most.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to v1.21.1.\n- Updated bundled chardet to v3.0.2.\n- Updated bundled idna to v2.5.\n- Updated bundled certifi to 2017.4.17.\n\n2.13.0 (2017-01-24)\n-------------------\n\n**Features**\n\n- Only load the `idna` library when we've determined we need it. This\n will save some memory for users.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.20.\n- Updated bundled idna to 2.2.\n\n2.12.5 (2017-01-18)\n-------------------\n\n**Bugfixes**\n\n- Fixed an issue with JSON encoding detection, specifically detecting\n big-endian UTF-32 with BOM.\n\n2.12.4 (2016-12-14)\n-------------------\n\n**Bugfixes**\n\n- Fixed regression from 2.12.2 where non-string types were rejected in\n the basic auth parameters. While support for this behaviour has been\n re-added, the behaviour is deprecated and will be removed in the\n future.\n\n2.12.3 (2016-12-01)\n-------------------\n\n**Bugfixes**\n\n- Fixed regression from v2.12.1 for URLs with schemes that begin with\n \"http\". These URLs have historically been processed as though they\n were HTTP-schemed URLs, and so have had parameters added. This was\n removed in v2.12.2 in an overzealous attempt to resolve problems\n with IDNA-encoding those URLs. This change was reverted: the other\n fixes for IDNA-encoding have been judged to be sufficient to return\n to the behaviour Requests had before v2.12.0.\n\n2.12.2 (2016-11-30)\n-------------------\n\n**Bugfixes**\n\n- Fixed several issues with IDNA-encoding URLs that are technically\n invalid but which are widely accepted. Requests will now attempt to\n IDNA-encode a URL if it can but, if it fails, and the host contains\n only ASCII characters, it will be passed through optimistically.\n This will allow users to opt-in to using IDNA2003 themselves if they\n want to, and will also allow technically invalid but still common\n hostnames.\n- Fixed an issue where URLs with leading whitespace would raise\n `InvalidSchema` errors.\n- Fixed an issue where some URLs without the HTTP or HTTPS schemes\n would still have HTTP URL preparation applied to them.\n- Fixed an issue where Unicode strings could not be used in basic\n auth.\n- Fixed an issue encountered by some Requests plugins where\n constructing a Response object would cause `Response.content` to\n raise an `AttributeError`.\n\n2.12.1 (2016-11-16)\n-------------------\n\n**Bugfixes**\n\n- Updated setuptools 'security' extra for the new PyOpenSSL backend in\n urllib3.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.19.1.\n\n2.12.0 (2016-11-15)\n-------------------\n\n**Improvements**\n\n- Updated support for internationalized domain names from IDNA2003 to\n IDNA2008. 
This updated support is required for several forms of IDNs\n and is mandatory for .de domains.\n- Much improved heuristics for guessing content lengths: Requests will\n no longer read an entire `StringIO` into memory.\n- Much improved logic for recalculating `Content-Length` headers for\n `PreparedRequest` objects.\n- Improved tolerance for file-like objects that have no `tell` method\n but do have a `seek` method.\n- Anything that is a subclass of `Mapping` is now treated like a\n dictionary by the `data=` keyword argument.\n- Requests now tolerates empty passwords in proxy credentials, rather\n than stripping the credentials.\n- If a request is made with a file-like object as the body and that\n request is redirected with a 307 or 308 status code, Requests will\n now attempt to rewind the body object so it can be replayed.\n\n**Bugfixes**\n\n- When calling `response.close`, the call to `close` will be\n propagated through to non-urllib3 backends.\n- Fixed issue where the `ALL_PROXY` environment variable would be\n preferred over scheme-specific variables like `HTTP_PROXY`.\n- Fixed issue where non-UTF8 reason phrases got severely mangled by\n falling back to decoding using ISO 8859-1 instead.\n- Fixed a bug where Requests would not correctly correlate cookies set\n when using custom Host headers if those Host headers did not use the\n native string type for the platform.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.19.\n- Updated bundled certifi certs to 2016.09.26.\n\n2.11.1 (2016-08-17)\n-------------------\n\n**Bugfixes**\n\n- Fixed a bug when using `iter_content` with `decode_unicode=True` for\n streamed bodies would raise `AttributeError`. This bug was\n introduced in 2.11.\n- Strip Content-Type and Transfer-Encoding headers from the header\n block when following a redirect that transforms the verb from\n POST/PUT to GET.\n\n2.11.0 (2016-08-08)\n-------------------\n\n**Improvements**\n\n- Added support for the `ALL_PROXY` environment variable.\n- Reject header values that contain leading whitespace or newline\n characters to reduce risk of header smuggling.\n\n**Bugfixes**\n\n- Fixed occasional `TypeError` when attempting to decode a JSON\n response that occurred in an error case. Now correctly returns a\n `ValueError`.\n- Requests would incorrectly ignore a non-CIDR IP address in the\n `NO_PROXY` environment variables: Requests now treats it as a\n specific IP.\n- Fixed a bug when sending JSON data that could cause us to encounter\n obscure OpenSSL errors in certain network conditions (yes, really).\n- Added type checks to ensure that `iter_content` only accepts\n integers and `None` for chunk sizes.\n- Fixed issue where responses whose body had not been fully consumed\n would have the underlying connection closed but not returned to the\n connection pool, which could cause Requests to hang in situations\n where the `HTTPAdapter` had been configured to use a blocking\n connection pool.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.16.\n- Some previous releases accidentally accepted non-strings as\n acceptable header values. This release does not.\n\n2.10.0 (2016-04-29)\n-------------------\n\n**New Features**\n\n- SOCKS Proxy Support! 
(requires PySocks;\n `$ pip install requests[socks]`)\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.15.1.\n\n2.9.2 (2016-04-29)\n------------------\n\n**Improvements**\n\n- Change built-in CaseInsensitiveDict (used for headers) to use\n OrderedDict as its underlying datastore.\n\n**Bugfixes**\n\n- Don't use redirect\\_cache if allow\\_redirects=False\n- When passed objects that throw exceptions from `tell()`, send them\n via chunked transfer encoding instead of failing.\n- Raise a ProxyError for proxy related connection issues.\n\n2.9.1 (2015-12-21)\n------------------\n\n**Bugfixes**\n\n- Resolve regression introduced in 2.9.0 that made it impossible to\n send binary strings as bodies in Python 3.\n- Fixed errors when calculating cookie expiration dates in certain\n locales.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.13.1.\n\n2.9.0 (2015-12-15)\n------------------\n\n**Minor Improvements** (Backwards compatible)\n\n- The `verify` keyword argument now supports being passed a path to a\n directory of CA certificates, not just a single-file bundle.\n- Warnings are now emitted when sending files opened in text mode.\n- Added the 511 Network Authentication Required status code to the\n status code registry.\n\n**Bugfixes**\n\n- For file-like objects that are not sought to the very beginning, we\n now send the content length for the number of bytes we will actually\n read, rather than the total size of the file, allowing partial file\n uploads.\n- When uploading file-like objects, if they are empty or have no\n obvious content length we set `Transfer-Encoding: chunked` rather\n than `Content-Length: 0`.\n- We correctly receive the response in buffered mode when uploading\n chunked bodies.\n- We now handle being passed a query string as a bytestring on Python\n 3, by decoding it as UTF-8.\n- Sessions are now closed in all cases (exceptional and not) when\n using the functional API rather than leaking and waiting for the\n garbage collector to clean them up.\n- Correctly handle digest auth headers with a malformed `qop`\n directive that contains no token, by treating it the same as if no\n `qop` directive was provided at all.\n- Minor performance improvements when removing specific cookies by\n name.\n\n**Miscellaneous**\n\n- Updated urllib3 to 1.13.\n\n2.8.1 (2015-10-13)\n------------------\n\n**Bugfixes**\n\n- Update certificate bundle to match `certifi` 2015.9.6.2's weak\n certificate bundle.\n- Fix a bug in 2.8.0 where requests would raise `ConnectTimeout`\n instead of `ConnectionError`\n- When using the PreparedRequest flow, requests will now correctly\n respect the `json` parameter. Broken in 2.8.0.\n- When using the PreparedRequest flow, requests will now correctly\n handle a Unicode-string method name on Python 2. Broken in 2.8.0.\n\n2.8.0 (2015-10-05)\n------------------\n\n**Minor Improvements** (Backwards Compatible)\n\n- Requests now supports per-host proxies. This allows the `proxies`\n dictionary to have entries of the form\n `{'<scheme>://<hostname>': '<proxy>'}`. Host-specific proxies will\n be used in preference to the previously-supported scheme-specific\n ones, but the previous syntax will continue to work.\n- `Response.raise_for_status` now prints the URL that failed as part\n of the exception message.\n- `requests.utils.get_netrc_auth` now takes an `raise_errors` kwarg,\n defaulting to `False`. 
When `True`, errors parsing `.netrc` files\n cause exceptions to be thrown.\n- Change to bundled projects import logic to make it easier to\n unbundle requests downstream.\n- Changed the default User-Agent string to avoid leaking data on\n Linux: now contains only the requests version.\n\n**Bugfixes**\n\n- The `json` parameter to `post()` and friends will now only be used\n if neither `data` nor `files` are present, consistent with the\n documentation.\n- We now ignore empty fields in the `NO_PROXY` environment variable.\n- Fixed problem where `httplib.BadStatusLine` would get raised if\n combining `stream=True` with `contextlib.closing`.\n- Prevented bugs where we would attempt to return the same connection\n back to the connection pool twice when sending a Chunked body.\n- Miscellaneous minor internal changes.\n- Digest Auth support is now thread safe.\n\n**Updates**\n\n- Updated urllib3 to 1.12.\n\n2.7.0 (2015-05-03)\n------------------\n\nThis is the first release that follows our new release process. For\nmore, see [our\ndocumentation](https://requests.readthedocs.io/en/latest/community/release-process/).\n\n**Bugfixes**\n\n- Updated urllib3 to 1.10.4, resolving several bugs involving chunked\n transfer encoding and response framing.\n\n2.6.2 (2015-04-23)\n------------------\n\n**Bugfixes**\n\n- Fix regression where compressed data that was sent as chunked data\n was not properly decompressed. (\\#2561)\n\n2.6.1 (2015-04-22)\n------------------\n\n**Bugfixes**\n\n- Remove VendorAlias import machinery introduced in v2.5.2.\n- Simplify the PreparedRequest.prepare API: We no longer require the\n user to pass an empty list to the hooks keyword argument. (c.f.\n \\#2552)\n- Resolve redirects now receives and forwards all of the original\n arguments to the adapter. (\\#2503)\n- Handle UnicodeDecodeErrors when trying to deal with a unicode URL\n that cannot be encoded in ASCII. (\\#2540)\n- Populate the parsed path of the URI field when performing Digest\n Authentication. (\\#2426)\n- Copy a PreparedRequest's CookieJar more reliably when it is not an\n instance of RequestsCookieJar. (\\#2527)\n\n2.6.0 (2015-03-14)\n------------------\n\n**Bugfixes**\n\n- CVE-2015-2296: Fix handling of cookies on redirect. Previously a\n cookie without a host value set would use the hostname for the\n redirected URL exposing requests users to session fixation attacks\n and potentially cookie stealing. This was disclosed privately by\n Matthew Daley of [BugFuzz](https://bugfuzz.com). This affects all\n versions of requests from v2.1.0 to v2.5.3 (inclusive on both ends).\n- Fix error when requests is an `install_requires` dependency and\n `python setup.py test` is run. (\\#2462)\n- Fix error when urllib3 is unbundled and requests continues to use\n the vendored import location.\n- Include fixes to `urllib3`'s header handling.\n- Requests' handling of unvendored dependencies is now more\n restrictive.\n\n**Features and Improvements**\n\n- Support bytearrays when passed as parameters in the `files`\n argument. (\\#2468)\n- Avoid data duplication when creating a request with `str`, `bytes`,\n or `bytearray` input to the `files` argument.\n\n2.5.3 (2015-02-24)\n------------------\n\n**Bugfixes**\n\n- Revert changes to our vendored certificate bundle. 
For more context\n see (\\#2455, \\#2456, and <https://bugs.python.org/issue23476>)\n\n2.5.2 (2015-02-23)\n------------------\n\n**Features and Improvements**\n\n- Add sha256 fingerprint support.\n ([shazow/urllib3\\#540](https://github.com/shazow/urllib3/pull/540))\n- Improve the performance of headers.\n ([shazow/urllib3\\#544](https://github.com/shazow/urllib3/pull/544))\n\n**Bugfixes**\n\n- Copy pip's import machinery. When downstream redistributors remove\n requests.packages.urllib3 the import machinery will continue to let\n those same symbols work. Example usage in requests' documentation\n and 3rd-party libraries relying on the vendored copies of urllib3\n will work without having to fallback to the system urllib3.\n- Attempt to quote parts of the URL on redirect if unquoting and then\n quoting fails. (\\#2356)\n- Fix filename type check for multipart form-data uploads. (\\#2411)\n- Properly handle the case where a server issuing digest\n authentication challenges provides both auth and auth-int\n qop-values. (\\#2408)\n- Fix a socket leak.\n ([shazow/urllib3\\#549](https://github.com/shazow/urllib3/pull/549))\n- Fix multiple `Set-Cookie` headers properly.\n ([shazow/urllib3\\#534](https://github.com/shazow/urllib3/pull/534))\n- Disable the built-in hostname verification.\n ([shazow/urllib3\\#526](https://github.com/shazow/urllib3/pull/526))\n- Fix the behaviour of decoding an exhausted stream.\n ([shazow/urllib3\\#535](https://github.com/shazow/urllib3/pull/535))\n\n**Security**\n\n- Pulled in an updated `cacert.pem`.\n- Drop RC4 from the default cipher list.\n ([shazow/urllib3\\#551](https://github.com/shazow/urllib3/pull/551))\n\n2.5.1 (2014-12-23)\n------------------\n\n**Behavioural Changes**\n\n- Only catch HTTPErrors in raise\\_for\\_status (\\#2382)\n\n**Bugfixes**\n\n- Handle LocationParseError from urllib3 (\\#2344)\n- Handle file-like object filenames that are not strings (\\#2379)\n- Unbreak HTTPDigestAuth handler. 
Allow new nonces to be negotiated\n (\\#2389)\n\n2.5.0 (2014-12-01)\n------------------\n\n**Improvements**\n\n- Allow usage of urllib3's Retry object with HTTPAdapters (\\#2216)\n- The `iter_lines` method on a response now accepts a delimiter with\n which to split the content (\\#2295)\n\n**Behavioural Changes**\n\n- Add deprecation warnings to functions in requests.utils that will be\n removed in 3.0 (\\#2309)\n- Sessions used by the functional API are always closed (\\#2326)\n- Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9)\n (\\#2323)\n\n**Bugfixes**\n\n- Only parse the URL once (\\#2353)\n- Allow Content-Length header to always be overridden (\\#2332)\n- Properly handle files in HTTPDigestAuth (\\#2333)\n- Cap redirect\\_cache size to prevent memory abuse (\\#2299)\n- Fix HTTPDigestAuth handling of redirects after authenticating\n successfully (\\#2253)\n- Fix crash with custom method parameter to Session.request (\\#2317)\n- Fix how Link headers are parsed using the regular expression library\n (\\#2271)\n\n**Documentation**\n\n- Add more references for interlinking (\\#2348)\n- Update CSS for theme (\\#2290)\n- Update width of buttons and sidebar (\\#2289)\n- Replace references of Gittip with Gratipay (\\#2282)\n- Add link to changelog in sidebar (\\#2273)\n\n2.4.3 (2014-10-06)\n------------------\n\n**Bugfixes**\n\n- Unicode URL improvements for Python 2.\n- Re-order JSON param for backwards compat.\n- Automatically defrag authentication schemes from host/pass URIs.\n ([\\#2249](https://github.com/psf/requests/issues/2249))\n\n2.4.2 (2014-10-05)\n------------------\n\n**Improvements**\n\n- FINALLY! Add json parameter for uploads!\n ([\\#2258](https://github.com/psf/requests/pull/2258))\n- Support for bytestring URLs on Python 3.x\n ([\\#2238](https://github.com/psf/requests/pull/2238))\n\n**Bugfixes**\n\n- Avoid getting stuck in a loop\n ([\\#2244](https://github.com/psf/requests/pull/2244))\n- Multiple calls to iter\\* fail with unhelpful error.\n ([\\#2240](https://github.com/psf/requests/issues/2240),\n [\\#2241](https://github.com/psf/requests/issues/2241))\n\n**Documentation**\n\n- Correct redirection introduction\n ([\\#2245](https://github.com/psf/requests/pull/2245/))\n- Added example of how to send multiple files in one request.\n ([\\#2227](https://github.com/psf/requests/pull/2227/))\n- Clarify how to pass a custom set of CAs\n ([\\#2248](https://github.com/psf/requests/pull/2248/))\n\n2.4.1 (2014-09-09)\n------------------\n\n- Now has a \"security\" package extras set,\n `$ pip install requests[security]`\n- Requests will now use Certifi if it is available.\n- Capture and re-raise urllib3 ProtocolError\n- Bugfix for responses that attempt to redirect to themselves forever\n (wtf?).\n\n2.4.0 (2014-08-29)\n------------------\n\n**Behavioral Changes**\n\n- `Connection: keep-alive` header is now sent automatically.\n\n**Improvements**\n\n- Support for connect timeouts! 
Timeout now accepts a tuple (connect,\n read) which is used to set individual connect and read timeouts.\n- Allow copying of PreparedRequests without headers/cookies.\n- Updated bundled urllib3 version.\n- Refactored settings loading from environment -- new\n Session.merge\\_environment\\_settings.\n- Handle socket errors in iter\\_content.\n\n2.3.0 (2014-05-16)\n------------------\n\n**API Changes**\n\n- New `Response` property `is_redirect`, which is true when the\n library could have processed this response as a redirection (whether\n or not it actually did).\n- The `timeout` parameter now affects requests with both `stream=True`\n and `stream=False` equally.\n- The change in v2.0.0 to mandate explicit proxy schemes has been\n reverted. Proxy schemes now default to `http://`.\n- The `CaseInsensitiveDict` used for HTTP headers now behaves like a\n normal dictionary when references as string or viewed in the\n interpreter.\n\n**Bugfixes**\n\n- No longer expose Authorization or Proxy-Authorization headers on\n redirect. Fix CVE-2014-1829 and CVE-2014-1830 respectively.\n- Authorization is re-evaluated each redirect.\n- On redirect, pass url as native strings.\n- Fall-back to autodetected encoding for JSON when Unicode detection\n fails.\n- Headers set to `None` on the `Session` are now correctly not sent.\n- Correctly honor `decode_unicode` even if it wasn't used earlier in\n the same response.\n- Stop advertising `compress` as a supported Content-Encoding.\n- The `Response.history` parameter is now always a list.\n- Many, many `urllib3` bugfixes.\n\n2.2.1 (2014-01-23)\n------------------\n\n**Bugfixes**\n\n- Fixes incorrect parsing of proxy credentials that contain a literal\n or encoded '\\#' character.\n- Assorted urllib3 fixes.\n\n2.2.0 (2014-01-09)\n------------------\n\n**API Changes**\n\n- New exception: `ContentDecodingError`. Raised instead of `urllib3`\n `DecodeError` exceptions.\n\n**Bugfixes**\n\n- Avoid many many exceptions from the buggy implementation of\n `proxy_bypass` on OS X in Python 2.6.\n- Avoid crashing when attempting to get authentication credentials\n from \\~/.netrc when running as a user without a home directory.\n- Use the correct pool size for pools of connections to proxies.\n- Fix iteration of `CookieJar` objects.\n- Ensure that cookies are persisted over redirect.\n- Switch back to using chardet, since it has merged with charade.\n\n2.1.0 (2013-12-05)\n------------------\n\n- Updated CA Bundle, of course.\n- Cookies set on individual Requests through a `Session` (e.g. 
via\n `Session.get()`) are no longer persisted to the `Session`.\n- Clean up connections when we hit problems during chunked upload,\n rather than leaking them.\n- Return connections to the pool when a chunked upload is successful,\n rather than leaking it.\n- Match the HTTPbis recommendation for HTTP 301 redirects.\n- Prevent hanging when using streaming uploads and Digest Auth when a\n 401 is received.\n- Values of headers set by Requests are now always the native string\n type.\n- Fix previously broken SNI support.\n- Fix accessing HTTP proxies using proxy authentication.\n- Unencode HTTP Basic usernames and passwords extracted from URLs.\n- Support for IP address ranges for no\\_proxy environment variable\n- Parse headers correctly when users override the default `Host:`\n header.\n- Avoid munging the URL in case of case-sensitive servers.\n- Looser URL handling for non-HTTP/HTTPS urls.\n- Accept unicode methods in Python 2.6 and 2.7.\n- More resilient cookie handling.\n- Make `Response` objects pickleable.\n- Actually added MD5-sess to Digest Auth instead of pretending to like\n last time.\n- Updated internal urllib3.\n- Fixed @Lukasa's lack of taste.\n\n2.0.1 (2013-10-24)\n------------------\n\n- Updated included CA Bundle with new mistrusts and automated process\n for the future\n- Added MD5-sess to Digest Auth\n- Accept per-file headers in multipart file POST messages.\n- Fixed: Don't send the full URL on CONNECT messages.\n- Fixed: Correctly lowercase a redirect scheme.\n- Fixed: Cookies not persisted when set via functional API.\n- Fixed: Translate urllib3 ProxyError into a requests ProxyError\n derived from ConnectionError.\n- Updated internal urllib3 and chardet.\n\n2.0.0 (2013-09-24)\n------------------\n\n**API Changes:**\n\n- Keys in the Headers dictionary are now native strings on all Python\n versions, i.e. bytestrings on Python 2, unicode on Python 3.\n- Proxy URLs now *must* have an explicit scheme. A `MissingSchema`\n exception will be raised if they don't.\n- Timeouts now apply to read time if `Stream=False`.\n- `RequestException` is now a subclass of `IOError`, not\n `RuntimeError`.\n- Added new method to `PreparedRequest` objects:\n `PreparedRequest.copy()`.\n- Added new method to `Session` objects: `Session.update_request()`.\n This method updates a `Request` object with the data (e.g. cookies)\n stored on the `Session`.\n- Added new method to `Session` objects: `Session.prepare_request()`.\n This method updates and prepares a `Request` object, and returns the\n corresponding `PreparedRequest` object.\n- Added new method to `HTTPAdapter` objects:\n `HTTPAdapter.proxy_headers()`. This should not be called directly,\n but improves the subclass interface.\n- `httplib.IncompleteRead` exceptions caused by incorrect chunked\n encoding will now raise a Requests `ChunkedEncodingError` instead.\n- Invalid percent-escape sequences now cause a Requests `InvalidURL`\n exception to be raised.\n- HTTP 208 no longer uses reason phrase `\"im_used\"`. Correctly uses\n `\"already_reported\"`.\n- HTTP 226 reason added (`\"im_used\"`).\n\n**Bugfixes:**\n\n- Vastly improved proxy support, including the CONNECT verb. 
Special\n thanks to the many contributors who worked towards this improvement.\n- Cookies are now properly managed when 401 authentication responses\n are received.\n- Chunked encoding fixes.\n- Support for mixed case schemes.\n- Better handling of streaming downloads.\n- Retrieve environment proxies from more locations.\n- Minor cookies fixes.\n- Improved redirect behaviour.\n- Improved streaming behaviour, particularly for compressed data.\n- Miscellaneous small Python 3 text encoding bugs.\n- `.netrc` no longer overrides explicit auth.\n- Cookies set by hooks are now correctly persisted on Sessions.\n- Fix problem with cookies that specify port numbers in their host\n field.\n- `BytesIO` can be used to perform streaming uploads.\n- More generous parsing of the `no_proxy` environment variable.\n- Non-string objects can be passed in data values alongside files.\n\n1.2.3 (2013-05-25)\n------------------\n\n- Simple packaging fix\n\n1.2.2 (2013-05-23)\n------------------\n\n- Simple packaging fix\n\n1.2.1 (2013-05-20)\n------------------\n\n- 301 and 302 redirects now change the verb to GET for all verbs, not\n just POST, improving browser compatibility.\n- Python 3.3.2 compatibility\n- Always percent-encode location headers\n- Fix connection adapter matching to be most-specific first\n- new argument to the default connection adapter for passing a block\n argument\n- prevent a KeyError when there's no link headers\n\n1.2.0 (2013-03-31)\n------------------\n\n- Fixed cookies on sessions and on requests\n- Significantly change how hooks are dispatched - hooks now receive\n all the arguments specified by the user when making a request so\n hooks can make a secondary request with the same parameters. This is\n especially necessary for authentication handler authors\n- certifi support was removed\n- Fixed bug where using OAuth 1 with body `signature_type` sent no\n data\n- Major proxy work thanks to @Lukasa including parsing of proxy\n authentication from the proxy url\n- Fix DigestAuth handling too many 401s\n- Update vendored urllib3 to include SSL bug fixes\n- Allow keyword arguments to be passed to `json.loads()` via the\n `Response.json()` method\n- Don't send `Content-Length` header by default on `GET` or `HEAD`\n requests\n- Add `elapsed` attribute to `Response` objects to time how long a\n request took.\n- Fix `RequestsCookieJar`\n- Sessions and Adapters are now picklable, i.e., can be used with the\n multiprocessing library\n- Update charade to version 1.0.3\n\nThe change in how hooks are dispatched will likely cause a great deal of\nissues.\n\n1.1.0 (2013-01-10)\n------------------\n\n- CHUNKED REQUESTS\n- Support for iterable response bodies\n- Assume servers persist redirect params\n- Allow explicit content types to be specified for file data\n- Make merge\\_kwargs case-insensitive when looking up keys\n\n1.0.3 (2012-12-18)\n------------------\n\n- Fix file upload encoding bug\n- Fix cookie behavior\n\n1.0.2 (2012-12-17)\n------------------\n\n- Proxy fix for HTTPAdapter.\n\n1.0.1 (2012-12-17)\n------------------\n\n- Cert verification exception bug.\n- Proxy fix for HTTPAdapter.\n\n1.0.0 (2012-12-17)\n------------------\n\n- Massive Refactor and Simplification\n- Switch to Apache 2.0 license\n- Swappable Connection Adapters\n- Mountable Connection Adapters\n- Mutable ProcessedRequest chain\n- /s/prefetch/stream\n- Removal of all configuration\n- Standard library logging\n- Make Response.json() callable, not property.\n- Usage of new charade project, which provides python 2 and 3\n 
simultaneous chardet.\n- Removal of all hooks except 'response'\n- Removal of all authentication helpers (OAuth, Kerberos)\n\nThis is not a backwards compatible change.\n\n0.14.2 (2012-10-27)\n-------------------\n\n- Improved mime-compatible JSON handling\n- Proxy fixes\n- Path hack fixes\n- Case-Insensitive Content-Encoding headers\n- Support for CJK parameters in form posts\n\n0.14.1 (2012-10-01)\n-------------------\n\n- Python 3.3 Compatibility\n- Simply default accept-encoding\n- Bugfixes\n\n0.14.0 (2012-09-02)\n-------------------\n\n- No more iter\\_content errors if already downloaded.\n\n0.13.9 (2012-08-25)\n-------------------\n\n- Fix for OAuth + POSTs\n- Remove exception eating from dispatch\\_hook\n- General bugfixes\n\n0.13.8 (2012-08-21)\n-------------------\n\n- Incredible Link header support :)\n\n0.13.7 (2012-08-19)\n-------------------\n\n- Support for (key, value) lists everywhere.\n- Digest Authentication improvements.\n- Ensure proxy exclusions work properly.\n- Clearer UnicodeError exceptions.\n- Automatic casting of URLs to strings (fURL and such)\n- Bugfixes.\n\n0.13.6 (2012-08-06)\n-------------------\n\n- Long awaited fix for hanging connections!\n\n0.13.5 (2012-07-27)\n-------------------\n\n- Packaging fix\n\n0.13.4 (2012-07-27)\n-------------------\n\n- GSSAPI/Kerberos authentication!\n- App Engine 2.7 Fixes!\n- Fix leaking connections (from urllib3 update)\n- OAuthlib path hack fix\n- OAuthlib URL parameters fix.\n\n0.13.3 (2012-07-12)\n-------------------\n\n- Use simplejson if available.\n- Do not hide SSLErrors behind Timeouts.\n- Fixed param handling with urls containing fragments.\n- Significantly improved information in User Agent.\n- client certificates are ignored when verify=False\n\n0.13.2 (2012-06-28)\n-------------------\n\n- Zero dependencies (once again)!\n- New: Response.reason\n- Sign querystring parameters in OAuth 1.0\n- Client certificates no longer ignored when verify=False\n- Add openSUSE certificate support\n\n0.13.1 (2012-06-07)\n-------------------\n\n- Allow passing a file or file-like object as data.\n- Allow hooks to return responses that indicate errors.\n- Fix Response.text and Response.json for body-less responses.\n\n0.13.0 (2012-05-29)\n-------------------\n\n- Removal of Requests.async in favor of\n [grequests](https://github.com/kennethreitz/grequests)\n- Allow disabling of cookie persistence.\n- New implementation of safe\\_mode\n- cookies.get now supports default argument\n- Session cookies not saved when Session.request is called with\n return\\_response=False\n- Env: no\\_proxy support.\n- RequestsCookieJar improvements.\n- Various bug fixes.\n\n0.12.1 (2012-05-08)\n-------------------\n\n- New `Response.json` property.\n- Ability to add string file uploads.\n- Fix out-of-range issue with iter\\_lines.\n- Fix iter\\_content default size.\n- Fix POST redirects containing files.\n\n0.12.0 (2012-05-02)\n-------------------\n\n- EXPERIMENTAL OAUTH SUPPORT!\n- Proper CookieJar-backed cookies interface with awesome dict-like\n interface.\n- Speed fix for non-iterated content chunks.\n- Move `pre_request` to a more usable place.\n- New `pre_send` hook.\n- Lazily encode data, params, files.\n- Load system Certificate Bundle if `certify` isn't available.\n- Cleanups, fixes.\n\n0.11.2 (2012-04-22)\n-------------------\n\n- Attempt to use the OS's certificate bundle if `certifi` isn't\n available.\n- Infinite digest auth redirect fix.\n- Multi-part file upload improvements.\n- Fix decoding of invalid %encodings in URLs.\n- If there 
is no content in a response don't throw an error the second\n time that content is attempted to be read.\n- Upload data on redirects.\n\n0.11.1 (2012-03-30)\n-------------------\n\n- POST redirects now break RFC to do what browsers do: Follow up with\n a GET.\n- New `strict_mode` configuration to disable new redirect behavior.\n\n0.11.0 (2012-03-14)\n-------------------\n\n- Private SSL Certificate support\n- Remove select.poll from Gevent monkeypatching\n- Remove redundant generator for chunked transfer encoding\n- Fix: Response.ok raises Timeout Exception in safe\\_mode\n\n0.10.8 (2012-03-09)\n-------------------\n\n- Generate chunked ValueError fix\n- Proxy configuration by environment variables\n- Simplification of iter\\_lines.\n- New trust\\_env configuration for disabling system/environment hints.\n- Suppress cookie errors.\n\n0.10.7 (2012-03-07)\n-------------------\n\n- encode\\_uri = False\n\n0.10.6 (2012-02-25)\n-------------------\n\n- Allow '=' in cookies.\n\n0.10.5 (2012-02-25)\n-------------------\n\n- Response body with 0 content-length fix.\n- New async.imap.\n- Don't fail on netrc.\n\n0.10.4 (2012-02-20)\n-------------------\n\n- Honor netrc.\n\n0.10.3 (2012-02-20)\n-------------------\n\n- HEAD requests don't follow redirects anymore.\n- raise\\_for\\_status() doesn't raise for 3xx anymore.\n- Make Session objects picklable.\n- ValueError for invalid schema URLs.\n\n0.10.2 (2012-01-15)\n-------------------\n\n- Vastly improved URL quoting.\n- Additional allowed cookie key values.\n- Attempted fix for \"Too many open files\" Error\n- Replace unicode errors on first pass, no need for second pass.\n- Append '/' to bare-domain urls before query insertion.\n- Exceptions now inherit from RuntimeError.\n- Binary uploads + auth fix.\n- Bugfixes.\n\n0.10.1 (2012-01-23)\n-------------------\n\n- PYTHON 3 SUPPORT!\n- Dropped 2.5 Support. (*Backwards Incompatible*)\n\n0.10.0 (2012-01-21)\n-------------------\n\n- `Response.content` is now bytes-only. (*Backwards Incompatible*)\n- New `Response.text` is unicode-only.\n- If no `Response.encoding` is specified and `chardet` is available,\n `Response.text` will guess an encoding.\n- Default to ISO-8859-1 (Western) encoding for \"text\" subtypes.\n- Removal of decode\\_unicode. 
(*Backwards Incompatible*)\n- New multiple-hooks system.\n- New `Response.register_hook` for registering hooks within the\n pipeline.\n- `Response.url` is now Unicode.\n\n0.9.3 (2012-01-18)\n------------------\n\n- SSL verify=False bugfix (apparent on windows machines).\n\n0.9.2 (2012-01-18)\n------------------\n\n- Asynchronous async.send method.\n- Support for proper chunk streams with boundaries.\n- session argument for Session classes.\n- Print entire hook tracebacks, not just exception instance.\n- Fix response.iter\\_lines from pending next line.\n- Fix but in HTTP-digest auth w/ URI having query strings.\n- Fix in Event Hooks section.\n- Urllib3 update.\n\n0.9.1 (2012-01-06)\n------------------\n\n- danger\\_mode for automatic Response.raise\\_for\\_status()\n- Response.iter\\_lines refactor\n\n0.9.0 (2011-12-28)\n------------------\n\n- verify ssl is default.\n\n0.8.9 (2011-12-28)\n------------------\n\n- Packaging fix.\n\n0.8.8 (2011-12-28)\n------------------\n\n- SSL CERT VERIFICATION!\n- Release of Cerifi: Mozilla's cert list.\n- New 'verify' argument for SSL requests.\n- Urllib3 update.\n\n0.8.7 (2011-12-24)\n------------------\n\n- iter\\_lines last-line truncation fix\n- Force safe\\_mode for async requests\n- Handle safe\\_mode exceptions more consistently\n- Fix iteration on null responses in safe\\_mode\n\n0.8.6 (2011-12-18)\n------------------\n\n- Socket timeout fixes.\n- Proxy Authorization support.\n\n0.8.5 (2011-12-14)\n------------------\n\n- Response.iter\\_lines!\n\n0.8.4 (2011-12-11)\n------------------\n\n- Prefetch bugfix.\n- Added license to installed version.\n\n0.8.3 (2011-11-27)\n------------------\n\n- Converted auth system to use simpler callable objects.\n- New session parameter to API methods.\n- Display full URL while logging.\n\n0.8.2 (2011-11-19)\n------------------\n\n- New Unicode decoding system, based on over-ridable\n Response.encoding.\n- Proper URL slash-quote handling.\n- Cookies with `[`, `]`, and `_` allowed.\n\n0.8.1 (2011-11-15)\n------------------\n\n- URL Request path fix\n- Proxy fix.\n- Timeouts fix.\n\n0.8.0 (2011-11-13)\n------------------\n\n- Keep-alive support!\n- Complete removal of Urllib2\n- Complete removal of Poster\n- Complete removal of CookieJars\n- New ConnectionError raising\n- Safe\\_mode for error catching\n- prefetch parameter for request methods\n- OPTION method\n- Async pool size throttling\n- File uploads send real names\n- Vendored in urllib3\n\n0.7.6 (2011-11-07)\n------------------\n\n- Digest authentication bugfix (attach query data to path)\n\n0.7.5 (2011-11-04)\n------------------\n\n- Response.content = None if there was an invalid response.\n- Redirection auth handling.\n\n0.7.4 (2011-10-26)\n------------------\n\n- Session Hooks fix.\n\n0.7.3 (2011-10-23)\n------------------\n\n- Digest Auth fix.\n\n0.7.2 (2011-10-23)\n------------------\n\n- PATCH Fix.\n\n0.7.1 (2011-10-23)\n------------------\n\n- Move away from urllib2 authentication handling.\n- Fully Remove AuthManager, AuthObject, &c.\n- New tuple-based auth system with handler callbacks.\n\n0.7.0 (2011-10-22)\n------------------\n\n- Sessions are now the primary interface.\n- Deprecated InvalidMethodException.\n- PATCH fix.\n- New config system (no more global settings).\n\n0.6.6 (2011-10-19)\n------------------\n\n- Session parameter bugfix (params merging).\n\n0.6.5 (2011-10-18)\n------------------\n\n- Offline (fast) test suite.\n- Session dictionary argument merging.\n\n0.6.4 (2011-10-13)\n------------------\n\n- Automatic decoding of unicode, 
based on HTTP Headers.\n- New `decode_unicode` setting.\n- Removal of `r.read/close` methods.\n- New `r.faw` interface for advanced response usage.\\*\n- Automatic expansion of parameterized headers.\n\n0.6.3 (2011-10-13)\n------------------\n\n- Beautiful `requests.async` module, for making async requests w/\n gevent.\n\n0.6.2 (2011-10-09)\n------------------\n\n- GET/HEAD obeys allow\\_redirects=False.\n\n0.6.1 (2011-08-20)\n------------------\n\n- Enhanced status codes experience `\\o/`\n- Set a maximum number of redirects (`settings.max_redirects`)\n- Full Unicode URL support\n- Support for protocol-less redirects.\n- Allow for arbitrary request types.\n- Bugfixes\n\n0.6.0 (2011-08-17)\n------------------\n\n- New callback hook system\n- New persistent sessions object and context manager\n- Transparent Dict-cookie handling\n- Status code reference object\n- Removed Response.cached\n- Added Response.request\n- All args are kwargs\n- Relative redirect support\n- HTTPError handling improvements\n- Improved https testing\n- Bugfixes\n\n0.5.1 (2011-07-23)\n------------------\n\n- International Domain Name Support!\n- Access headers without fetching entire body (`read()`)\n- Use lists as dicts for parameters\n- Add Forced Basic Authentication\n- Forced Basic is default authentication type\n- `python-requests.org` default User-Agent header\n- CaseInsensitiveDict lower-case caching\n- Response.history bugfix\n\n0.5.0 (2011-06-21)\n------------------\n\n- PATCH Support\n- Support for Proxies\n- HTTPBin Test Suite\n- Redirect Fixes\n- settings.verbose stream writing\n- Querystrings for all methods\n- URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as\n explicitly raised\n `r.requests.get('hwe://blah'); r.raise_for_status()`\n\n0.4.1 (2011-05-22)\n------------------\n\n- Improved Redirection Handling\n- New 'allow\\_redirects' param for following non-GET/HEAD Redirects\n- Settings module refactoring\n\n0.4.0 (2011-05-15)\n------------------\n\n- Response.history: list of redirected responses\n- Case-Insensitive Header Dictionaries!\n- Unicode URLs\n\n0.3.4 (2011-05-14)\n------------------\n\n- Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)\n- Internal Refactor\n- Bytes data upload Bugfix\n\n0.3.3 (2011-05-12)\n------------------\n\n- Request timeouts\n- Unicode url-encoded data\n- Settings context manager and module\n\n0.3.2 (2011-04-15)\n------------------\n\n- Automatic Decompression of GZip Encoded Content\n- AutoAuth Support for Tupled HTTP Auth\n\n0.3.1 (2011-04-01)\n------------------\n\n- Cookie Changes\n- Response.read()\n- Poster fix\n\n0.3.0 (2011-02-25)\n------------------\n\n- Automatic Authentication API Change\n- Smarter Query URL Parameterization\n- Allow file uploads and POST data together\n-\n\n New Authentication Manager System\n\n : - Simpler Basic HTTP System\n - Supports all built-in urllib2 Auths\n - Allows for custom Auth Handlers\n\n0.2.4 (2011-02-19)\n------------------\n\n- Python 2.5 Support\n- PyPy-c v1.4 Support\n- Auto-Authentication tests\n- Improved Request object constructor\n\n0.2.3 (2011-02-15)\n------------------\n\n-\n\n New HTTPHandling Methods\n\n : - Response.\\_\\_nonzero\\_\\_ (false if bad HTTP Status)\n - Response.ok (True if expected HTTP Status)\n - Response.error (Logged HTTPError if bad HTTP Status)\n - Response.raise\\_for\\_status() (Raises stored HTTPError)\n\n0.2.2 (2011-02-14)\n------------------\n\n- Still handles request in the event of an HTTPError. 
(Issue \\#2)\n- Eventlet and Gevent Monkeypatch support.\n- Cookie Support (Issue \\#1)\n\n0.2.1 (2011-02-14)\n------------------\n\n- Added file attribute to POST and PUT requests for multipart-encode\n file uploads.\n- Added Request.url attribute for context and redirects\n\n0.2.0 (2011-02-14)\n------------------\n\n- Birth!\n\n0.0.1 (2011-02-13)\n------------------\n\n- Frustration\n- Conception\n",
"path": "HISTORY.md"
},
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.models\n~~~~~~~~~~~~~~~\n\nThis module contains the primary objects that power Requests.\n\"\"\"\n\nimport datetime\nimport sys\n\n# Import encoding now, to avoid implicit import later.\n# Implicit import within threads may cause LookupError when standard library is in a ZIP,\n# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.\nimport encodings.idna\n\nfrom urllib3.fields import RequestField\nfrom urllib3.filepost import encode_multipart_formdata\nfrom urllib3.util import parse_url\nfrom urllib3.exceptions import (\n DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)\n\nfrom io import UnsupportedOperation\nfrom .hooks import default_hooks\nfrom .structures import CaseInsensitiveDict\n\nfrom .auth import HTTPBasicAuth\nfrom .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar\nfrom .exceptions import (\n HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,\n ContentDecodingError, ConnectionError, StreamConsumedError,\n InvalidJSONError)\nfrom .exceptions import JSONDecodeError as RequestsJSONDecodeError\nfrom ._internal_utils import to_native_string, unicode_is_ascii\nfrom .utils import (\n guess_filename, get_auth_from_url, requote_uri,\n stream_decode_response_unicode, to_key_val_list, parse_header_links,\n iter_slices, guess_json_utf, super_len, check_header_validity)\nfrom .compat import (\n Callable, Mapping,\n cookielib, urlunparse, urlsplit, urlencode, str, bytes,\n is_py2, chardet, builtin_str, basestring, JSONDecodeError)\nfrom .compat import json as complexjson\nfrom .status_codes import codes\n\n#: The set of HTTP status codes that indicate an automatically\n#: processable redirect.\nREDIRECT_STATI = (\n codes.moved, # 301\n codes.found, # 302\n codes.other, # 303\n codes.temporary_redirect, # 307\n codes.permanent_redirect, # 308\n)\n\nDEFAULT_REDIRECT_LIMIT = 30\nCONTENT_CHUNK_SIZE = 10 * 1024\nITER_CHUNK_SIZE = 512\n\n\nclass RequestEncodingMixin(object):\n @property\n def path_url(self):\n \"\"\"Build the path URL to use.\"\"\"\n\n url = []\n\n p = urlsplit(self.url)\n\n path = p.path\n if not path:\n path = '/'\n\n url.append(path)\n\n query = p.query\n if query:\n url.append('?')\n url.append(query)\n\n return ''.join(url)\n\n @staticmethod\n def _encode_params(data):\n \"\"\"Encode parameters in a piece of data.\n\n Will successfully encode parameters when passed as a dict or a list of\n 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary\n if parameters are supplied as a dict.\n \"\"\"\n\n if isinstance(data, (str, bytes)):\n return data\n elif hasattr(data, 'read'):\n return data\n elif hasattr(data, '__iter__'):\n result = []\n for k, vs in to_key_val_list(data):\n if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):\n vs = [vs]\n for v in vs:\n if v is not None:\n result.append(\n (k.encode('utf-8') if isinstance(k, str) else k,\n v.encode('utf-8') if isinstance(v, str) else v))\n return urlencode(result, doseq=True)\n else:\n return data\n\n @staticmethod\n def _encode_files(files, data):\n \"\"\"Build the body for a multipart/form-data request.\n\n Will successfully encode files when passed as a dict or a list of\n tuples. 
Order is retained if data is a list of tuples but arbitrary\n if parameters are supplied as a dict.\n The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)\n or 4-tuples (filename, fileobj, contentype, custom_headers).\n \"\"\"\n if (not files):\n raise ValueError(\"Files must be provided.\")\n elif isinstance(data, basestring):\n raise ValueError(\"Data must not be a string.\")\n\n new_fields = []\n fields = to_key_val_list(data or {})\n files = to_key_val_list(files or {})\n\n for field, val in fields:\n if isinstance(val, basestring) or not hasattr(val, '__iter__'):\n val = [val]\n for v in val:\n if v is not None:\n # Don't call str() on bytestrings: in Py3 it all goes wrong.\n if not isinstance(v, bytes):\n v = str(v)\n\n new_fields.append(\n (field.decode('utf-8') if isinstance(field, bytes) else field,\n v.encode('utf-8') if isinstance(v, str) else v))\n\n for (k, v) in files:\n # support for explicit filename\n ft = None\n fh = None\n if isinstance(v, (tuple, list)):\n if len(v) == 2:\n fn, fp = v\n elif len(v) == 3:\n fn, fp, ft = v\n else:\n fn, fp, ft, fh = v\n else:\n fn = guess_filename(v) or k\n fp = v\n\n if isinstance(fp, (str, bytes, bytearray)):\n fdata = fp\n elif hasattr(fp, 'read'):\n fdata = fp.read()\n elif fp is None:\n continue\n else:\n fdata = fp\n\n rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)\n rf.make_multipart(content_type=ft)\n new_fields.append(rf)\n\n body, content_type = encode_multipart_formdata(new_fields)\n\n return body, content_type\n\n\nclass RequestHooksMixin(object):\n def register_hook(self, event, hook):\n \"\"\"Properly register a hook.\"\"\"\n\n if event not in self.hooks:\n raise ValueError('Unsupported event specified, with event name \"%s\"' % (event))\n\n if isinstance(hook, Callable):\n self.hooks[event].append(hook)\n elif hasattr(hook, '__iter__'):\n self.hooks[event].extend(h for h in hook if isinstance(h, Callable))\n\n def deregister_hook(self, event, hook):\n \"\"\"Deregister a previously registered hook.\n Returns True if the hook existed, False if not.\n \"\"\"\n\n try:\n self.hooks[event].remove(hook)\n return True\n except ValueError:\n return False\n\n\nclass Request(RequestHooksMixin):\n \"\"\"A user-created :class:`Request <Request>` object.\n\n Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.\n\n :param method: HTTP method to use.\n :param url: URL to send.\n :param headers: dictionary of headers to send.\n :param files: dictionary of {filename: fileobject} files to multipart upload.\n :param data: the body to attach to the request. If a dictionary or\n list of tuples ``[(key, value)]`` is provided, form-encoding will\n take place.\n :param json: json for the body to attach to the request (if files or data is not specified).\n :param params: URL parameters to append to the URL. 
If a dictionary or\n list of tuples ``[(key, value)]`` is provided, form-encoding will\n take place.\n :param auth: Auth handler or (user, pass) tuple.\n :param cookies: dictionary or CookieJar of cookies to attach to this request.\n :param hooks: dictionary of callback hooks, for internal usage.\n\n Usage::\n\n >>> import requests\n >>> req = requests.Request('GET', 'https://httpbin.org/get')\n >>> req.prepare()\n <PreparedRequest [GET]>\n \"\"\"\n\n def __init__(self,\n method=None, url=None, headers=None, files=None, data=None,\n params=None, auth=None, cookies=None, hooks=None, json=None):\n\n # Default empty dicts for dict params.\n data = [] if data is None else data\n files = [] if files is None else files\n headers = {} if headers is None else headers\n params = {} if params is None else params\n hooks = {} if hooks is None else hooks\n\n self.hooks = default_hooks()\n for (k, v) in list(hooks.items()):\n self.register_hook(event=k, hook=v)\n\n self.method = method\n self.url = url\n self.headers = headers\n self.files = files\n self.data = data\n self.json = json\n self.params = params\n self.auth = auth\n self.cookies = cookies\n\n def __repr__(self):\n return '<Request [%s]>' % (self.method)\n\n def prepare(self):\n \"\"\"Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.\"\"\"\n p = PreparedRequest()\n p.prepare(\n method=self.method,\n url=self.url,\n headers=self.headers,\n files=self.files,\n data=self.data,\n json=self.json,\n params=self.params,\n auth=self.auth,\n cookies=self.cookies,\n hooks=self.hooks,\n )\n return p\n\n\nclass PreparedRequest(RequestEncodingMixin, RequestHooksMixin):\n \"\"\"The fully mutable :class:`PreparedRequest <PreparedRequest>` object,\n containing the exact bytes that will be sent to the server.\n\n Instances are generated from a :class:`Request <Request>` object, and\n should not be instantiated manually; doing so may produce undesirable\n effects.\n\n Usage::\n\n >>> import requests\n >>> req = requests.Request('GET', 'https://httpbin.org/get')\n >>> r = req.prepare()\n >>> r\n <PreparedRequest [GET]>\n\n >>> s = requests.Session()\n >>> s.send(r)\n <Response [200]>\n \"\"\"\n\n def __init__(self):\n #: HTTP verb to send to the server.\n self.method = None\n #: HTTP URL to send the request to.\n self.url = None\n #: dictionary of HTTP headers.\n self.headers = None\n # The `CookieJar` used to create the Cookie header will be stored here\n # after prepare_cookies is called\n self._cookies = None\n #: request body to send to the server.\n self.body = None\n #: dictionary of callback hooks, for internal usage.\n self.hooks = default_hooks()\n #: integer denoting starting position of a readable file-like body.\n self._body_position = None\n\n def prepare(self,\n method=None, url=None, headers=None, files=None, data=None,\n params=None, auth=None, cookies=None, hooks=None, json=None):\n \"\"\"Prepares the entire request with the given parameters.\"\"\"\n\n self.prepare_method(method)\n self.prepare_url(url, params)\n self.prepare_headers(headers)\n self.prepare_cookies(cookies)\n self.prepare_body(data, files, json)\n self.prepare_auth(auth, url)\n\n # Note that prepare_auth must be last to enable authentication schemes\n # such as OAuth to work on a fully prepared request.\n\n # This MUST go after prepare_auth. 
Authenticators could add a hook\n self.prepare_hooks(hooks)\n\n def __repr__(self):\n return '<PreparedRequest [%s]>' % (self.method)\n\n def copy(self):\n p = PreparedRequest()\n p.method = self.method\n p.url = self.url\n p.headers = self.headers.copy() if self.headers is not None else None\n p._cookies = _copy_cookie_jar(self._cookies)\n p.body = self.body\n p.hooks = self.hooks\n p._body_position = self._body_position\n return p\n\n def prepare_method(self, method):\n \"\"\"Prepares the given HTTP method.\"\"\"\n self.method = method\n if self.method is not None:\n self.method = to_native_string(self.method.upper())\n\n @staticmethod\n def _get_idna_encoded_host(host):\n import idna\n\n try:\n host = idna.encode(host, uts46=True).decode('utf-8')\n except idna.IDNAError:\n raise UnicodeError\n return host\n\n def prepare_url(self, url, params):\n \"\"\"Prepares the given HTTP URL.\"\"\"\n #: Accept objects that have string representations.\n #: We're unable to blindly call unicode/str functions\n #: as this will include the bytestring indicator (b'')\n #: on python 3.x.\n #: https://github.com/psf/requests/pull/2238\n if isinstance(url, bytes):\n url = url.decode('utf8')\n else:\n url = unicode(url) if is_py2 else str(url)\n\n # Remove leading whitespaces from url\n url = url.lstrip()\n\n # Don't do any URL preparation for non-HTTP schemes like `mailto`,\n # `data` etc to work around exceptions from `url_parse`, which\n # handles RFC 3986 only.\n if ':' in url and not url.lower().startswith('http'):\n self.url = url\n return\n\n # Support for unicode domain names and paths.\n try:\n scheme, auth, host, port, path, query, fragment = parse_url(url)\n except LocationParseError as e:\n raise InvalidURL(*e.args)\n\n if not scheme:\n error = (\"Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?\")\n error = error.format(to_native_string(url, 'utf8'))\n\n raise MissingSchema(error)\n\n if not host:\n raise InvalidURL(\"Invalid URL %r: No host supplied\" % url)\n\n # In general, we want to try IDNA encoding the hostname if the string contains\n # non-ASCII characters. This allows users to automatically get the correct IDNA\n # behaviour. 
For strings containing only ASCII characters, we need to also verify\n # it doesn't start with a wildcard (*), before allowing the unencoded hostname.\n if not unicode_is_ascii(host):\n try:\n host = self._get_idna_encoded_host(host)\n except UnicodeError:\n raise InvalidURL('URL has an invalid label.')\n elif host.startswith((u'*', u'.')):\n raise InvalidURL('URL has an invalid label.')\n\n # Carefully reconstruct the network location\n netloc = auth or ''\n if netloc:\n netloc += '@'\n netloc += host\n if port:\n netloc += ':' + str(port)\n\n # Bare domains aren't valid URLs.\n if not path:\n path = '/'\n\n if is_py2:\n if isinstance(scheme, str):\n scheme = scheme.encode('utf-8')\n if isinstance(netloc, str):\n netloc = netloc.encode('utf-8')\n if isinstance(path, str):\n path = path.encode('utf-8')\n if isinstance(query, str):\n query = query.encode('utf-8')\n if isinstance(fragment, str):\n fragment = fragment.encode('utf-8')\n\n if isinstance(params, (str, bytes)):\n params = to_native_string(params)\n\n enc_params = self._encode_params(params)\n if enc_params:\n if query:\n query = '%s&%s' % (query, enc_params)\n else:\n query = enc_params\n\n url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))\n self.url = url\n\n def prepare_headers(self, headers):\n \"\"\"Prepares the given HTTP headers.\"\"\"\n\n self.headers = CaseInsensitiveDict()\n if headers:\n for header in headers.items():\n # Raise exception on invalid header value.\n check_header_validity(header)\n name, value = header\n self.headers[to_native_string(name)] = value\n\n def prepare_body(self, data, files, json=None):\n \"\"\"Prepares the given HTTP body data.\"\"\"\n\n # Check if file, fo, generator, iterator.\n # If not, run through normal process.\n\n # Nottin' on you.\n body = None\n content_type = None\n\n if not data and json is not None:\n # urllib3 requires a bytes-like body. 
Python 2's json.dumps\n # provides this natively, but Python 3 gives a Unicode string.\n content_type = 'application/json'\n\n try:\n body = complexjson.dumps(json, allow_nan=False)\n except ValueError as ve:\n raise InvalidJSONError(ve, request=self)\n\n if not isinstance(body, bytes):\n body = body.encode('utf-8')\n\n is_stream = all([\n hasattr(data, '__iter__'),\n not isinstance(data, (basestring, list, tuple, Mapping))\n ])\n\n if is_stream:\n try:\n length = super_len(data)\n except (TypeError, AttributeError, UnsupportedOperation):\n length = None\n\n body = data\n\n if getattr(body, 'tell', None) is not None:\n # Record the current file position before reading.\n # This will allow us to rewind a file in the event\n # of a redirect.\n try:\n self._body_position = body.tell()\n except (IOError, OSError):\n # This differentiates from None, allowing us to catch\n # a failed `tell()` later when trying to rewind the body\n self._body_position = object()\n\n if files:\n raise NotImplementedError('Streamed bodies and files are mutually exclusive.')\n\n if length:\n self.headers['Content-Length'] = builtin_str(length)\n else:\n self.headers['Transfer-Encoding'] = 'chunked'\n else:\n # Multi-part file uploads.\n if files:\n (body, content_type) = self._encode_files(files, data)\n else:\n if data:\n body = self._encode_params(data)\n if isinstance(data, basestring) or hasattr(data, 'read'):\n content_type = None\n else:\n content_type = 'application/x-www-form-urlencoded'\n\n self.prepare_content_length(body)\n\n # Add content-type if it wasn't explicitly provided.\n if content_type and ('content-type' not in self.headers):\n self.headers['Content-Type'] = content_type\n\n self.body = body\n\n def prepare_content_length(self, body):\n \"\"\"Prepare Content-Length header based on request method and body\"\"\"\n if body is not None:\n length = super_len(body)\n if length:\n # If length exists, set it. Otherwise, we fallback\n # to Transfer-Encoding: chunked.\n self.headers['Content-Length'] = builtin_str(length)\n elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:\n # Set Content-Length to 0 for methods that can have a body\n # but don't provide one. (i.e. not GET or HEAD)\n self.headers['Content-Length'] = '0'\n\n def prepare_auth(self, auth, url=''):\n \"\"\"Prepares the given HTTP auth data.\"\"\"\n\n # If no Auth is explicitly provided, extract it from the URL first.\n if auth is None:\n url_auth = get_auth_from_url(self.url)\n auth = url_auth if any(url_auth) else None\n\n if auth:\n if isinstance(auth, tuple) and len(auth) == 2:\n # special-case basic HTTP auth\n auth = HTTPBasicAuth(*auth)\n\n # Allow auth to make its changes.\n r = auth(self)\n\n # Update self to reflect the auth changes.\n self.__dict__.update(r.__dict__)\n\n # Recompute Content-Length\n self.prepare_content_length(self.body)\n\n def prepare_cookies(self, cookies):\n \"\"\"Prepares the given HTTP cookie data.\n\n This function eventually generates a ``Cookie`` header from the\n given cookies using cookielib. Due to cookielib's design, the header\n will not be regenerated if it already exists, meaning this function\n can only be called once for the life of the\n :class:`PreparedRequest <PreparedRequest>` object. 
Any subsequent calls\n to ``prepare_cookies`` will have no actual effect, unless the \"Cookie\"\n header is removed beforehand.\n \"\"\"\n if isinstance(cookies, cookielib.CookieJar):\n self._cookies = cookies\n else:\n self._cookies = cookiejar_from_dict(cookies)\n\n cookie_header = get_cookie_header(self._cookies, self)\n if cookie_header is not None:\n self.headers['Cookie'] = cookie_header\n\n def prepare_hooks(self, hooks):\n \"\"\"Prepares the given hooks.\"\"\"\n # hooks can be passed as None to the prepare method and to this\n # method. To prevent iterating over None, simply use an empty list\n # if hooks is False-y\n hooks = hooks or []\n for event in hooks:\n self.register_hook(event, hooks[event])\n\n\nclass Response(object):\n \"\"\"The :class:`Response <Response>` object, which contains a\n server's response to an HTTP request.\n \"\"\"\n\n __attrs__ = [\n '_content', 'status_code', 'headers', 'url', 'history',\n 'encoding', 'reason', 'cookies', 'elapsed', 'request'\n ]\n\n def __init__(self):\n self._content = False\n self._content_consumed = False\n self._next = None\n\n #: Integer Code of responded HTTP Status, e.g. 404 or 200.\n self.status_code = None\n\n #: Case-insensitive Dictionary of Response Headers.\n #: For example, ``headers['content-encoding']`` will return the\n #: value of a ``'Content-Encoding'`` response header.\n self.headers = CaseInsensitiveDict()\n\n #: File-like object representation of response (for advanced usage).\n #: Use of ``raw`` requires that ``stream=True`` be set on the request.\n #: This requirement does not apply for use internally to Requests.\n self.raw = None\n\n #: Final URL location of Response.\n self.url = None\n\n #: Encoding to decode with when accessing r.text.\n self.encoding = None\n\n #: A list of :class:`Response <Response>` objects from\n #: the history of the Request. Any redirect responses will end\n #: up here. The list is sorted from the oldest to the most recent request.\n self.history = []\n\n #: Textual reason of responded HTTP Status, e.g. \"Not Found\" or \"OK\".\n self.reason = None\n\n #: A CookieJar of Cookies the server sent back.\n self.cookies = cookiejar_from_dict({})\n\n #: The amount of time elapsed between sending the request\n #: and the arrival of the response (as a timedelta).\n #: This property specifically measures the time taken between sending\n #: the first byte of the request and finishing parsing the headers. 
It\n #: is therefore unaffected by consuming the response content or the\n #: value of the ``stream`` keyword argument.\n self.elapsed = datetime.timedelta(0)\n\n #: The :class:`PreparedRequest <PreparedRequest>` object to which this\n #: is a response.\n self.request = None\n\n def __enter__(self):\n return self\n\n def __exit__(self, *args):\n self.close()\n\n def __getstate__(self):\n # Consume everything; accessing the content attribute makes\n # sure the content has been fully read.\n if not self._content_consumed:\n self.content\n\n return {attr: getattr(self, attr, None) for attr in self.__attrs__}\n\n def __setstate__(self, state):\n for name, value in state.items():\n setattr(self, name, value)\n\n # pickled objects do not have .raw\n setattr(self, '_content_consumed', True)\n setattr(self, 'raw', None)\n\n def __repr__(self):\n return '<Response [%s]>' % (self.status_code)\n\n def __bool__(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code, is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n return self.ok\n\n def __nonzero__(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code, is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n return self.ok\n\n def __iter__(self):\n \"\"\"Allows you to use a response as an iterator.\"\"\"\n return self.iter_content(128)\n\n @property\n def ok(self):\n \"\"\"Returns True if :attr:`status_code` is less than 400, False if not.\n\n This attribute checks if the status code of the response is between\n 400 and 600 to see if there was a client error or a server error. If\n the status code is between 200 and 400, this will return True. This\n is **not** a check to see if the response code is ``200 OK``.\n \"\"\"\n try:\n self.raise_for_status()\n except HTTPError:\n return False\n return True\n\n @property\n def is_redirect(self):\n \"\"\"True if this Response is a well-formed HTTP redirect that could have\n been processed automatically (by :meth:`Session.resolve_redirects`).\n \"\"\"\n return ('location' in self.headers and self.status_code in REDIRECT_STATI)\n\n @property\n def is_permanent_redirect(self):\n \"\"\"True if this Response one of the permanent versions of redirect.\"\"\"\n return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))\n\n @property\n def next(self):\n \"\"\"Returns a PreparedRequest for the next request in a redirect chain, if there is one.\"\"\"\n return self._next\n\n @property\n def apparent_encoding(self):\n \"\"\"The apparent encoding, provided by the charset_normalizer or chardet libraries.\"\"\"\n return chardet.detect(self.content)['encoding']\n\n def iter_content(self, chunk_size=1, decode_unicode=False):\n \"\"\"Iterates over the response data. When stream=True is set on the\n request, this avoids reading the content at once into memory for\n large responses. The chunk size is the number of bytes it should\n read into memory. This is not necessarily the length of each item\n returned as decoding can take place.\n\n chunk_size must be of type int or None. 
A value of None will\n function differently depending on the value of `stream`.\n stream=True will read data as it arrives in whatever size the\n chunks are received. If stream=False, data is returned as\n a single chunk.\n\n If decode_unicode is True, content will be decoded using the best\n available encoding based on the response.\n \"\"\"\n\n def generate():\n # Special case for urllib3.\n if hasattr(self.raw, 'stream'):\n try:\n for chunk in self.raw.stream(chunk_size, decode_content=True):\n yield chunk\n except ProtocolError as e:\n raise ChunkedEncodingError(e)\n except DecodeError as e:\n raise ContentDecodingError(e)\n except ReadTimeoutError as e:\n raise ConnectionError(e)\n else:\n # Standard file-like object.\n while True:\n chunk = self.raw.read(chunk_size)\n if not chunk:\n break\n yield chunk\n\n self._content_consumed = True\n\n if self._content_consumed and isinstance(self._content, bool):\n raise StreamConsumedError()\n elif chunk_size is not None and not isinstance(chunk_size, int):\n raise TypeError(\"chunk_size must be an int, it is instead a %s.\" % type(chunk_size))\n # simulate reading small chunks of the content\n reused_chunks = iter_slices(self._content, chunk_size)\n\n stream_chunks = generate()\n\n chunks = reused_chunks if self._content_consumed else stream_chunks\n\n if decode_unicode:\n chunks = stream_decode_response_unicode(chunks, self)\n\n return chunks\n\n def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):\n \"\"\"Iterates over the response data, one line at a time. When\n stream=True is set on the request, this avoids reading the\n content at once into memory for large responses.\n\n .. note:: This method is not reentrant safe.\n \"\"\"\n\n pending = None\n\n for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):\n\n if pending is not None:\n chunk = pending + chunk\n\n if delimiter:\n lines = chunk.split(delimiter)\n else:\n lines = chunk.splitlines()\n\n if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:\n pending = lines.pop()\n else:\n pending = None\n\n for line in lines:\n yield line\n\n if pending is not None:\n yield pending\n\n @property\n def content(self):\n \"\"\"Content of the response, in bytes.\"\"\"\n\n if self._content is False:\n # Read the contents.\n if self._content_consumed:\n raise RuntimeError(\n 'The content for this response was already consumed')\n\n if self.status_code == 0 or self.raw is None:\n self._content = None\n else:\n self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''\n\n self._content_consumed = True\n # don't need to release the connection; that's been handled by urllib3\n # since we exhausted the data.\n return self._content\n\n @property\n def text(self):\n \"\"\"Content of the response, in unicode.\n\n If Response.encoding is None, encoding will be guessed using\n ``charset_normalizer`` or ``chardet``.\n\n The encoding of the response content is determined based solely on HTTP\n headers, following RFC 2616 to the letter. 
If you can take advantage of\n non-HTTP knowledge to make a better guess at the encoding, you should\n set ``r.encoding`` appropriately before accessing this property.\n \"\"\"\n\n # Try charset from content-type\n content = None\n encoding = self.encoding\n\n if not self.content:\n return str('')\n\n # Fallback to auto-detected encoding.\n if self.encoding is None:\n encoding = self.apparent_encoding\n\n # Decode unicode from given encoding.\n try:\n content = str(self.content, encoding, errors='replace')\n except (LookupError, TypeError):\n # A LookupError is raised if the encoding was not found which could\n # indicate a misspelling or similar mistake.\n #\n # A TypeError can be raised if encoding is None\n #\n # So we try blindly encoding.\n content = str(self.content, errors='replace')\n\n return content\n\n def json(self, **kwargs):\n r\"\"\"Returns the json-encoded content of a response, if any.\n\n :param \\*\\*kwargs: Optional arguments that ``json.loads`` takes.\n :raises requests.exceptions.JSONDecodeError: If the response body does not\n contain valid json.\n \"\"\"\n\n if not self.encoding and self.content and len(self.content) > 3:\n # No encoding set. JSON RFC 4627 section 3 states we should expect\n # UTF-8, -16 or -32. Detect which one to use; If the detection or\n # decoding fails, fall back to `self.text` (using charset_normalizer to make\n # a best guess).\n encoding = guess_json_utf(self.content)\n if encoding is not None:\n try:\n return complexjson.loads(\n self.content.decode(encoding), **kwargs\n )\n except UnicodeDecodeError:\n # Wrong UTF codec detected; usually because it's not UTF-8\n # but some other 8-bit codec. This is an RFC violation,\n # and the server didn't bother to tell us what codec *was*\n # used.\n pass\n\n try:\n return complexjson.loads(self.text, **kwargs)\n except JSONDecodeError as e:\n # Catch JSON-related errors and raise as requests.JSONDecodeError\n # This aliases json.JSONDecodeError and simplejson.JSONDecodeError\n if is_py2: # e is a ValueError\n raise RequestsJSONDecodeError(e.message)\n else:\n raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)\n\n @property\n def links(self):\n \"\"\"Returns the parsed header links of the response, if any.\"\"\"\n\n header = self.headers.get('link')\n\n # l = MultiDict()\n l = {}\n\n if header:\n links = parse_header_links(header)\n\n for link in links:\n key = link.get('rel') or link.get('url')\n l[key] = link\n\n return l\n\n def raise_for_status(self):\n \"\"\"Raises :class:`HTTPError`, if one occurred.\"\"\"\n\n http_error_msg = ''\n if isinstance(self.reason, bytes):\n # We attempt to decode utf-8 first because some servers\n # choose to localize their reason strings. If the string\n # isn't utf-8, we fall back to iso-8859-1 for all other\n # encodings. (See PR #3538)\n try:\n reason = self.reason.decode('utf-8')\n except UnicodeDecodeError:\n reason = self.reason.decode('iso-8859-1')\n else:\n reason = self.reason\n\n if 400 <= self.status_code < 500:\n http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)\n\n elif 500 <= self.status_code < 600:\n http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)\n\n if http_error_msg:\n raise HTTPError(http_error_msg, response=self)\n\n def close(self):\n \"\"\"Releases the connection back to the pool. 
Once this method has been\n called the underlying ``raw`` object must not be accessed again.\n\n *Note: Should not normally need to be called explicitly.*\n \"\"\"\n if not self._content_consumed:\n self.raw.close()\n\n release_conn = getattr(self.raw, 'release_conn', None)\n if release_conn is not None:\n release_conn()\n",
"path": "requests/models.py"
},
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Tests for Requests.\"\"\"\n\nfrom __future__ import division\nimport json\nimport os\nimport pickle\nimport collections\nimport contextlib\nimport warnings\nimport re\n\nimport io\nimport requests\nimport pytest\nfrom requests.adapters import HTTPAdapter\nfrom requests.auth import HTTPDigestAuth, _basic_auth_str\nfrom requests.compat import (\n Morsel, cookielib, getproxies, str, urlparse,\n builtin_str)\nfrom requests.cookies import (\n cookiejar_from_dict, morsel_to_cookie)\nfrom requests.exceptions import (\n ConnectionError, ConnectTimeout, InvalidSchema, InvalidURL,\n MissingSchema, ReadTimeout, Timeout, RetryError, RequestException, TooManyRedirects,\n ProxyError, InvalidHeader, UnrewindableBodyError, SSLError, InvalidProxyURL, InvalidJSONError)\nfrom requests.models import PreparedRequest\nfrom requests.structures import CaseInsensitiveDict\nfrom requests.sessions import SessionRedirectMixin\nfrom requests.models import urlencode\nfrom requests.hooks import default_hooks\nfrom requests.compat import JSONDecodeError, is_py3, MutableMapping\n\nfrom .compat import StringIO, u\nfrom .utils import override_environ\nfrom urllib3.util import Timeout as Urllib3Timeout\n\n# Requests to this URL should always fail with a connection timeout (nothing\n# listening on that port)\nTARPIT = 'http://10.255.255.1'\n\n# This is to avoid waiting the timeout of using TARPIT\nINVALID_PROXY='http://localhost:1'\n\ntry:\n from ssl import SSLContext\n del SSLContext\n HAS_MODERN_SSL = True\nexcept ImportError:\n HAS_MODERN_SSL = False\n\ntry:\n requests.pyopenssl\n HAS_PYOPENSSL = True\nexcept AttributeError:\n HAS_PYOPENSSL = False\n\n\nclass TestRequests:\n\n digest_auth_algo = ('MD5', 'SHA-256', 'SHA-512')\n\n def test_entry_points(self):\n\n requests.session\n requests.session().get\n requests.session().head\n requests.get\n requests.head\n requests.put\n requests.patch\n requests.post\n # Not really an entry point, but people rely on it.\n from requests.packages.urllib3.poolmanager import PoolManager\n\n @pytest.mark.parametrize(\n 'exception, url', (\n (MissingSchema, 'hiwpefhipowhefopw'),\n (InvalidSchema, 'localhost:3128'),\n (InvalidSchema, 'localhost.localdomain:3128/'),\n (InvalidSchema, '10.122.1.1:3128/'),\n (InvalidURL, 'http://'),\n (InvalidURL, 'http://*example.com'),\n (InvalidURL, 'http://.example.com'),\n ))\n def test_invalid_url(self, exception, url):\n with pytest.raises(exception):\n requests.get(url)\n\n def test_basic_building(self):\n req = requests.Request()\n req.url = 'http://kennethreitz.org/'\n req.data = {'life': '42'}\n\n pr = req.prepare()\n assert pr.url == req.url\n assert pr.body == 'life=42'\n\n @pytest.mark.parametrize('method', ('GET', 'HEAD'))\n def test_no_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert 'Content-Length' not in req.headers\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_no_body_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert req.headers['Content-Length'] == '0'\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_empty_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower()), data='').prepare()\n assert req.headers['Content-Length'] == '0'\n\n def test_override_content_length(self, httpbin):\n headers = {\n 'Content-Length': 'not zero'\n }\n r = 
requests.Request('POST', httpbin('post'), headers=headers).prepare()\n assert 'Content-Length' in r.headers\n assert r.headers['Content-Length'] == 'not zero'\n\n def test_path_is_not_double_encoded(self):\n request = requests.Request('GET', \"http://0.0.0.0/get/test case\").prepare()\n\n assert request.path_url == '/get/test%20case'\n\n @pytest.mark.parametrize(\n 'url, expected', (\n ('http://example.com/path#fragment', 'http://example.com/path?a=b#fragment'),\n ('http://example.com/path?key=value#fragment', 'http://example.com/path?key=value&a=b#fragment')\n ))\n def test_params_are_added_before_fragment(self, url, expected):\n request = requests.Request('GET', url, params={\"a\": \"b\"}).prepare()\n assert request.url == expected\n\n def test_params_original_order_is_preserved_by_default(self):\n param_ordered_dict = collections.OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1)))\n session = requests.Session()\n request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict)\n prep = session.prepare_request(request)\n assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1'\n\n def test_params_bytes_are_encoded(self):\n request = requests.Request('GET', 'http://example.com',\n params=b'test=foo').prepare()\n assert request.url == 'http://example.com/?test=foo'\n\n def test_binary_put(self):\n request = requests.Request('PUT', 'http://example.com',\n data=u\"ööö\".encode(\"utf-8\")).prepare()\n assert isinstance(request.body, bytes)\n\n def test_whitespaces_are_removed_from_url(self):\n # Test for issue #3696\n request = requests.Request('GET', ' http://example.com').prepare()\n assert request.url == 'http://example.com/'\n\n @pytest.mark.parametrize('scheme', ('http://', 'HTTP://', 'hTTp://', 'HttP://'))\n def test_mixed_case_scheme_acceptable(self, httpbin, scheme):\n s = requests.Session()\n s.proxies = getproxies()\n parts = urlparse(httpbin('get'))\n url = scheme + parts.netloc + parts.path\n r = requests.Request('GET', url)\n r = s.send(r.prepare())\n assert r.status_code == 200, 'failed for scheme {}'.format(scheme)\n\n def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n\n assert r.status_code == 200\n\n def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):\n r = requests.get(httpbin('redirect', '1'))\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin):\n r = requests.post(httpbin('redirect-to'), data='test', params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == 'test'\n\n def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin):\n byte_str = b'test'\n r = requests.post(httpbin('redirect-to'), data=io.BytesIO(byte_str), params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == byte_str.decode('utf-8')\n\n def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin):\n try:\n requests.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '20')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 30\n else:\n pytest.fail('Expected redirect to raise TooManyRedirects but it did not')\n\n def 
test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin):\n s = requests.session()\n s.max_redirects = 5\n try:\n s.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '45')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 5\n else:\n pytest.fail('Expected custom max number of redirects to be respected but was not')\n\n def test_http_301_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '301'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_301_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '301'), allow_redirects=True)\n print(r.content)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_302_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '302'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_302_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '302'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_303_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '303'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_http_303_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '303'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_header_and_body_removal_on_redirect(self, httpbin):\n purged_headers = ('Content-Length', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data={'test': 'data'})\n prep = ses.prepare_request(req)\n resp = ses.send(prep)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = 'get'\n\n # Run request through resolve_redirects\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_transfer_enc_removal_on_redirect(self, httpbin):\n purged_headers = ('Transfer-Encoding', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data=(b'x' for x in range(1)))\n prep = ses.prepare_request(req)\n assert 'Transfer-Encoding' in prep.headers\n\n # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33\n resp = requests.Response()\n resp.raw = io.BytesIO(b'the content')\n resp.request = prep\n setattr(resp.raw, 'release_conn', lambda *args: args)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n\n # Run request through resolve_redirect\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_fragment_maintained_on_redirect(self, httpbin):\n fragment = \"#view=edit&token=hunter2\"\n r = 
requests.get(httpbin('redirect-to?url=get')+fragment)\n\n assert len(r.history) > 0\n assert r.history[0].request.url == httpbin('redirect-to?url=get')+fragment\n assert r.url == httpbin('get')+fragment\n\n def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n\n assert heads['User-agent'] in r.text\n assert r.status_code == 200\n\n def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)\n assert r.status_code == 200\n\n def test_set_cookie_on_301(self, httpbin):\n s = requests.session()\n url = httpbin('cookies/set?foo=bar')\n s.get(url)\n assert s.cookies['foo'] == 'bar'\n\n def test_cookie_sent_on_redirect(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')\n assert 'Cookie' in r.json()['headers']\n\n def test_cookie_removed_on_expire(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n assert s.cookies['foo'] == 'bar'\n s.get(\n httpbin('response-headers'),\n params={\n 'Set-Cookie':\n 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT'\n }\n )\n assert 'foo' not in s.cookies\n\n def test_cookie_quote_wrapped(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=\"bar:baz\"'))\n assert s.cookies['foo'] == '\"bar:baz\"'\n\n def test_cookie_persists_via_api(self, httpbin):\n s = requests.session()\n r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})\n assert 'foo' in r.request.headers['Cookie']\n assert 'foo' in r.history[0].request.headers['Cookie']\n\n def test_request_cookie_overrides_session_cookie(self, httpbin):\n s = requests.session()\n s.cookies['foo'] = 'bar'\n r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n assert r.json()['cookies']['foo'] == 'baz'\n # Session cookie should not be modified\n assert s.cookies['foo'] == 'bar'\n\n def test_request_cookies_not_persisted(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n # Sending a request with cookies should not add cookies to the session\n assert not s.cookies\n\n def test_generic_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n s.cookies = cj\n r = s.get(httpbin('cookies'))\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n # Make sure the session cj is still the custom one\n assert s.cookies is cj\n\n def test_param_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n r = s.get(httpbin('cookies'), cookies=cj)\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n\n def test_cookielib_cookiejar_on_redirect(self, httpbin):\n \"\"\"Tests resolve_redirect doesn't fail when merging cookies\n with non-RequestsCookieJar cookiejar.\n\n See GH #3579\n \"\"\"\n cj = cookiejar_from_dict({'foo': 'bar'}, cookielib.CookieJar())\n s = requests.Session()\n s.cookies = cookiejar_from_dict({'cookie': 'tasty'})\n\n # Prepare request without using Session\n req = requests.Request('GET', httpbin('headers'), cookies=cj)\n prep_req = req.prepare()\n\n # Send request and simulate redirect\n resp = s.send(prep_req)\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n redirects = 
s.resolve_redirects(resp, prep_req)\n resp = next(redirects)\n\n # Verify CookieJar isn't being converted to RequestsCookieJar\n assert isinstance(prep_req._cookies, cookielib.CookieJar)\n assert isinstance(resp.request._cookies, cookielib.CookieJar)\n assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar)\n\n cookies = {}\n for c in resp.request._cookies:\n cookies[c.name] = c.value\n assert cookies['foo'] == 'bar'\n assert cookies['cookie'] == 'tasty'\n\n def test_requests_in_history_are_not_overridden(self, httpbin):\n resp = requests.get(httpbin('redirect/3'))\n urls = [r.url for r in resp.history]\n req_urls = [r.request.url for r in resp.history]\n assert urls == req_urls\n\n def test_history_is_always_a_list(self, httpbin):\n \"\"\"Show that even with redirects, Response.history is always a list.\"\"\"\n resp = requests.get(httpbin('get'))\n assert isinstance(resp.history, list)\n resp = requests.get(httpbin('redirect/1'))\n assert isinstance(resp.history, list)\n assert not isinstance(resp.history, tuple)\n\n def test_headers_on_session_with_None_are_not_sent(self, httpbin):\n \"\"\"Do not send headers in Session.headers with None values.\"\"\"\n ses = requests.Session()\n ses.headers['Accept-Encoding'] = None\n req = requests.Request('GET', httpbin('get'))\n prep = ses.prepare_request(req)\n assert 'Accept-Encoding' not in prep.headers\n\n def test_headers_preserve_order(self, httpbin):\n \"\"\"Preserve order when headers provided as OrderedDict.\"\"\"\n ses = requests.Session()\n ses.headers = collections.OrderedDict()\n ses.headers['Accept-Encoding'] = 'identity'\n ses.headers['First'] = '1'\n ses.headers['Second'] = '2'\n headers = collections.OrderedDict([('Third', '3'), ('Fourth', '4')])\n headers['Fifth'] = '5'\n headers['Second'] = '222'\n req = requests.Request('GET', httpbin('get'), headers=headers)\n prep = ses.prepare_request(req)\n items = list(prep.headers.items())\n assert items[0] == ('Accept-Encoding', 'identity')\n assert items[1] == ('First', '1')\n assert items[2] == ('Second', '222')\n assert items[3] == ('Third', '3')\n assert items[4] == ('Fourth', '4')\n assert items[5] == ('Fifth', '5')\n\n @pytest.mark.parametrize('key', ('User-agent', 'user-agent'))\n def test_user_agent_transfers(self, httpbin, key):\n\n heads = {key: 'Mozilla/5.0 (github.com/psf/requests)'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n assert heads[key] in r.text\n\n def test_HTTP_200_OK_HEAD(self, httpbin):\n r = requests.head(httpbin('get'))\n assert r.status_code == 200\n\n def test_HTTP_200_OK_PUT(self, httpbin):\n r = requests.put(httpbin('put'))\n assert r.status_code == 200\n\n def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):\n auth = ('user', 'pass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'username, password', (\n ('user', 'pass'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8')),\n (42, 42),\n (None, None),\n ))\n def test_set_basicauth(self, httpbin, username, password):\n auth = (username, password)\n url = httpbin('get')\n\n r = requests.Request('GET', url, auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == _basic_auth_str(username, password)\n\n def test_basicauth_encodes_byte_strings(self):\n \"\"\"Ensure b'test' formats as the byte string \"test\" 
rather\n than the unicode string \"b'test'\" in Python 3.\n \"\"\"\n auth = (b'\xc5\xafsername', b'test\xc6\xb6')\n r = requests.Request('GET', 'http://localhost', auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == 'Basic xa9zZXJuYW1lOnRlc3TGtg=='\n\n @pytest.mark.parametrize(\n 'url, exception', (\n # Connecting to an unknown domain should raise a ConnectionError\n ('http://doesnotexist.google.com', ConnectionError),\n # Connecting to an invalid port should raise a ConnectionError\n ('http://localhost:1', ConnectionError),\n # Inputting a URL that cannot be parsed should raise an InvalidURL error\n ('http://fe80::5054:ff:fe5a:fc0', InvalidURL)\n ))\n def test_errors(self, url, exception):\n with pytest.raises(exception):\n requests.get(url, timeout=1)\n\n def test_proxy_error(self):\n # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError\n with pytest.raises(ProxyError):\n requests.get('http://localhost:1', proxies={'http': 'non-resolvable-address'})\n\n def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure):\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'http:/badproxyurl:3128'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http://:8080'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'https://'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http:///example.com:8080'})\n\n def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n prepared = session.prepare_request(request)\n session.send(prepared)\n\n def test_respect_proxy_env_on_send_with_redirects(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n url = httpbin('redirect/1')\n print(url)\n request = requests.Request('GET', url)\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_get(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.get(httpbin())\n\n def test_respect_proxy_env_on_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.request(method='GET', url=httpbin())\n\n def test_proxy_authorization_preserved_on_request(self, httpbin):\n proxy_auth_value = \"Bearer XXX\"\n session = requests.Session()\n session.headers.update({\"Proxy-Authorization\": proxy_auth_value})\n resp = session.request(method='GET', url=httpbin('get'))\n sent_headers = resp.json().get('headers', {})\n\n assert sent_headers.get(\"Proxy-Authorization\") == proxy_auth_value\n\n def test_basicauth_with_netrc(self, httpbin):\n auth = ('user', 'pass')\n wrong_auth = ('wronguser', 'wrongpass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n old_auth = requests.sessions.get_netrc_auth\n\n try:\n def get_netrc_auth_mock(url):\n return auth\n requests.sessions.get_netrc_auth = 
get_netrc_auth_mock\n\n # Should use netrc and work.\n r = requests.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n r = requests.get(url, auth=wrong_auth)\n assert r.status_code == 401\n\n s = requests.session()\n\n # Should use netrc and work.\n r = s.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n s.auth = wrong_auth\n r = s.get(url)\n assert r.status_code == 401\n finally:\n requests.sessions.get_netrc_auth = old_auth\n\n def test_DIGEST_HTTP_200_OK_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype, 'never')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n print(r.headers['WWW-Authenticate'])\n\n s = requests.session()\n s.auth = HTTPDigestAuth('user', 'pass')\n r = s.get(url)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n r = requests.get(url)\n assert r.cookies['fake'] == 'fake_value'\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n s = requests.Session()\n s.get(url, auth=auth)\n assert s.cookies['fake'] == 'fake_value'\n\n def test_DIGEST_STREAM(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth, stream=True)\n assert r.raw.read() != b''\n\n r = requests.get(url, auth=auth, stream=False)\n assert r.raw.read() == b''\n\n def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'wrongpass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 401\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 401\n\n def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert '\"auth\"' in r.request.headers['Authorization']\n\n def test_POSTBIN_GET_POST_FILES(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_invalid_files_input(self, httpbin):\n\n url = httpbin('post')\n post = requests.post(url,\n files={\"random-file-1\": None, \"random-file-2\": 1})\n assert b'name=\"random-file-1\"' not in post.request.body\n assert b'name=\"random-file-2\"' in post.request.body\n\n def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin):\n\n 
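# Editorial note (not part of the upstream suite): TestStream below is a\n # minimal duck-typed upload body; requests only needs __len__/read to\n # send it, and tell/seek to rewind it after a partial read.\n 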
class TestStream(object):\n def __init__(self, data):\n self.data = data.encode()\n self.length = len(self.data)\n self.index = 0\n\n def __len__(self):\n return self.length\n\n def read(self, size=None):\n if size:\n ret = self.data[self.index:self.index + size]\n self.index += size\n else:\n ret = self.data[self.index:]\n self.index = self.length\n return ret\n\n def tell(self):\n return self.index\n\n def seek(self, offset, where=0):\n if where == 0:\n self.index = offset\n elif where == 1:\n self.index += offset\n elif where == 2:\n self.index = self.length + offset\n\n test = TestStream('test')\n post1 = requests.post(httpbin('post'), data=test)\n assert post1.status_code == 200\n assert post1.json()['data'] == 'test'\n\n test = TestStream('test')\n test.seek(2)\n post2 = requests.post(httpbin('post'), data=test)\n assert post2.status_code == 200\n assert post2.json()['data'] == 'st'\n\n def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, data={'some': 'data'}, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_post_with_custom_mapping(self, httpbin):\n class CustomMapping(MutableMapping):\n def __init__(self, *args, **kwargs):\n self.data = dict(*args, **kwargs)\n\n def __delitem__(self, key):\n del self.data[key]\n\n def __getitem__(self, key):\n return self.data[key]\n\n def __setitem__(self, key, value):\n self.data[key] = value\n\n def __iter__(self):\n return iter(self.data)\n\n def __len__(self):\n return len(self.data)\n\n data = CustomMapping({'some': 'data'})\n url = httpbin('post')\n found_json = requests.post(url, data=data).json().get('form')\n assert found_json == {'some': 'data'}\n\n def test_conflicting_post_params(self, httpbin):\n url = httpbin('post')\n with open('requirements-dev.txt') as f:\n with pytest.raises(ValueError):\n requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})\n with pytest.raises(ValueError):\n requests.post(url, data=u('[{\"some\": \"data\"}]'), files={'some': f})\n\n def test_request_ok_set(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n assert not r.ok\n\n def test_status_raising(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n with pytest.raises(requests.exceptions.HTTPError):\n r.raise_for_status()\n\n r = requests.get(httpbin('status', '500'))\n assert not r.ok\n\n def test_decompress_gzip(self, httpbin):\n r = requests.get(httpbin('gzip'))\n r.content.decode('ascii')\n\n @pytest.mark.parametrize(\n 'url, params', (\n ('/get', {'foo': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'foo': 'foo'}),\n ('ø', {'foo': 'foo'}),\n ))\n def test_unicode_get(self, httpbin, url, params):\n requests.get(httpbin(url), params=params)\n\n def test_unicode_header_name(self, httpbin):\n requests.put(\n httpbin('put'),\n headers={str('Content-Type'): 'application/octet-stream'},\n data='\\xff') # compat.str is unicode.\n\n def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle):\n requests.get(httpbin_secure('status', '301'), verify=httpbin_ca_bundle)\n\n def test_invalid_ca_certificate_path(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with 
pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), verify=INVALID_PATH)\n assert str(e.value) == 'Could not find a suitable TLS CA certificate bundle, invalid path: {}'.format(INVALID_PATH)\n\n def test_invalid_ssl_certificate_files(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=INVALID_PATH)\n assert str(e.value) == 'Could not find the TLS certificate file, invalid path: {}'.format(INVALID_PATH)\n\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=('.', INVALID_PATH))\n assert str(e.value) == 'Could not find the TLS key file, invalid path: {}'.format(INVALID_PATH)\n\n def test_http_with_certificate(self, httpbin):\n r = requests.get(httpbin(), cert='.')\n assert r.status_code == 200\n\n def test_https_warnings(self, nosan_server):\n \"\"\"warnings are emitted with requests.get\"\"\"\n host, port, ca_bundle = nosan_server\n if HAS_MODERN_SSL or HAS_PYOPENSSL:\n warnings_expected = ('SubjectAltNameWarning', )\n else:\n warnings_expected = ('SNIMissingWarning',\n 'InsecurePlatformWarning',\n 'SubjectAltNameWarning', )\n\n with pytest.warns(None) as warning_records:\n warnings.simplefilter('always')\n requests.get(\"https://localhost:{}/\".format(port), verify=ca_bundle)\n\n warning_records = [item for item in warning_records\n if item.category.__name__ != 'ResourceWarning']\n\n warnings_category = tuple(\n item.category.__name__ for item in warning_records)\n assert warnings_category == warnings_expected\n\n def test_certificate_failure(self, httpbin_secure):\n \"\"\"\n When underlying SSL problems occur, an SSLError is raised.\n \"\"\"\n with pytest.raises(SSLError):\n # Our local httpbin does not have a trusted CA, so this call will\n # fail if we use our default trust bundle.\n requests.get(httpbin_secure('status', '200'))\n\n def test_urlencoded_get_query_multivalued_param(self, httpbin):\n\n r = requests.get(httpbin('get'), params={'test': ['foo', 'baz']})\n assert r.status_code == 200\n assert r.url == httpbin('get?test=foo&test=baz')\n\n def test_form_encoded_post_query_multivalued_element(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data=dict(test=['foo', 'baz']))\n prep = r.prepare()\n assert prep.body == 'test=foo&test=baz'\n\n def test_different_encodings_dont_break_post(self, httpbin):\n r = requests.post(httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n params={'blah': 'asdf1234'},\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'data', (\n {'stuff': u('ëlïxr')},\n {'stuff': u('ëlïxr').encode('utf-8')},\n {'stuff': 'elixr'},\n {'stuff': 'elixr'.encode('utf-8')},\n ))\n def test_unicode_multipart_post(self, httpbin, data):\n r = requests.post(httpbin('post'),\n data=data,\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n def test_unicode_multipart_post_fieldnames(self, httpbin):\n filename = os.path.splitext(__file__)[0] + '.py'\n r = requests.Request(\n method='POST', url=httpbin('post'),\n data={'stuff'.encode('utf-8'): 'elixr'},\n files={'file': ('test_requests.py', open(filename, 'rb'))})\n prep = r.prepare()\n assert b'name=\"stuff\"' in prep.body\n assert b'name=\"b\\'stuff\\'\"' not in prep.body\n\n def test_unicode_method_name(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.request(\n method=u('POST'), url=httpbin('post'), files=files)\n assert r.status_code == 200\n\n def 
test_unicode_method_name_with_request_object(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n s = requests.Session()\n req = requests.Request(u('POST'), httpbin('post'), files=files)\n prep = s.prepare_request(req)\n assert isinstance(prep.method, builtin_str)\n assert prep.method == 'POST'\n\n resp = s.send(prep)\n assert resp.status_code == 200\n\n def test_non_prepared_request_error(self):\n s = requests.Session()\n req = requests.Request(u('POST'), '/')\n\n with pytest.raises(ValueError) as e:\n s.send(req)\n assert str(e.value) == 'You can only send PreparedRequests.'\n\n def test_custom_content_type(self, httpbin):\n r = requests.post(\n httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n files={\n 'file1': ('test_requests.py', open(__file__, 'rb')),\n 'file2': ('test_requests', open(__file__, 'rb'),\n 'text/py-content-type')})\n assert r.status_code == 200\n assert b\"text/py-content-type\" in r.request.body\n\n def test_hook_receives_request_arguments(self, httpbin):\n def hook(resp, **kwargs):\n assert resp is not None\n assert kwargs != {}\n\n s = requests.Session()\n r = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = s.prepare_request(r)\n s.send(prep)\n\n def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):\n hook = lambda x, *args, **kwargs: x\n s = requests.Session()\n s.hooks['response'].append(hook)\n r = requests.Request('GET', httpbin())\n prep = s.prepare_request(r)\n assert prep.hooks['response'] != []\n assert prep.hooks['response'] == [hook]\n\n def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):\n hook1 = lambda x, *args, **kwargs: x\n hook2 = lambda x, *args, **kwargs: x\n assert hook1 is not hook2\n s = requests.Session()\n s.hooks['response'].append(hook2)\n r = requests.Request('GET', httpbin(), hooks={'response': [hook1]})\n prep = s.prepare_request(r)\n assert prep.hooks['response'] == [hook1]\n\n def test_prepared_request_hook(self, httpbin):\n def hook(resp, **kwargs):\n resp.hook_working = True\n return resp\n\n req = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = req.prepare()\n\n s = requests.Session()\n s.proxies = getproxies()\n resp = s.send(prep)\n\n assert hasattr(resp, 'hook_working')\n\n def test_prepared_from_session(self, httpbin):\n class DummyAuth(requests.auth.AuthBase):\n def __call__(self, r):\n r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'\n return r\n\n req = requests.Request('GET', httpbin('headers'))\n assert not req.auth\n\n s = requests.Session()\n s.auth = DummyAuth()\n\n prep = s.prepare_request(req)\n resp = s.send(prep)\n\n assert resp.json()['headers'][\n 'Dummy-Auth-Test'] == 'dummy-auth-test-ok'\n\n def test_prepare_request_with_bytestring_url(self):\n req = requests.Request('GET', b'https://httpbin.org/')\n s = requests.Session()\n prep = s.prepare_request(req)\n assert prep.url == \"https://httpbin.org/\"\n\n def test_request_with_bytestring_host(self, httpbin):\n s = requests.Session()\n resp = s.request(\n 'GET',\n httpbin('cookies/set?cookie=value'),\n allow_redirects=False,\n headers={'Host': b'httpbin.org'}\n )\n assert resp.cookies.get('cookie') == 'value'\n\n def test_links(self):\n r = requests.Response()\n r.headers = {\n 'cache-control': 'public, max-age=60, s-maxage=60',\n 'connection': 'keep-alive',\n 'content-encoding': 'gzip',\n 'content-type': 'application/json; charset=utf-8',\n 'date': 'Sat, 26 Jan 2013 16:47:56 GMT',\n 'etag': '\"6ff6a73c0e446c1f61614769e3ceb778\"',\n 'last-modified': 'Sat, 26 Jan 2013 
16:22:39 GMT',\n 'link': ('<https://api.github.com/users/kennethreitz/repos?'\n 'page=2&per_page=10>; rel=\"next\", <https://api.github.'\n 'com/users/kennethreitz/repos?page=7&per_page=10>; '\n ' rel=\"last\"'),\n 'server': 'GitHub.com',\n 'status': '200 OK',\n 'vary': 'Accept',\n 'x-content-type-options': 'nosniff',\n 'x-github-media-type': 'github.beta',\n 'x-ratelimit-limit': '60',\n 'x-ratelimit-remaining': '57'\n }\n assert r.links['next']['rel'] == 'next'\n\n def test_cookie_parameters(self):\n key = 'some_cookie'\n value = 'some_value'\n secure = True\n domain = 'test.com'\n rest = {'HttpOnly': True}\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, secure=secure, domain=domain, rest=rest)\n\n assert len(jar) == 1\n assert 'some_cookie' in jar\n\n cookie = list(jar)[0]\n assert cookie.secure == secure\n assert cookie.domain == domain\n assert cookie._rest['HttpOnly'] == rest['HttpOnly']\n\n def test_cookie_as_dict_keeps_len(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert len(jar) == 2\n assert len(d1) == 2\n assert len(d2) == 2\n assert len(d3) == 2\n\n def test_cookie_as_dict_keeps_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert d1['some_cookie'] == 'some_value'\n assert d2['some_cookie'] == 'some_value'\n assert d3['some_cookie1'] == 'some_value1'\n\n def test_cookie_as_dict_keys(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n keys = jar.keys()\n assert keys == list(keys)\n # make sure one can use keys multiple times\n assert list(keys) == list(keys)\n\n def test_cookie_as_dict_values(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n values = jar.values()\n assert values == list(values)\n # make sure one can use values multiple times\n assert list(values) == list(values)\n\n def test_cookie_as_dict_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n items = jar.items()\n assert items == list(items)\n # make sure one can use items multiple times\n assert list(items) == list(items)\n\n def test_cookie_duplicate_names_different_domains(self):\n key = 'some_cookie'\n value = 'some_value'\n domain1 = 'test1.com'\n domain2 = 'test2.com'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, domain=domain1)\n jar.set(key, value, domain=domain2)\n assert key in jar\n items = jar.items()\n assert len(items) == 2\n\n # Verify that CookieConflictError is raised if domain is not specified\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n # Verify that CookieConflictError is not raised if domain is specified\n cookie = jar.get(key, domain=domain1)\n assert cookie == value\n\n def 
test_cookie_duplicate_names_raises_cookie_conflict_error(self):\n key = 'some_cookie'\n value = 'some_value'\n path = 'some_path'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, path=path)\n jar.set(key, value)\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n def test_cookie_policy_copy(self):\n class MyCookiePolicy(cookielib.DefaultCookiePolicy):\n pass\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set_policy(MyCookiePolicy())\n assert isinstance(jar.copy().get_policy(), MyCookiePolicy)\n\n def test_time_elapsed_blank(self, httpbin):\n r = requests.get(httpbin('get'))\n td = r.elapsed\n total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6)\n assert total_seconds > 0.0\n\n def test_empty_response_has_content_none(self):\n r = requests.Response()\n assert r.content is None\n\n def test_response_is_iterable(self):\n r = requests.Response()\n io = StringIO.StringIO('abc')\n read_ = io.read\n\n def read_mock(amt, decode_content=None):\n return read_(amt)\n setattr(io, 'read', read_mock)\n r.raw = io\n assert next(iter(r))\n io.close()\n\n def test_response_decode_unicode(self):\n \"\"\"When called with decode_unicode, Response.iter_content should always\n return unicode.\n \"\"\"\n r = requests.Response()\n r._content_consumed = True\n r._content = b'the content'\n r.encoding = 'ascii'\n\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n # also for streaming\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n r.encoding = 'ascii'\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n def test_response_reason_unicode(self):\n # check for unicode HTTP status\n r = requests.Response()\n r.url = u'unicode URL'\n r.reason = u'Komponenttia ei löydy'.encode('utf-8')\n r.status_code = 404\n r.encoding = None\n assert not r.ok # old behaviour - crashes here\n\n def test_response_reason_unicode_fallback(self):\n # check raise_status falls back to ISO-8859-1\n r = requests.Response()\n r.url = 'some url'\n reason = u'Komponenttia ei löydy'\n r.reason = reason.encode('latin-1')\n r.status_code = 500\n r.encoding = None\n with pytest.raises(requests.exceptions.HTTPError) as e:\n r.raise_for_status()\n assert reason in e.value.args[0]\n\n def test_response_chunk_size_type(self):\n \"\"\"Ensure that chunk_size is passed as None or an integer, otherwise\n raise a TypeError.\n \"\"\"\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(1)\n assert all(len(chunk) == 1 for chunk in chunks)\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(None)\n assert list(chunks) == [b'the content']\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n with pytest.raises(TypeError):\n chunks = r.iter_content(\"1024\")\n\n def test_request_and_response_are_pickleable(self, httpbin):\n r = requests.get(httpbin('get'))\n\n # verify we can pickle the original request\n assert pickle.loads(pickle.dumps(r.request))\n\n # verify we can pickle the response and that we have access to\n # the original request.\n pr = pickle.loads(pickle.dumps(r))\n assert r.request.url == pr.request.url\n assert r.request.headers == pr.request.headers\n\n def test_prepared_request_is_pickleable(self, httpbin):\n p = requests.Request('GET', httpbin('get')).prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = 
pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_file_is_pickleable(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.Request('POST', httpbin('post'), files=files)\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_hook_is_pickleable(self, httpbin):\n r = requests.Request('GET', httpbin('get'), hooks=default_hooks())\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n assert r.hooks == p.hooks\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_cannot_send_unprepared_requests(self, httpbin):\n r = requests.Request(url=httpbin())\n with pytest.raises(ValueError):\n requests.Session().send(r)\n\n def test_http_error(self):\n error = requests.exceptions.HTTPError()\n assert not error.response\n response = requests.Response()\n error = requests.exceptions.HTTPError(response=response)\n assert error.response == response\n error = requests.exceptions.HTTPError('message', response=response)\n assert str(error) == 'message'\n assert error.response == response\n\n def test_session_pickling(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n\n s = pickle.loads(pickle.dumps(s))\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n assert r.status_code == 200\n\n def test_fixes_1329(self, httpbin):\n \"\"\"Ensure that header updates are done case-insensitively.\"\"\"\n s = requests.Session()\n s.headers.update({'ACCEPT': 'BOGUS'})\n s.headers.update({'accept': 'application/json'})\n r = s.get(httpbin('get'))\n headers = r.request.headers\n assert headers['accept'] == 'application/json'\n assert headers['Accept'] == 'application/json'\n assert headers['ACCEPT'] == 'application/json'\n\n def test_uppercase_scheme_redirect(self, httpbin):\n parts = urlparse(httpbin('html'))\n url = \"HTTP://\" + parts.netloc + parts.path\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n assert r.status_code == 200\n assert r.url.lower() == url.lower()\n\n def test_transport_adapter_ordering(self):\n s = requests.Session()\n order = ['https://', 'http://']\n assert order == list(s.adapters)\n s.mount('http://git', HTTPAdapter())\n s.mount('http://github', HTTPAdapter())\n s.mount('http://github.com', HTTPAdapter())\n s.mount('http://github.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://github.com',\n 'http://github',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s.mount('http://gittip', HTTPAdapter())\n s.mount('http://gittip.com', HTTPAdapter())\n s.mount('http://gittip.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://gittip.com/about/',\n 'http://github.com',\n 'http://gittip.com',\n 'http://github',\n 'http://gittip',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == 
list(s.adapters)\n s2 = requests.Session()\n s2.adapters = {'http://': HTTPAdapter()}\n s2.mount('https://', HTTPAdapter())\n assert 'http://' in s2.adapters\n assert 'https://' in s2.adapters\n\n def test_session_get_adapter_prefix_matching(self):\n prefix = 'https://example.com'\n more_specific_prefix = prefix + '/some/path'\n\n url_matching_only_prefix = prefix + '/another/path'\n url_matching_more_specific_prefix = more_specific_prefix + '/longer/path'\n url_not_matching_prefix = 'https://another.example.com/'\n\n s = requests.Session()\n prefix_adapter = HTTPAdapter()\n more_specific_prefix_adapter = HTTPAdapter()\n s.mount(prefix, prefix_adapter)\n s.mount(more_specific_prefix, more_specific_prefix_adapter)\n\n assert s.get_adapter(url_matching_only_prefix) is prefix_adapter\n assert s.get_adapter(url_matching_more_specific_prefix) is more_specific_prefix_adapter\n assert s.get_adapter(url_not_matching_prefix) not in (prefix_adapter, more_specific_prefix_adapter)\n\n def test_session_get_adapter_prefix_matching_mixed_case(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix = mixed_case_prefix + '/full_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix) is my_adapter\n\n def test_session_get_adapter_prefix_matching_is_case_insensitive(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix_with_different_case = 'HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter\n\n def test_header_remove_is_case_insensitive(self, httpbin):\n # From issue #1321\n s = requests.Session()\n s.headers['foo'] = 'bar'\n r = s.get(httpbin('get'), headers={'FOO': None})\n assert 'foo' not in r.request.headers\n\n def test_params_are_merged_case_sensitive(self, httpbin):\n s = requests.Session()\n s.params['foo'] = 'bar'\n r = s.get(httpbin('get'), params={'FOO': 'bar'})\n assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'}\n\n def test_long_authinfo_in_url(self):\n url = 'http://{}:{}@{}:9000/path?query#frag'.format(\n 'E8A3BE87-9E3F-4620-8858-95478E385B5B',\n 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E',\n 'exactly-------------sixty-----------three------------characters',\n )\n r = requests.Request('GET', url).prepare()\n assert r.url == url\n\n def test_header_keys_are_native(self, httpbin):\n headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'}\n r = requests.Request('GET', httpbin('get'), headers=headers)\n p = r.prepare()\n\n # This is testing that they are builtin strings. 
A bit weird, but there\n # we go.\n assert 'unicode' in p.headers.keys()\n assert 'byte' in p.headers.keys()\n\n def test_header_validation(self, httpbin):\n \"\"\"Ensure prepare_headers regex isn't flagging valid header contents.\"\"\"\n headers_ok = {'foo': 'bar baz qux',\n 'bar': u'fbbq'.encode('utf8'),\n 'baz': '',\n 'qux': '1'}\n r = requests.get(httpbin('get'), headers=headers_ok)\n assert r.request.headers['foo'] == headers_ok['foo']\n\n def test_header_value_not_str(self, httpbin):\n \"\"\"Ensure the header value is of type string or bytes as\n per discussion in GH issue #3386\n \"\"\"\n headers_int = {'foo': 3}\n headers_dict = {'bar': {'foo': 'bar'}}\n headers_list = {'baz': ['foo', 'bar']}\n\n # Test for int\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_int)\n assert 'foo' in str(excinfo.value)\n # Test for dict\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_dict)\n assert 'bar' in str(excinfo.value)\n # Test for list\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_list)\n assert 'baz' in str(excinfo.value)\n\n def test_header_no_return_chars(self, httpbin):\n \"\"\"Ensure that a header containing return character sequences raises an\n exception. Otherwise, multiple headers are created from a single string.\n \"\"\"\n headers_ret = {'foo': 'bar\r\nbaz: qux'}\n headers_lf = {'foo': 'bar\nbaz: qux'}\n headers_cr = {'foo': 'bar\rbaz: qux'}\n\n # Test for newline\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_ret)\n # Test for line feed\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_lf)\n # Test for carriage return\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_cr)\n\n def test_header_no_leading_space(self, httpbin):\n \"\"\"Ensure headers containing leading whitespace raise\n InvalidHeader Error before sending.\n \"\"\"\n headers_space = {'foo': ' bar'}\n headers_tab = {'foo': '\tbar'}\n\n # Test for whitespace\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_space)\n # Test for tab\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_tab)\n\n @pytest.mark.parametrize('files', ('foo', b'foo', bytearray(b'foo')))\n def test_can_send_objects_with_files(self, httpbin, files):\n data = {'a': 'this is a string'}\n files = {'b': files}\n r = requests.Request('POST', httpbin('post'), data=data, files=files)\n p = r.prepare()\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_can_send_file_object_with_non_string_filename(self, httpbin):\n f = io.BytesIO()\n f.name = 2\n r = requests.Request('POST', httpbin('post'), files={'f': f})\n p = r.prepare()\n\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_autoset_header_values_are_native(self, httpbin):\n data = 'this is a string'\n length = '16'\n req = requests.Request('POST', httpbin('post'), data=data)\n p = req.prepare()\n\n assert p.headers['Content-Length'] == length\n\n def test_nonhttp_schemes_dont_check_URLs(self):\n test_urls = (\n 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==',\n 'file:///etc/passwd',\n 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431',\n )\n for test_url in test_urls:\n req = requests.Request('GET', test_url)\n preq = req.prepare()\n assert test_url == preq.url\n\n def 
test_auth_is_stripped_on_http_downgrade(self, httpbin, httpbin_secure, httpbin_ca_bundle):\n r = requests.get(\n httpbin_secure('redirect-to'),\n params={'url': httpbin('get')},\n auth=('user', 'pass'),\n verify=httpbin_ca_bundle\n )\n assert r.history[0].request.headers['Authorization']\n assert 'Authorization' not in r.request.headers\n\n def test_auth_is_retained_for_redirect_on_host(self, httpbin):\n r = requests.get(httpbin('redirect/1'), auth=('user', 'pass'))\n h1 = r.history[0].request.headers['Authorization']\n h2 = r.request.headers['Authorization']\n\n assert h1 == h2\n\n def test_should_strip_auth_host_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com/foo', 'http://another.example.com/')\n\n def test_should_strip_auth_http_downgrade(self):\n s = requests.Session()\n assert s.should_strip_auth('https://example.com/foo', 'http://example.com/bar')\n\n def test_should_strip_auth_https_upgrade(self):\n s = requests.Session()\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com:80/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com:443/bar')\n # Non-standard ports should trigger stripping\n assert s.should_strip_auth('http://example.com:8080/foo', 'https://example.com/bar')\n assert s.should_strip_auth('http://example.com/foo', 'https://example.com:8443/bar')\n\n def test_should_strip_auth_port_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com:1234/foo', 'https://example.com:4321/bar')\n\n @pytest.mark.parametrize(\n 'old_uri, new_uri', (\n ('https://example.com:443/foo', 'https://example.com/bar'),\n ('http://example.com:80/foo', 'http://example.com/bar'),\n ('https://example.com/foo', 'https://example.com:443/bar'),\n ('http://example.com/foo', 'http://example.com:80/bar')\n ))\n def test_should_strip_auth_default_port(self, old_uri, new_uri):\n s = requests.Session()\n assert not s.should_strip_auth(old_uri, new_uri)\n\n def test_manual_redirect_with_partial_body_read(self, httpbin):\n s = requests.Session()\n r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True)\n assert r1.is_redirect\n rg = s.resolve_redirects(r1, r1.request, stream=True)\n\n # read only the first eight bytes of the response body,\n # then follow the redirect\n r1.iter_content(8)\n r2 = next(rg)\n assert r2.is_redirect\n\n # read all of the response via iter_content,\n # then follow the redirect\n for _ in r2.iter_content():\n pass\n r3 = next(rg)\n assert not r3.is_redirect\n\n def test_prepare_body_position_non_stream(self):\n data = b'the data'\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is None\n\n def test_rewind_body(self):\n data = io.BytesIO(b'the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n assert prep.body.read() == b'the data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b'the data'\n\n def test_rewind_partially_read_body(self):\n data = io.BytesIO(b'the data')\n data.read(4) # read some data\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 4\n assert prep.body.read() == b'data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n 
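# (editorial note, not in the upstream suite: rewind_body() seeks the\n # stream back to the _body_position recorded when the request was prepared)\n 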
requests.utils.rewind_body(prep)\n assert prep.body.read() == b'data'\n\n def test_rewind_body_no_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def test_rewind_body_failed_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def seek(self, pos, whence=0):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'error occurred when rewinding request body' in str(e)\n\n def test_rewind_body_failed_tell(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is not None\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def _patch_adapter_gzipped_redirect(self, session, url):\n adapter = session.get_adapter(url=url)\n org_build_response = adapter.build_response\n self._patched_response = False\n\n def build_response(*args, **kwargs):\n resp = org_build_response(*args, **kwargs)\n if not self._patched_response:\n resp.raw.headers['content-encoding'] = 'gzip'\n self._patched_response = True\n return resp\n\n adapter.build_response = build_response\n\n def test_redirect_with_wrong_gzipped_header(self, httpbin):\n s = requests.Session()\n url = httpbin('redirect/1')\n self._patch_adapter_gzipped_redirect(s, url)\n s.get(url)\n\n @pytest.mark.parametrize(\n 'username, password, auth_str', (\n ('test', 'test', 'Basic dGVzdDp0ZXN0'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8'), 'Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA=='),\n ))\n def test_basic_auth_str_is_always_native(self, username, password, auth_str):\n s = _basic_auth_str(username, password)\n assert isinstance(s, builtin_str)\n assert s == auth_str\n\n def test_requests_history_is_saved(self, httpbin):\n r = requests.get(httpbin('redirect/5'))\n total = r.history[-1].history\n i = 0\n for item in r.history:\n assert item.history == total[0:i]\n i += 1\n\n def test_json_param_post_content_type_works(self, httpbin):\n r = requests.post(\n httpbin('post'),\n json={'life': 42}\n )\n assert r.status_code == 200\n assert 'application/json' in r.request.headers['Content-Type']\n assert {'life': 42} == r.json()['json']\n\n def test_json_param_post_should_not_override_data_param(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data={'stuff': 'elixr'},\n json={'music': 'flute'})\n prep = r.prepare()\n assert 'stuff=elixr' == prep.body\n\n def test_response_iter_lines(self, httpbin):\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n it = r.iter_lines()\n next(it)\n assert len(list(it)) == 3\n\n def test_response_context_manager(self, httpbin):\n with requests.get(httpbin('stream/4'), stream=True) as 
response:\n assert isinstance(response, requests.Response)\n\n assert response.raw.closed\n\n def test_unconsumed_session_response_closes_connection(self, httpbin):\n s = requests.session()\n\n with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response:\n pass\n\n assert response._content_consumed is False\n assert response.raw.closed\n\n @pytest.mark.xfail\n def test_response_iter_lines_reentrant(self, httpbin):\n \"\"\"Response.iter_lines() is not reentrant safe\"\"\"\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n next(r.iter_lines())\n assert len(list(r.iter_lines())) == 3\n\n def test_session_close_proxy_clear(self, mocker):\n proxies = {\n 'one': mocker.Mock(),\n 'two': mocker.Mock(),\n }\n session = requests.Session()\n mocker.patch.dict(session.adapters['http://'].proxy_manager, proxies)\n session.close()\n proxies['one'].clear.assert_called_once_with()\n proxies['two'].clear.assert_called_once_with()\n\n def test_proxy_auth(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:pass@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}\n\n def test_proxy_auth_empty_pass(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjo='}\n\n def test_response_json_when_content_is_None(self, httpbin):\n r = requests.get(httpbin('/status/204'))\n # Make sure r.content is None\n r.status_code = 0\n r._content = False\n r._content_consumed = False\n\n assert r.content is None\n with pytest.raises(ValueError):\n r.json()\n\n def test_response_without_release_conn(self):\n \"\"\"Test `close` call for non-urllib3-like raw objects.\n Should work when `release_conn` attr doesn't exist on `response.raw`.\n \"\"\"\n resp = requests.Response()\n resp.raw = StringIO.StringIO('test')\n assert not resp.raw.closed\n resp.close()\n assert resp.raw.closed\n\n def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size > 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'test data')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' not in prepared_request.headers\n assert 'Content-Length' in prepared_request.headers\n\n def test_chunked_upload_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that requests with a generator body are sent using\n Transfer-Encoding: chunked, not a Content-Length header.\n \"\"\"\n data = (i for i in [b'a', b'b', b'c'])\n url = httpbin('post')\n r = requests.Request('POST', url, data=data)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_custom_redirect_mixin(self, httpbin):\n \"\"\"Tests a custom mixin to overwrite 
``get_redirect_target``.\n\n Ensures a subclassed ``requests.Session`` can handle a certain type of\n malformed redirect responses.\n\n 1. original request receives a proper response: 302 redirect\n 2. following the redirect, a malformed response is given:\n status code = HTTP 200\n location = alternate url\n 3. the custom session catches the edge case and follows the redirect\n \"\"\"\n url_final = httpbin('html')\n querystring_malformed = urlencode({'location': url_final})\n url_redirect_malformed = httpbin('response-headers?%s' % querystring_malformed)\n querystring_redirect = urlencode({'url': url_redirect_malformed})\n url_redirect = httpbin('redirect-to?%s' % querystring_redirect)\n urls_test = [url_redirect,\n url_redirect_malformed,\n url_final,\n ]\n\n class CustomRedirectSession(requests.Session):\n def get_redirect_target(self, resp):\n # default behavior\n if resp.is_redirect:\n return resp.headers['location']\n # edge case - check to see if 'location' is in headers anyways\n location = resp.headers.get('location')\n if location and (location != resp.url):\n return location\n return None\n\n session = CustomRedirectSession()\n r = session.get(urls_test[0])\n assert len(r.history) == 2\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n assert r.history[1].status_code == 200\n assert not r.history[1].is_redirect\n assert r.url == urls_test[2]\n\n\nclass TestCaseInsensitiveDict:\n\n @pytest.mark.parametrize(\n 'cid', (\n CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}),\n CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')]),\n CaseInsensitiveDict(FOO='foo', BAr='bar'),\n ))\n def test_init(self, cid):\n assert len(cid) == 2\n assert 'foo' in cid\n assert 'bar' in cid\n\n def test_docstring_example(self):\n cid = CaseInsensitiveDict()\n cid['Accept'] = 'application/json'\n assert cid['aCCEPT'] == 'application/json'\n assert list(cid) == ['Accept']\n\n def test_len(self):\n cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})\n cid['A'] = 'a'\n assert len(cid) == 2\n\n def test_getitem(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n\n def test_fixes_649(self):\n \"\"\"__setitem__ should behave case-insensitively.\"\"\"\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['Spam'] = 'twoval'\n cid['sPAM'] = 'redval'\n cid['SPAM'] = 'blueval'\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n assert list(cid.keys()) == ['SPAM']\n\n def test_delitem(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n del cid['sPam']\n assert 'spam' not in cid\n assert len(cid) == 0\n\n def test_contains(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n assert 'Spam' in cid\n assert 'spam' in cid\n assert 'SPAM' in cid\n assert 'sPam' in cid\n assert 'notspam' not in cid\n\n def test_get(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['SPAM'] = 'blueval'\n assert cid.get('spam') == 'blueval'\n assert cid.get('SPAM') == 'blueval'\n assert cid.get('sPam') == 'blueval'\n assert cid.get('notspam', 'default') == 'default'\n\n def test_update(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'blueval'\n cid.update({'sPam': 'notblueval'})\n assert cid['spam'] == 'notblueval'\n cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})\n cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'})\n assert len(cid) == 2\n assert cid['foo'] == 'anotherfoo'\n assert cid['bar'] == 'anotherbar'\n\n def 
test_update_retains_unchanged(self):\n cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})\n cid.update({'foo': 'newfoo'})\n assert cid['bar'] == 'bar'\n\n def test_iter(self):\n cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})\n keys = frozenset(['Spam', 'Eggs'])\n assert frozenset(iter(cid)) == keys\n\n def test_equality(self):\n cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'})\n othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'})\n assert cid == othercid\n del othercid['spam']\n assert cid != othercid\n assert cid == {'spam': 'blueval', 'eggs': 'redval'}\n assert cid != object()\n\n def test_setdefault(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid.setdefault('spam', 'notblueval') == 'blueval'\n assert cid.setdefault('notspam', 'notblueval') == 'notblueval'\n\n def test_lower_items(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())\n lowerkeyset = frozenset(['accept', 'user-agent'])\n assert keyset == lowerkeyset\n\n def test_preserve_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(['Accept', 'user-Agent'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_preserve_last_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid.update({'ACCEPT': 'application/json'})\n cid['USER-AGENT'] = 'requests'\n keyset = frozenset(['ACCEPT', 'USER-AGENT'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_copy(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid_copy = cid.copy()\n assert cid == cid_copy\n cid['changed'] = True\n assert cid != cid_copy\n\n\nclass TestMorselToCookieExpires:\n \"\"\"Tests for morsel_to_cookie when morsel contains expires.\"\"\"\n\n def test_expires_valid_str(self):\n \"\"\"Test case where we convert expires from string time.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires == 1\n\n @pytest.mark.parametrize(\n 'value, exception', (\n (100, TypeError),\n ('woops', ValueError),\n ))\n def test_expires_invalid_int(self, value, exception):\n \"\"\"Test case where an invalid type is passed for expires.\"\"\"\n morsel = Morsel()\n morsel['expires'] = value\n with pytest.raises(exception):\n morsel_to_cookie(morsel)\n\n def test_expires_none(self):\n \"\"\"Test case where expires is None.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = None\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires is None\n\n\nclass TestMorselToCookieMaxAge:\n\n \"\"\"Tests for morsel_to_cookie when morsel contains max-age.\"\"\"\n\n def test_max_age_valid_int(self):\n \"\"\"Test case where a valid max age in seconds is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 60\n cookie = morsel_to_cookie(morsel)\n assert isinstance(cookie.expires, int)\n\n def test_max_age_invalid_str(self):\n \"\"\"Test case where a invalid max age is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 'woops'\n with pytest.raises(TypeError):\n morsel_to_cookie(morsel)\n\n\nclass TestTimeout:\n\n def test_stream_timeout(self, httpbin):\n 
try:\n requests.get(httpbin('delay/10'), timeout=2.0)\n except requests.exceptions.Timeout as e:\n assert 'Read timed out' in e.args[0].args[0]\n\n @pytest.mark.parametrize(\n 'timeout, error_text', (\n ((3, 4, 5), '(connect, read)'),\n ('foo', 'must be an int, float or None'),\n ))\n def test_invalid_timeout(self, httpbin, timeout, error_text):\n with pytest.raises(ValueError) as e:\n requests.get(httpbin('get'), timeout=timeout)\n assert error_text in str(e)\n\n @pytest.mark.parametrize(\n 'timeout', (\n None,\n Urllib3Timeout(connect=None, read=None)\n ))\n def test_none_timeout(self, httpbin, timeout):\n \"\"\"Check that you can set None as a valid timeout value.\n\n To actually test this behavior, we'd want to check that setting the\n timeout to None actually lets the request block past the system default\n timeout. However, this would make the test suite unbearably slow.\n Instead we verify that setting the timeout to None does not prevent the\n request from succeeding.\n \"\"\"\n r = requests.get(httpbin('get'), timeout=timeout)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'timeout', (\n (None, 0.1),\n Urllib3Timeout(connect=None, read=0.1)\n ))\n def test_read_timeout(self, httpbin, timeout):\n try:\n requests.get(httpbin('delay/10'), timeout=timeout)\n pytest.fail('The recv() request should time out.')\n except ReadTimeout:\n pass\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, None),\n Urllib3Timeout(connect=0.1, read=None)\n ))\n def test_connect_timeout(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout as e:\n assert isinstance(e, ConnectionError)\n assert isinstance(e, Timeout)\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, 0.1),\n Urllib3Timeout(connect=0.1, read=0.1)\n ))\n def test_total_timeout_connect(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout:\n pass\n\n def test_encoded_methods(self, httpbin):\n \"\"\"See: https://github.com/psf/requests/issues/2316\"\"\"\n r = requests.request(b'GET', httpbin('get'))\n assert r.ok\n\n\nSendCall = collections.namedtuple('SendCall', ('args', 'kwargs'))\n\n\nclass RedirectSession(SessionRedirectMixin):\n def __init__(self, order_of_redirects):\n self.redirects = order_of_redirects\n self.calls = []\n self.max_redirects = 30\n self.cookies = {}\n self.trust_env = False\n\n def send(self, *args, **kwargs):\n self.calls.append(SendCall(args, kwargs))\n return self.build_response()\n\n def build_response(self):\n request = self.calls[-1].args[0]\n r = requests.Response()\n\n try:\n r.status_code = int(self.redirects.pop(0))\n except IndexError:\n r.status_code = 200\n\n r.headers = CaseInsensitiveDict({'Location': '/'})\n r.raw = self._build_raw()\n r.request = request\n return r\n\n def _build_raw(self):\n string = StringIO.StringIO('')\n setattr(string, 'release_conn', lambda *args: args)\n return string\n\n\ndef test_json_encodes_as_bytes():\n # urllib3 expects bodies as bytes-like objects\n body = {\"key\": \"value\"}\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='https://www.example.com/',\n json=body\n )\n assert isinstance(p.body, bytes)\n\n\ndef test_requests_are_updated_each_time(httpbin):\n session = RedirectSession([303, 307])\n prep = requests.Request('POST', httpbin('post')).prepare()\n r0 = session.send(prep)\n assert r0.request.method == 'POST'\n assert session.calls[-1] == SendCall((r0.request,), 
{})\n redirect_generator = session.resolve_redirects(r0, prep)\n default_keyword_args = {\n 'stream': False,\n 'verify': True,\n 'cert': None,\n 'timeout': None,\n 'allow_redirects': False,\n 'proxies': {},\n }\n for response in redirect_generator:\n assert response.request.method == 'GET'\n send_call = SendCall((response.request,), default_keyword_args)\n assert session.calls[-1] == send_call\n\n\n@pytest.mark.parametrize(\"var,url,proxy\", [\n ('http_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('https_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n])\ndef test_proxy_env_vars_override_default(var, url, proxy):\n session = requests.Session()\n prep = PreparedRequest()\n prep.prepare(method='GET', url=url)\n\n kwargs = {\n var: proxy\n }\n scheme = urlparse(url).scheme\n with override_environ(**kwargs):\n proxies = session.rebuild_proxies(prep, {})\n assert scheme in proxies\n assert proxies[scheme] == proxy\n\n\n@pytest.mark.parametrize(\n 'data', (\n (('a', 'b'), ('c', 'd')),\n (('c', 'd'), ('a', 'b')),\n (('a', 'b'), ('c', 'd'), ('e', 'f')),\n ))\ndef test_data_argument_accepts_tuples(data):\n \"\"\"Ensure that the data argument will accept tuples of strings\n and properly encode them.\n \"\"\"\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='http://www.example.com',\n data=data,\n hooks=default_hooks()\n )\n assert p.body == urlencode(data)\n\n\n@pytest.mark.parametrize(\n 'kwargs', (\n None,\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks()\n },\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks(),\n 'cookies': {'foo': 'bar'}\n },\n {\n 'method': 'GET',\n 'url': u('http://www.example.com/üniçø∂é')\n },\n ))\ndef test_prepared_copy(kwargs):\n p = PreparedRequest()\n if kwargs:\n p.prepare(**kwargs)\n copy = p.copy()\n for attr in ('method', 'url', 'headers', '_cookies', 'body', 'hooks'):\n assert getattr(p, attr) == getattr(copy, attr)\n\n\ndef test_urllib3_retries(httpbin):\n from urllib3.util import Retry\n s = requests.Session()\n s.mount('http://', HTTPAdapter(max_retries=Retry(\n total=2, status_forcelist=[500]\n )))\n\n with pytest.raises(RetryError):\n s.get(httpbin('status/500'))\n\n\ndef test_urllib3_pool_connection_closed(httpbin):\n s = requests.Session()\n s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0))\n\n try:\n s.get(httpbin('status/200'))\n except ConnectionError as e:\n assert u\"Pool is closed.\" in str(e)\n\n\nclass TestPreparingURLs(object):\n @pytest.mark.parametrize(\n 'url,expected',\n (\n ('http://google.com', 'http://google.com/'),\n (u'http://ジェーピーニック.jp', u'http://xn--hckqz9bzb1cyrb.jp/'),\n (u'http://xn--n3h.net/', u'http://xn--n3h.net/'),\n (\n u'http://ジェーピーニック.jp'.encode('utf-8'),\n u'http://xn--hckqz9bzb1cyrb.jp/'\n ),\n (\n u'http://straße.de/straße',\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://straße.de/straße'.encode('utf-8'),\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße',\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße'.encode('utf-8'),\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n b'http://xn--n3h.net/',\n u'http://xn--n3h.net/'\n ),\n (\n b'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n 
u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n ),\n (\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n )\n )\n )\n def test_preparing_url(self, url, expected):\n\n def normalize_percent_encode(x):\n # Helper function that normalizes equivalent \n # percent-encoded bytes before comparisons\n for c in re.findall(r'%[a-fA-F0-9]{2}', x):\n x = x.replace(c, c.upper())\n return x\n \n r = requests.Request('GET', url=url)\n p = r.prepare()\n assert normalize_percent_encode(p.url) == expected\n\n @pytest.mark.parametrize(\n 'url',\n (\n b\"http://*.google.com\",\n b\"http://*\",\n u\"http://*.google.com\",\n u\"http://*\",\n u\"http://☃.net/\"\n )\n )\n def test_preparing_bad_url(self, url):\n r = requests.Request('GET', url=url)\n with pytest.raises(requests.exceptions.InvalidURL):\n r.prepare()\n\n @pytest.mark.parametrize(\n 'url, exception',\n (\n ('http://localhost:-1', InvalidURL),\n )\n )\n def test_redirecting_to_bad_url(self, httpbin, url, exception):\n with pytest.raises(exception):\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n\n @pytest.mark.parametrize(\n 'input, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n b\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n b\"data:SSDimaUgUHl0aG9uIQ==\",\n u\"data:SSDimaUgUHl0aG9uIQ==\",\n )\n )\n )\n def test_url_mutation(self, input, expected):\n \"\"\"\n This test validates that we correctly exclude some URLs from\n preparation, and that we handle others. 
Specifically, it tests that\n any URL whose scheme doesn't begin with \"http\" is left alone, and\n those whose scheme *does* begin with \"http\" are mutated.\n \"\"\"\n r = requests.Request('GET', url=input)\n p = r.prepare()\n assert p.url == expected\n\n @pytest.mark.parametrize(\n 'input, params, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n b\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n )\n )\n def test_parameters_for_nonstandard_schemes(self, input, params, expected):\n \"\"\"\n Setting parameters for nonstandard schemes is allowed if those schemes\n begin with \"http\", and is forbidden otherwise.\n \"\"\"\n r = requests.Request('GET', url=input, params=params)\n p = r.prepare()\n assert p.url == expected\n\n def test_post_json_nan(self, httpbin):\n data = {\"foo\": float(\"nan\")}\n with pytest.raises(requests.exceptions.InvalidJSONError):\n r = requests.post(httpbin('post'), json=data)\n\n def test_json_decode_compatibility(self, httpbin):\n r = requests.get(httpbin('bytes/20'))\n with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:\n r.json()\n assert isinstance(excinfo.value, RequestException)\n assert isinstance(excinfo.value, JSONDecodeError)\n assert r.text not in str(excinfo.value)\n\n @pytest.mark.skipif(not is_py3, reason=\"doc attribute is only present on py3\")\n def test_json_decode_persists_doc_attr(self, httpbin):\n r = requests.get(httpbin('bytes/20'))\n with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:\n r.json()\n assert excinfo.value.doc == r.text\n",
"path": "tests/test_requests.py"
}
] | 13_2 | python | import sys
import urllib3
import pytest

# Requests to this URL should always fail with a connection timeout (nothing
# listening on that port)
TARPIT = "http://10.255.255.1"

# This is to avoid waiting the timeout of using TARPIT
INVALID_PROXY = "http://localhost:1"


class TestRequests:
    from unittest import mock
    from requests.exceptions import SSLError as RequestsSSLError
    from requests.exceptions import (
        ChunkedEncodingError,
        ConnectionError,
        ContentDecodingError,
    )

    @pytest.mark.parametrize(
        "exception, args, expected",
        (
            (urllib3.exceptions.ProtocolError, tuple(), ChunkedEncodingError),
            (urllib3.exceptions.DecodeError, tuple(), ContentDecodingError),
            (urllib3.exceptions.ReadTimeoutError, (None, "", ""), ConnectionError),
            (urllib3.exceptions.SSLError, tuple(), RequestsSSLError),
        ),
    )
    def test_iter_content_wraps_exceptions(self, httpbin, exception, args, expected):
        import requests
        from unittest import mock
        r = requests.Response()
        r.raw = mock.Mock()
        # ReadTimeoutError can't be initialized by mock,
        # so we'll manually create the instance with args
        r.raw.stream.side_effect = exception(*args)
        with pytest.raises(expected):
            next(r.iter_content(1024))


def main():
    import pytest
    # Run the pytest tests programmatically
    exit_code = pytest.main(["-v", __file__])
    # Exit with status code 1 if any test fails, otherwise 0
    if exit_code != 0:
        sys.exit(1)
    else:
        sys.exit(0)


if __name__ == '__main__':
    main()
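
# For reference, a minimal sketch of the exception-wrapping behavior the test
# above exercises. It is modeled on the generator used inside requests'
# Response.iter_content; the function name and structure here are
# illustrative assumptions, not the library's exact code.
def _stream_with_wrapped_exceptions(raw, chunk_size=1024):
    from urllib3.exceptions import (
        DecodeError,
        ProtocolError,
        ReadTimeoutError,
        SSLError,
    )
    from requests.exceptions import (
        ChunkedEncodingError,
        ConnectionError,
        ContentDecodingError,
    )
    from requests.exceptions import SSLError as RequestsSSLError
    try:
        for chunk in raw.stream(chunk_size, decode_content=True):
            yield chunk
    except ProtocolError as e:
        # A truncated or malformed chunked body surfaces as ChunkedEncodingError
        raise ChunkedEncodingError(e)
    except DecodeError as e:
        # A content-decoding failure (e.g. bad gzip) becomes ContentDecodingError
        raise ContentDecodingError(e)
    except ReadTimeoutError as e:
        # A socket read timeout is reported as requests' ConnectionError
        raise ConnectionError(e)
    except SSLError as e:
        # A TLS failure mid-stream is reported as requests' SSLError
        raise RequestsSSLError(e)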
|
https://github.com/teamqurrent/requests | Your objective is to enhance the way the requests library handles SSL certificate bundle settings from environment variables. Specifically, focus on improving the `merge_environment_settings` function in `requests/sessions.py` to better handle the REQUESTS_CA_BUNDLE and CURL_CA_BUNDLE environment variables. The goal is to ensure that the library correctly prioritizes these variables when determining the SSL certificate bundle to use for verifying server certificates. | 5e74954 | -e .[socks]
pytest
pytest-cov
pytest-httpbin==1.0.0
pytest-mock
httpbin==0.7.0
trustme
wheel
chardet>=3.0.2,<3.1.0
idna>=2.5,<2.8
urllib3>=1.21.1,<1.24
certifi>=2017.4.17
# Flask Stack
Flask>1.0,<2.0
markupsafe<2.1
| python3.9 | 79c4a017 | diff --git a/requests/sessions.py b/requests/sessions.py
--- a/requests/sessions.py
+++ b/requests/sessions.py
@@ -702,11 +702,14 @@ class Session(SessionRedirectMixin):
             for (k, v) in env_proxies.items():
                 proxies.setdefault(k, v)
 
-            # Look for requests environment configuration and be compatible
-            # with cURL.
+            # Look for requests environment configuration
+            # and be compatible with cURL.
             if verify is True or verify is None:
-                verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
-                          os.environ.get('CURL_CA_BUNDLE'))
+                verify = (
+                    os.environ.get('REQUESTS_CA_BUNDLE')
+                    or os.environ.get('CURL_CA_BUNDLE')
+                    or verify
+                )
 
         # Merge all the kwargs.
         proxies = merge_setting(proxies, self.proxies)
@@ -714,8 +717,12 @@ class Session(SessionRedirectMixin):
         verify = merge_setting(verify, self.verify)
         cert = merge_setting(cert, self.cert)
 
-        return {'verify': verify, 'proxies': proxies, 'stream': stream,
-                'cert': cert}
+        return {
+            'proxies': proxies,
+            'stream': stream,
+            'verify': verify,
+            'cert': cert
+        }
 
     def get_adapter(self, url):
         """
diff --git a/tests/test_requests.py b/tests/test_requests.py
--- a/tests/test_requests.py
+++ b/tests/test_requests.py
@@ -898,6 +898,42 @@ class TestRequests:
             requests.get(httpbin_secure(), cert=('.', INVALID_PATH))
         assert str(e.value) == 'Could not find the TLS key file, invalid path: {}'.format(INVALID_PATH)
 
+    @pytest.mark.parametrize(
+        'env, expected', (
+            ({}, True),
+            ({'REQUESTS_CA_BUNDLE': '/some/path'}, '/some/path'),
+            ({'REQUESTS_CA_BUNDLE': ''}, True),
+            ({'CURL_CA_BUNDLE': '/some/path'}, '/some/path'),
+            ({'CURL_CA_BUNDLE': ''}, True),
+            ({'REQUESTS_CA_BUNDLE': '', 'CURL_CA_BUNDLE': ''}, True),
+            (
+                {
+                    'REQUESTS_CA_BUNDLE': '/some/path',
+                    'CURL_CA_BUNDLE': '/curl/path',
+                },
+                '/some/path',
+            ),
+            (
+                {
+                    'REQUESTS_CA_BUNDLE': '',
+                    'CURL_CA_BUNDLE': '/curl/path',
+                },
+                '/curl/path',
+            ),
+        )
+    )
+    def test_env_cert_bundles(self, httpbin, mocker, env, expected):
+        s = requests.Session()
+        mocker.patch('os.environ', env)
+        settings = s.merge_environment_settings(
+            url=httpbin('get'),
+            proxies={},
+            stream=False,
+            verify=True,
+            cert=None
+        )
+        assert settings['verify'] == expected
+
     def test_http_with_certificate(self, httpbin):
         r = requests.get(httpbin(), cert='.')
         assert r.status_code == 200
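
For illustration, a minimal usage sketch of the precedence the patch above establishes: REQUESTS_CA_BUNDLE is consulted first, then CURL_CA_BUNDLE, then the verify argument itself, with empty environment values skipped. The URL and bundle paths below are placeholders, and the expected result mirrors the parametrized test cases in the patch:

import os
import requests

s = requests.Session()  # trust_env is True by default
os.environ['REQUESTS_CA_BUNDLE'] = ''        # empty value, so it is skipped
os.environ['CURL_CA_BUNDLE'] = '/curl/path'  # placeholder bundle path
settings = s.merge_environment_settings(
    url='http://example.com/get',  # placeholder URL
    proxies={},
    stream=False,
    verify=True,
    cert=None,
)
assert settings['verify'] == '/curl/path'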
| [
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.sessions\n~~~~~~~~~~~~~~~~~\n\nThis module provides a Session object to manage and persist settings across\nrequests (cookies, auth, proxies).\n\"\"\"\nimport os\nimport sys\nimport time\nfrom datetime import timedelta\nfrom collections import OrderedDict\n\nfrom .auth import _basic_auth_str\nfrom .compat import cookielib, is_py3, urljoin, urlparse, Mapping\nfrom .cookies import (\n cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)\nfrom .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT\nfrom .hooks import default_hooks, dispatch_hook\nfrom ._internal_utils import to_native_string\nfrom .utils import to_key_val_list, default_headers, DEFAULT_PORTS\nfrom .exceptions import (\n TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)\n\nfrom .structures import CaseInsensitiveDict\nfrom .adapters import HTTPAdapter\n\nfrom .utils import (\n requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,\n get_auth_from_url, rewind_body, resolve_proxies\n)\n\nfrom .status_codes import codes\n\n# formerly defined here, reexposed here for backward compatibility\nfrom .models import REDIRECT_STATI\n\n# Preferred clock, based on which one is more accurate on a given system.\nif sys.platform == 'win32':\n try: # Python 3.4+\n preferred_clock = time.perf_counter\n except AttributeError: # Earlier than Python 3.\n preferred_clock = time.clock\nelse:\n preferred_clock = time.time\n\n\ndef merge_setting(request_setting, session_setting, dict_class=OrderedDict):\n \"\"\"Determines appropriate setting for a given request, taking into account\n the explicit setting on that request, and the setting in the session. If a\n setting is a dictionary, they will be merged together using `dict_class`\n \"\"\"\n\n if session_setting is None:\n return request_setting\n\n if request_setting is None:\n return session_setting\n\n # Bypass if not a dictionary (e.g. verify)\n if not (\n isinstance(session_setting, Mapping) and\n isinstance(request_setting, Mapping)\n ):\n return request_setting\n\n merged_setting = dict_class(to_key_val_list(session_setting))\n merged_setting.update(to_key_val_list(request_setting))\n\n # Remove keys that are set to None. Extract keys first to avoid altering\n # the dictionary during iteration.\n none_keys = [k for (k, v) in merged_setting.items() if v is None]\n for key in none_keys:\n del merged_setting[key]\n\n return merged_setting\n\n\ndef merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):\n \"\"\"Properly merges both requests and session hooks.\n\n This is necessary because when request_hooks == {'response': []}, the\n merge breaks Session hooks entirely.\n \"\"\"\n if session_hooks is None or session_hooks.get('response') == []:\n return request_hooks\n\n if request_hooks is None or request_hooks.get('response') == []:\n return session_hooks\n\n return merge_setting(request_hooks, session_hooks, dict_class)\n\n\nclass SessionRedirectMixin(object):\n\n def get_redirect_target(self, resp):\n \"\"\"Receives a Response. 
Returns a redirect URI or ``None``\"\"\"\n # Due to the nature of how requests processes redirects this method will\n # be called at least once upon the original response and at least twice\n # on each subsequent redirect response (if any).\n # If a custom mixin is used to handle this logic, it may be advantageous\n # to cache the redirect location onto the response object as a private\n # attribute.\n if resp.is_redirect:\n location = resp.headers['location']\n # Currently the underlying http module on py3 decode headers\n # in latin1, but empirical evidence suggests that latin1 is very\n # rarely used with non-ASCII characters in HTTP headers.\n # It is more likely to get UTF8 header rather than latin1.\n # This causes incorrect handling of UTF8 encoded location headers.\n # To solve this, we re-encode the location in latin1.\n if is_py3:\n location = location.encode('latin1')\n return to_native_string(location, 'utf8')\n return None\n\n def should_strip_auth(self, old_url, new_url):\n \"\"\"Decide whether Authorization header should be removed when redirecting\"\"\"\n old_parsed = urlparse(old_url)\n new_parsed = urlparse(new_url)\n if old_parsed.hostname != new_parsed.hostname:\n return True\n # Special case: allow http -> https redirect when using the standard\n # ports. This isn't specified by RFC 7235, but is kept to avoid\n # breaking backwards compatibility with older versions of requests\n # that allowed any redirects on the same host.\n if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)\n and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):\n return False\n\n # Handle default port usage corresponding to scheme.\n changed_port = old_parsed.port != new_parsed.port\n changed_scheme = old_parsed.scheme != new_parsed.scheme\n default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)\n if (not changed_scheme and old_parsed.port in default_port\n and new_parsed.port in default_port):\n return False\n\n # Standard case: root URI must match\n return changed_port or changed_scheme\n\n def resolve_redirects(self, resp, req, stream=False, timeout=None,\n verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):\n \"\"\"Receives a Response. 
Returns a generator of Responses or Requests.\"\"\"\n\n hist = [] # keep track of history\n\n url = self.get_redirect_target(resp)\n previous_fragment = urlparse(req.url).fragment\n while url:\n prepared_request = req.copy()\n\n # Update history and keep track of redirects.\n # resp.history must ignore the original request in this loop\n hist.append(resp)\n resp.history = hist[1:]\n\n try:\n resp.content # Consume socket so it can be released\n except (ChunkedEncodingError, ContentDecodingError, RuntimeError):\n resp.raw.read(decode_content=False)\n\n if len(resp.history) >= self.max_redirects:\n raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)\n\n # Release the connection back into the pool.\n resp.close()\n\n # Handle redirection without scheme (see: RFC 1808 Section 4)\n if url.startswith('//'):\n parsed_rurl = urlparse(resp.url)\n url = ':'.join([to_native_string(parsed_rurl.scheme), url])\n\n # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)\n parsed = urlparse(url)\n if parsed.fragment == '' and previous_fragment:\n parsed = parsed._replace(fragment=previous_fragment)\n elif parsed.fragment:\n previous_fragment = parsed.fragment\n url = parsed.geturl()\n\n # Facilitate relative 'location' headers, as allowed by RFC 7231.\n # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')\n # Compliant with RFC3986, we percent encode the url.\n if not parsed.netloc:\n url = urljoin(resp.url, requote_uri(url))\n else:\n url = requote_uri(url)\n\n prepared_request.url = to_native_string(url)\n\n self.rebuild_method(prepared_request, resp)\n\n # https://github.com/psf/requests/issues/1084\n if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):\n # https://github.com/psf/requests/issues/3490\n purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')\n for header in purged_headers:\n prepared_request.headers.pop(header, None)\n prepared_request.body = None\n\n headers = prepared_request.headers\n headers.pop('Cookie', None)\n\n # Extract any cookies sent on the response to the cookiejar\n # in the new request. Because we've mutated our copied prepared\n # request, use the old one that we haven't yet touched.\n extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)\n merge_cookies(prepared_request._cookies, self.cookies)\n prepared_request.prepare_cookies(prepared_request._cookies)\n\n # Rebuild auth and proxy information.\n proxies = self.rebuild_proxies(prepared_request, proxies)\n self.rebuild_auth(prepared_request, resp)\n\n # A failed tell() sets `_body_position` to `object()`. 
This non-None\n # value ensures `rewindable` will be True, allowing us to raise an\n # UnrewindableBodyError, instead of hanging the connection.\n rewindable = (\n prepared_request._body_position is not None and\n ('Content-Length' in headers or 'Transfer-Encoding' in headers)\n )\n\n # Attempt to rewind consumed file-like object.\n if rewindable:\n rewind_body(prepared_request)\n\n # Override the original request.\n req = prepared_request\n\n if yield_requests:\n yield req\n else:\n\n resp = self.send(\n req,\n stream=stream,\n timeout=timeout,\n verify=verify,\n cert=cert,\n proxies=proxies,\n allow_redirects=False,\n **adapter_kwargs\n )\n\n extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)\n\n # extract redirect url, if any, for the next loop\n url = self.get_redirect_target(resp)\n yield resp\n\n def rebuild_auth(self, prepared_request, response):\n \"\"\"When being redirected we may want to strip authentication from the\n request to avoid leaking credentials. This method intelligently removes\n and reapplies authentication where possible to avoid credential loss.\n \"\"\"\n headers = prepared_request.headers\n url = prepared_request.url\n\n if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):\n # If we get redirected to a new host, we should strip out any\n # authentication headers.\n del headers['Authorization']\n\n # .netrc might have more auth for us on our new host.\n new_auth = get_netrc_auth(url) if self.trust_env else None\n if new_auth is not None:\n prepared_request.prepare_auth(new_auth)\n\n def rebuild_proxies(self, prepared_request, proxies):\n \"\"\"This method re-evaluates the proxy configuration by considering the\n environment variables. If we are redirected to a URL covered by\n NO_PROXY, we strip the proxy configuration. 
Otherwise, we set missing\n proxy keys for this URL (in case they were stripped by a previous\n redirect).\n\n This method also replaces the Proxy-Authorization header where\n necessary.\n\n :rtype: dict\n \"\"\"\n headers = prepared_request.headers\n scheme = urlparse(prepared_request.url).scheme\n new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)\n\n if 'Proxy-Authorization' in headers:\n del headers['Proxy-Authorization']\n\n try:\n username, password = get_auth_from_url(new_proxies[scheme])\n except KeyError:\n username, password = None, None\n\n if username and password:\n headers['Proxy-Authorization'] = _basic_auth_str(username, password)\n\n return new_proxies\n\n def rebuild_method(self, prepared_request, response):\n \"\"\"When being redirected we may want to change the method of the request\n based on certain specs or browser behavior.\n \"\"\"\n method = prepared_request.method\n\n # https://tools.ietf.org/html/rfc7231#section-6.4.4\n if response.status_code == codes.see_other and method != 'HEAD':\n method = 'GET'\n\n # Do what the browsers do, despite standards...\n # First, turn 302s into GETs.\n if response.status_code == codes.found and method != 'HEAD':\n method = 'GET'\n\n # Second, if a POST is responded to with a 301, turn it into a GET.\n # This bizarre behaviour is explained in Issue 1704.\n if response.status_code == codes.moved and method == 'POST':\n method = 'GET'\n\n prepared_request.method = method\n\n\nclass Session(SessionRedirectMixin):\n \"\"\"A Requests session.\n\n Provides cookie persistence, connection-pooling, and configuration.\n\n Basic Usage::\n\n >>> import requests\n >>> s = requests.Session()\n >>> s.get('https://httpbin.org/get')\n <Response [200]>\n\n Or as a context manager::\n\n >>> with requests.Session() as s:\n ... s.get('https://httpbin.org/get')\n <Response [200]>\n \"\"\"\n\n __attrs__ = [\n 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',\n 'cert', 'adapters', 'stream', 'trust_env',\n 'max_redirects',\n ]\n\n def __init__(self):\n\n #: A case-insensitive dictionary of headers to be sent on each\n #: :class:`Request <Request>` sent from this\n #: :class:`Session <Session>`.\n self.headers = default_headers()\n\n #: Default Authentication tuple or object to attach to\n #: :class:`Request <Request>`.\n self.auth = None\n\n #: Dictionary mapping protocol or protocol and host to the URL of the proxy\n #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to\n #: be used on each :class:`Request <Request>`.\n self.proxies = {}\n\n #: Event-handling hooks.\n self.hooks = default_hooks()\n\n #: Dictionary of querystring data to attach to each\n #: :class:`Request <Request>`. The dictionary values may be lists for\n #: representing multivalued query parameters.\n self.params = {}\n\n #: Stream response content default.\n self.stream = False\n\n #: SSL Verification default.\n #: Defaults to `True`, requiring requests to verify the TLS certificate at the\n #: remote end.\n #: If verify is set to `False`, requests will accept any TLS certificate\n #: presented by the server, and will ignore hostname mismatches and/or\n #: expired certificates, which will make your application vulnerable to\n #: man-in-the-middle (MitM) attacks.\n #: Only set this to `False` for testing.\n self.verify = True\n\n #: SSL client certificate default, if String, path to ssl client\n #: cert file (.pem). If Tuple, ('cert', 'key') pair.\n self.cert = None\n\n #: Maximum number of redirects allowed. 
If the request exceeds this\n #: limit, a :class:`TooManyRedirects` exception is raised.\n #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is\n #: 30.\n self.max_redirects = DEFAULT_REDIRECT_LIMIT\n\n #: Trust environment settings for proxy configuration, default\n #: authentication and similar.\n self.trust_env = True\n\n #: A CookieJar containing all currently outstanding cookies set on this\n #: session. By default it is a\n #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but\n #: may be any other ``cookielib.CookieJar`` compatible object.\n self.cookies = cookiejar_from_dict({})\n\n # Default connection adapters.\n self.adapters = OrderedDict()\n self.mount('https://', HTTPAdapter())\n self.mount('http://', HTTPAdapter())\n\n def __enter__(self):\n return self\n\n def __exit__(self, *args):\n self.close()\n\n def prepare_request(self, request):\n \"\"\"Constructs a :class:`PreparedRequest <PreparedRequest>` for\n transmission and returns it. The :class:`PreparedRequest` has settings\n merged from the :class:`Request <Request>` instance and those of the\n :class:`Session`.\n\n :param request: :class:`Request` instance to prepare with this\n session's settings.\n :rtype: requests.PreparedRequest\n \"\"\"\n cookies = request.cookies or {}\n\n # Bootstrap CookieJar.\n if not isinstance(cookies, cookielib.CookieJar):\n cookies = cookiejar_from_dict(cookies)\n\n # Merge with session cookies\n merged_cookies = merge_cookies(\n merge_cookies(RequestsCookieJar(), self.cookies), cookies)\n\n # Set environment's basic authentication if not explicitly set.\n auth = request.auth\n if self.trust_env and not auth and not self.auth:\n auth = get_netrc_auth(request.url)\n\n p = PreparedRequest()\n p.prepare(\n method=request.method.upper(),\n url=request.url,\n files=request.files,\n data=request.data,\n json=request.json,\n headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),\n params=merge_setting(request.params, self.params),\n auth=merge_setting(auth, self.auth),\n cookies=merged_cookies,\n hooks=merge_hooks(request.hooks, self.hooks),\n )\n return p\n\n def request(self, method, url,\n params=None, data=None, headers=None, cookies=None, files=None,\n auth=None, timeout=None, allow_redirects=True, proxies=None,\n hooks=None, stream=None, verify=None, cert=None, json=None):\n \"\"\"Constructs a :class:`Request <Request>`, prepares it and sends it.\n Returns :class:`Response <Response>` object.\n\n :param method: method for the new :class:`Request` object.\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary or bytes to be sent in the query\n string for the :class:`Request`.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the\n :class:`Request`.\n :param headers: (optional) Dictionary of HTTP Headers to send with the\n :class:`Request`.\n :param cookies: (optional) Dict or CookieJar object to send with the\n :class:`Request`.\n :param files: (optional) Dictionary of ``'filename': file-like-objects``\n for multipart encoding upload.\n :param auth: (optional) Auth tuple or callable to enable\n Basic/Digest/Custom HTTP Auth.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) <timeouts>` tuple.\n :type timeout: float or tuple\n :param allow_redirects: (optional) 
Set to True by default.\n :type allow_redirects: bool\n :param proxies: (optional) Dictionary mapping protocol or protocol and\n hostname to the URL of the proxy.\n :param stream: (optional) whether to immediately download the response\n content. Defaults to ``False``.\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use. Defaults to ``True``. When set to\n ``False``, requests will accept any TLS certificate presented by\n the server, and will ignore hostname mismatches and/or expired\n certificates, which will make your application vulnerable to\n man-in-the-middle (MitM) attacks. Setting verify to ``False`` \n may be useful during local development or testing.\n :param cert: (optional) if String, path to ssl client cert file (.pem).\n If Tuple, ('cert', 'key') pair.\n :rtype: requests.Response\n \"\"\"\n # Create the Request.\n req = Request(\n method=method.upper(),\n url=url,\n headers=headers,\n files=files,\n data=data or {},\n json=json,\n params=params or {},\n auth=auth,\n cookies=cookies,\n hooks=hooks,\n )\n prep = self.prepare_request(req)\n\n proxies = proxies or {}\n\n settings = self.merge_environment_settings(\n prep.url, proxies, stream, verify, cert\n )\n\n # Send the request.\n send_kwargs = {\n 'timeout': timeout,\n 'allow_redirects': allow_redirects,\n }\n send_kwargs.update(settings)\n resp = self.send(prep, **send_kwargs)\n\n return resp\n\n def get(self, url, **kwargs):\n r\"\"\"Sends a GET request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', True)\n return self.request('GET', url, **kwargs)\n\n def options(self, url, **kwargs):\n r\"\"\"Sends a OPTIONS request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', True)\n return self.request('OPTIONS', url, **kwargs)\n\n def head(self, url, **kwargs):\n r\"\"\"Sends a HEAD request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', False)\n return self.request('HEAD', url, **kwargs)\n\n def post(self, url, data=None, json=None, **kwargs):\n r\"\"\"Sends a POST request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('POST', url, data=data, json=json, **kwargs)\n\n def put(self, url, data=None, **kwargs):\n r\"\"\"Sends a PUT request. 
Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('PUT', url, data=data, **kwargs)\n\n def patch(self, url, data=None, **kwargs):\n r\"\"\"Sends a PATCH request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('PATCH', url, data=data, **kwargs)\n\n def delete(self, url, **kwargs):\n r\"\"\"Sends a DELETE request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('DELETE', url, **kwargs)\n\n def send(self, request, **kwargs):\n \"\"\"Send a given PreparedRequest.\n\n :rtype: requests.Response\n \"\"\"\n # Set defaults that the hooks can utilize to ensure they always have\n # the correct parameters to reproduce the previous request.\n kwargs.setdefault('stream', self.stream)\n kwargs.setdefault('verify', self.verify)\n kwargs.setdefault('cert', self.cert)\n if 'proxies' not in kwargs:\n kwargs['proxies'] = resolve_proxies(\n request, self.proxies, self.trust_env\n )\n\n # It's possible that users might accidentally send a Request object.\n # Guard against that specific failure case.\n if isinstance(request, Request):\n raise ValueError('You can only send PreparedRequests.')\n\n # Set up variables needed for resolve_redirects and dispatching of hooks\n allow_redirects = kwargs.pop('allow_redirects', True)\n stream = kwargs.get('stream')\n hooks = request.hooks\n\n # Get the appropriate adapter to use\n adapter = self.get_adapter(url=request.url)\n\n # Start time (approximately) of the request\n start = preferred_clock()\n\n # Send the request\n r = adapter.send(request, **kwargs)\n\n # Total elapsed time of the request (approximately)\n elapsed = preferred_clock() - start\n r.elapsed = timedelta(seconds=elapsed)\n\n # Response manipulation hooks\n r = dispatch_hook('response', hooks, r, **kwargs)\n\n # Persist cookies\n if r.history:\n\n # If the hooks create history then we want those cookies too\n for resp in r.history:\n extract_cookies_to_jar(self.cookies, resp.request, resp.raw)\n\n extract_cookies_to_jar(self.cookies, request, r.raw)\n\n # Resolve redirects if allowed.\n if allow_redirects:\n # Redirect resolving generator.\n gen = self.resolve_redirects(r, request, **kwargs)\n history = [resp for resp in gen]\n else:\n history = []\n\n # Shuffle things around if there's history.\n if history:\n # Insert the first (original) request at the start\n history.insert(0, r)\n # Get the last request made\n r = history.pop()\n r.history = history\n\n # If redirects aren't being followed, store the response on the Request for Response.next().\n if not allow_redirects:\n try:\n r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs))\n except StopIteration:\n pass\n\n if not stream:\n r.content\n\n return r\n\n def merge_environment_settings(self, url, proxies, stream, verify, cert):\n \"\"\"\n Check the environment and 
merge it with some settings.\n\n :rtype: dict\n \"\"\"\n # Gather clues from the surrounding environment.\n if self.trust_env:\n # Set environment's proxies.\n no_proxy = proxies.get('no_proxy') if proxies is not None else None\n env_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n for (k, v) in env_proxies.items():\n proxies.setdefault(k, v)\n\n # Look for requests environment configuration and be compatible\n # with cURL.\n if verify is True or verify is None:\n verify = (os.environ.get('REQUESTS_CA_BUNDLE') or\n os.environ.get('CURL_CA_BUNDLE'))\n\n # Merge all the kwargs.\n proxies = merge_setting(proxies, self.proxies)\n stream = merge_setting(stream, self.stream)\n verify = merge_setting(verify, self.verify)\n cert = merge_setting(cert, self.cert)\n\n return {'verify': verify, 'proxies': proxies, 'stream': stream,\n 'cert': cert}\n\n def get_adapter(self, url):\n \"\"\"\n Returns the appropriate connection adapter for the given URL.\n\n :rtype: requests.adapters.BaseAdapter\n \"\"\"\n for (prefix, adapter) in self.adapters.items():\n\n if url.lower().startswith(prefix.lower()):\n return adapter\n\n # Nothing matches :-/\n raise InvalidSchema(\"No connection adapters were found for {!r}\".format(url))\n\n def close(self):\n \"\"\"Closes all adapters and as such the session\"\"\"\n for v in self.adapters.values():\n v.close()\n\n def mount(self, prefix, adapter):\n \"\"\"Registers a connection adapter to a prefix.\n\n Adapters are sorted in descending order by prefix length.\n \"\"\"\n self.adapters[prefix] = adapter\n keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]\n\n for key in keys_to_move:\n self.adapters[key] = self.adapters.pop(key)\n\n def __getstate__(self):\n state = {attr: getattr(self, attr, None) for attr in self.__attrs__}\n return state\n\n def __setstate__(self, state):\n for attr, value in state.items():\n setattr(self, attr, value)\n\n\ndef session():\n \"\"\"\n Returns a :class:`Session` for context-management.\n\n .. deprecated:: 1.0.0\n\n This method has been deprecated since version 1.0.0 and is only kept for\n backwards compatibility. New code should use :class:`~requests.sessions.Session`\n to create a session. This may be removed at a future date.\n\n :rtype: Session\n \"\"\"\n return Session()\n",
"path": "requests/sessions.py"
},
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Tests for Requests.\"\"\"\n\nfrom __future__ import division\nimport json\nimport os\nimport pickle\nimport collections\nimport contextlib\nimport warnings\nimport re\n\nimport io\nimport requests\nimport pytest\nimport urllib3\nfrom requests.adapters import HTTPAdapter\nfrom requests.auth import HTTPDigestAuth, _basic_auth_str\nfrom requests.compat import (\n Morsel, cookielib, getproxies, str, urlparse,\n builtin_str)\nfrom requests.cookies import (\n cookiejar_from_dict, morsel_to_cookie)\nfrom requests.exceptions import (\n ChunkedEncodingError,\n ConnectionError,\n ConnectTimeout,\n ContentDecodingError,\n InvalidHeader,\n InvalidJSONError,\n InvalidProxyURL,\n InvalidSchema,\n InvalidURL,\n MissingSchema,\n ProxyError,\n ReadTimeout,\n RequestException,\n RetryError,\n Timeout,\n TooManyRedirects,\n UnrewindableBodyError,\n)\nfrom requests.exceptions import SSLError as RequestsSSLError\nfrom requests.models import PreparedRequest\nfrom requests.structures import CaseInsensitiveDict\nfrom requests.sessions import SessionRedirectMixin\nfrom requests.models import urlencode\nfrom requests.hooks import default_hooks\nfrom requests.compat import JSONDecodeError, is_py3, MutableMapping\n\nfrom .compat import StringIO, u\nfrom .utils import override_environ\nfrom urllib3.util import Timeout as Urllib3Timeout\n\n# Requests to this URL should always fail with a connection timeout (nothing\n# listening on that port)\nTARPIT = 'http://10.255.255.1'\n\n# This is to avoid waiting the timeout of using TARPIT\nINVALID_PROXY='http://localhost:1'\n\ntry:\n from ssl import SSLContext\n del SSLContext\n HAS_MODERN_SSL = True\nexcept ImportError:\n HAS_MODERN_SSL = False\n\ntry:\n requests.pyopenssl\n HAS_PYOPENSSL = True\nexcept AttributeError:\n HAS_PYOPENSSL = False\n\n\nclass TestRequests:\n\n digest_auth_algo = ('MD5', 'SHA-256', 'SHA-512')\n\n def test_entry_points(self):\n\n requests.session\n requests.session().get\n requests.session().head\n requests.get\n requests.head\n requests.put\n requests.patch\n requests.post\n # Not really an entry point, but people rely on it.\n from requests.packages.urllib3.poolmanager import PoolManager\n\n @pytest.mark.parametrize(\n 'exception, url', (\n (MissingSchema, 'hiwpefhipowhefopw'),\n (InvalidSchema, 'localhost:3128'),\n (InvalidSchema, 'localhost.localdomain:3128/'),\n (InvalidSchema, '10.122.1.1:3128/'),\n (InvalidURL, 'http://'),\n (InvalidURL, 'http://*example.com'),\n (InvalidURL, 'http://.example.com'),\n ))\n def test_invalid_url(self, exception, url):\n with pytest.raises(exception):\n requests.get(url)\n\n def test_basic_building(self):\n req = requests.Request()\n req.url = 'http://kennethreitz.org/'\n req.data = {'life': '42'}\n\n pr = req.prepare()\n assert pr.url == req.url\n assert pr.body == 'life=42'\n\n @pytest.mark.parametrize('method', ('GET', 'HEAD'))\n def test_no_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert 'Content-Length' not in req.headers\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_no_body_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert req.headers['Content-Length'] == '0'\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_empty_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower()), data='').prepare()\n assert 
req.headers['Content-Length'] == '0'\n\n def test_override_content_length(self, httpbin):\n headers = {\n 'Content-Length': 'not zero'\n }\n r = requests.Request('POST', httpbin('post'), headers=headers).prepare()\n assert 'Content-Length' in r.headers\n assert r.headers['Content-Length'] == 'not zero'\n\n def test_path_is_not_double_encoded(self):\n request = requests.Request('GET', \"http://0.0.0.0/get/test case\").prepare()\n\n assert request.path_url == '/get/test%20case'\n\n @pytest.mark.parametrize(\n 'url, expected', (\n ('http://example.com/path#fragment', 'http://example.com/path?a=b#fragment'),\n ('http://example.com/path?key=value#fragment', 'http://example.com/path?key=value&a=b#fragment')\n ))\n def test_params_are_added_before_fragment(self, url, expected):\n request = requests.Request('GET', url, params={\"a\": \"b\"}).prepare()\n assert request.url == expected\n\n def test_params_original_order_is_preserved_by_default(self):\n param_ordered_dict = collections.OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1)))\n session = requests.Session()\n request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict)\n prep = session.prepare_request(request)\n assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1'\n\n def test_params_bytes_are_encoded(self):\n request = requests.Request('GET', 'http://example.com',\n params=b'test=foo').prepare()\n assert request.url == 'http://example.com/?test=foo'\n\n def test_binary_put(self):\n request = requests.Request('PUT', 'http://example.com',\n data=u\"ööö\".encode(\"utf-8\")).prepare()\n assert isinstance(request.body, bytes)\n\n def test_whitespaces_are_removed_from_url(self):\n # Test for issue #3696\n request = requests.Request('GET', ' http://example.com').prepare()\n assert request.url == 'http://example.com/'\n\n @pytest.mark.parametrize('scheme', ('http://', 'HTTP://', 'hTTp://', 'HttP://'))\n def test_mixed_case_scheme_acceptable(self, httpbin, scheme):\n s = requests.Session()\n s.proxies = getproxies()\n parts = urlparse(httpbin('get'))\n url = scheme + parts.netloc + parts.path\n r = requests.Request('GET', url)\n r = s.send(r.prepare())\n assert r.status_code == 200, 'failed for scheme {}'.format(scheme)\n\n def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n\n assert r.status_code == 200\n\n def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):\n r = requests.get(httpbin('redirect', '1'))\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin):\n r = requests.post(httpbin('redirect-to'), data='test', params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == 'test'\n\n def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin):\n byte_str = b'test'\n r = requests.post(httpbin('redirect-to'), data=io.BytesIO(byte_str), params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == byte_str.decode('utf-8')\n\n def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin):\n try:\n requests.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '20')\n assert e.request.url == url\n assert 
e.response.url == url\n assert len(e.response.history) == 30\n else:\n pytest.fail('Expected redirect to raise TooManyRedirects but it did not')\n\n def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin):\n s = requests.session()\n s.max_redirects = 5\n try:\n s.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '45')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 5\n else:\n pytest.fail('Expected custom max number of redirects to be respected but was not')\n\n def test_http_301_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '301'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_301_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '301'), allow_redirects=True)\n print(r.content)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_302_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '302'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_302_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '302'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_303_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '303'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_http_303_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '303'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_header_and_body_removal_on_redirect(self, httpbin):\n purged_headers = ('Content-Length', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data={'test': 'data'})\n prep = ses.prepare_request(req)\n resp = ses.send(prep)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = 'get'\n\n # Run request through resolve_redirects\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_transfer_enc_removal_on_redirect(self, httpbin):\n purged_headers = ('Transfer-Encoding', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data=(b'x' for x in range(1)))\n prep = ses.prepare_request(req)\n assert 'Transfer-Encoding' in prep.headers\n\n # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33\n resp = requests.Response()\n resp.raw = io.BytesIO(b'the content')\n resp.request = prep\n setattr(resp.raw, 'release_conn', lambda *args: args)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n\n # Run request through resolve_redirect\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert 
header not in next_resp.request.headers\n\n def test_fragment_maintained_on_redirect(self, httpbin):\n fragment = \"#view=edit&token=hunter2\"\n r = requests.get(httpbin('redirect-to?url=get')+fragment)\n\n assert len(r.history) > 0\n assert r.history[0].request.url == httpbin('redirect-to?url=get')+fragment\n assert r.url == httpbin('get')+fragment\n\n def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n\n assert heads['User-agent'] in r.text\n assert r.status_code == 200\n\n def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)\n assert r.status_code == 200\n\n def test_set_cookie_on_301(self, httpbin):\n s = requests.session()\n url = httpbin('cookies/set?foo=bar')\n s.get(url)\n assert s.cookies['foo'] == 'bar'\n\n def test_cookie_sent_on_redirect(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')\n assert 'Cookie' in r.json()['headers']\n\n def test_cookie_removed_on_expire(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n assert s.cookies['foo'] == 'bar'\n s.get(\n httpbin('response-headers'),\n params={\n 'Set-Cookie':\n 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT'\n }\n )\n assert 'foo' not in s.cookies\n\n def test_cookie_quote_wrapped(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=\"bar:baz\"'))\n assert s.cookies['foo'] == '\"bar:baz\"'\n\n def test_cookie_persists_via_api(self, httpbin):\n s = requests.session()\n r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})\n assert 'foo' in r.request.headers['Cookie']\n assert 'foo' in r.history[0].request.headers['Cookie']\n\n def test_request_cookie_overrides_session_cookie(self, httpbin):\n s = requests.session()\n s.cookies['foo'] = 'bar'\n r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n assert r.json()['cookies']['foo'] == 'baz'\n # Session cookie should not be modified\n assert s.cookies['foo'] == 'bar'\n\n def test_request_cookies_not_persisted(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n # Sending a request with cookies should not add cookies to the session\n assert not s.cookies\n\n def test_generic_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n s.cookies = cj\n r = s.get(httpbin('cookies'))\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n # Make sure the session cj is still the custom one\n assert s.cookies is cj\n\n def test_param_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n r = s.get(httpbin('cookies'), cookies=cj)\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n\n def test_cookielib_cookiejar_on_redirect(self, httpbin):\n \"\"\"Tests resolve_redirects doesn't fail when merging cookies\n with a non-RequestsCookieJar cookiejar.\n\n See GH #3579\n \"\"\"\n cj = cookiejar_from_dict({'foo': 'bar'}, cookielib.CookieJar())\n s = requests.Session()\n s.cookies = cookiejar_from_dict({'cookie': 'tasty'})\n\n # Prepare request without using Session\n req = requests.Request('GET', httpbin('headers'), cookies=cj)\n prep_req = req.prepare()\n\n # Send request 
and simulate redirect\n resp = s.send(prep_req)\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n redirects = s.resolve_redirects(resp, prep_req)\n resp = next(redirects)\n\n # Verify CookieJar isn't being converted to RequestsCookieJar\n assert isinstance(prep_req._cookies, cookielib.CookieJar)\n assert isinstance(resp.request._cookies, cookielib.CookieJar)\n assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar)\n\n cookies = {}\n for c in resp.request._cookies:\n cookies[c.name] = c.value\n assert cookies['foo'] == 'bar'\n assert cookies['cookie'] == 'tasty'\n\n def test_requests_in_history_are_not_overridden(self, httpbin):\n resp = requests.get(httpbin('redirect/3'))\n urls = [r.url for r in resp.history]\n req_urls = [r.request.url for r in resp.history]\n assert urls == req_urls\n\n def test_history_is_always_a_list(self, httpbin):\n \"\"\"Show that even with redirects, Response.history is always a list.\"\"\"\n resp = requests.get(httpbin('get'))\n assert isinstance(resp.history, list)\n resp = requests.get(httpbin('redirect/1'))\n assert isinstance(resp.history, list)\n assert not isinstance(resp.history, tuple)\n\n def test_headers_on_session_with_None_are_not_sent(self, httpbin):\n \"\"\"Do not send headers in Session.headers with None values.\"\"\"\n ses = requests.Session()\n ses.headers['Accept-Encoding'] = None\n req = requests.Request('GET', httpbin('get'))\n prep = ses.prepare_request(req)\n assert 'Accept-Encoding' not in prep.headers\n\n def test_headers_preserve_order(self, httpbin):\n \"\"\"Preserve order when headers provided as OrderedDict.\"\"\"\n ses = requests.Session()\n ses.headers = collections.OrderedDict()\n ses.headers['Accept-Encoding'] = 'identity'\n ses.headers['First'] = '1'\n ses.headers['Second'] = '2'\n headers = collections.OrderedDict([('Third', '3'), ('Fourth', '4')])\n headers['Fifth'] = '5'\n headers['Second'] = '222'\n req = requests.Request('GET', httpbin('get'), headers=headers)\n prep = ses.prepare_request(req)\n items = list(prep.headers.items())\n assert items[0] == ('Accept-Encoding', 'identity')\n assert items[1] == ('First', '1')\n assert items[2] == ('Second', '222')\n assert items[3] == ('Third', '3')\n assert items[4] == ('Fourth', '4')\n assert items[5] == ('Fifth', '5')\n\n @pytest.mark.parametrize('key', ('User-agent', 'user-agent'))\n def test_user_agent_transfers(self, httpbin, key):\n\n heads = {key: 'Mozilla/5.0 (github.com/psf/requests)'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n assert heads[key] in r.text\n\n def test_HTTP_200_OK_HEAD(self, httpbin):\n r = requests.head(httpbin('get'))\n assert r.status_code == 200\n\n def test_HTTP_200_OK_PUT(self, httpbin):\n r = requests.put(httpbin('put'))\n assert r.status_code == 200\n\n def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):\n auth = ('user', 'pass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'username, password', (\n ('user', 'pass'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8')),\n (42, 42),\n (None, None),\n ))\n def test_set_basicauth(self, httpbin, username, password):\n auth = (username, password)\n url = httpbin('get')\n\n r = requests.Request('GET', url, auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == 
_basic_auth_str(username, password)\n\n def test_basicauth_encodes_byte_strings(self):\n \"\"\"Ensure b'test' formats as the byte string \"test\" rather\n than the unicode string \"b'test'\" in Python 3.\n \"\"\"\n auth = (b'\xc5\xafsername', b'test\xc6\xb6')\n r = requests.Request('GET', 'http://localhost', auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == 'Basic xa9zZXJuYW1lOnRlc3TGtg=='\n\n @pytest.mark.parametrize(\n 'url, exception', (\n # Connecting to an unknown domain should raise a ConnectionError\n ('http://doesnotexist.google.com', ConnectionError),\n # Connecting to an invalid port should raise a ConnectionError\n ('http://localhost:1', ConnectionError),\n # Inputting a URL that cannot be parsed should raise an InvalidURL error\n ('http://fe80::5054:ff:fe5a:fc0', InvalidURL)\n ))\n def test_errors(self, url, exception):\n with pytest.raises(exception):\n requests.get(url, timeout=1)\n\n def test_proxy_error(self):\n # Any proxy-related error (address resolution, no route to host, etc.) should result in a ProxyError\n with pytest.raises(ProxyError):\n requests.get('http://localhost:1', proxies={'http': 'non-resolvable-address'})\n\n def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure):\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'http:/badproxyurl:3128'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http://:8080'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'https://'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http:///example.com:8080'})\n\n def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n prepared = session.prepare_request(request)\n session.send(prepared)\n\n def test_respect_proxy_env_on_send_with_redirects(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n url = httpbin('redirect/1')\n print(url)\n request = requests.Request('GET', url)\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_get(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.get(httpbin())\n\n def test_respect_proxy_env_on_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.request(method='GET', url=httpbin())\n\n def test_proxy_authorization_preserved_on_request(self, httpbin):\n proxy_auth_value = \"Bearer XXX\"\n session = requests.Session()\n session.headers.update({\"Proxy-Authorization\": proxy_auth_value})\n resp = session.request(method='GET', url=httpbin('get'))\n sent_headers = resp.json().get('headers', {})\n\n assert sent_headers.get(\"Proxy-Authorization\") == proxy_auth_value\n\n def test_basicauth_with_netrc(self, httpbin):\n auth = ('user', 'pass')\n wrong_auth = ('wronguser', 'wrongpass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n 
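# Swap in a stub for requests.sessions.get_netrc_auth and restore the real\n # implementation in the finally block below, so netrc lookups stay\n # deterministic regardless of the host environment.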
old_auth = requests.sessions.get_netrc_auth\n\n try:\n def get_netrc_auth_mock(url):\n return auth\n requests.sessions.get_netrc_auth = get_netrc_auth_mock\n\n # Should use netrc and work.\n r = requests.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n r = requests.get(url, auth=wrong_auth)\n assert r.status_code == 401\n\n s = requests.session()\n\n # Should use netrc and work.\n r = s.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n s.auth = wrong_auth\n r = s.get(url)\n assert r.status_code == 401\n finally:\n requests.sessions.get_netrc_auth = old_auth\n\n def test_DIGEST_HTTP_200_OK_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype, 'never')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n print(r.headers['WWW-Authenticate'])\n\n s = requests.session()\n s.auth = HTTPDigestAuth('user', 'pass')\n r = s.get(url)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n r = requests.get(url)\n assert r.cookies['fake'] == 'fake_value'\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n s = requests.Session()\n s.get(url, auth=auth)\n assert s.cookies['fake'] == 'fake_value'\n\n def test_DIGEST_STREAM(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth, stream=True)\n assert r.raw.read() != b''\n\n r = requests.get(url, auth=auth, stream=False)\n assert r.raw.read() == b''\n\n def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'wrongpass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 401\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 401\n\n def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert '\"auth\"' in r.request.headers['Authorization']\n\n def test_POSTBIN_GET_POST_FILES(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_invalid_files_input(self, httpbin):\n\n url = httpbin('post')\n post = requests.post(url,\n files={\"random-file-1\": None, \"random-file-2\": 1})\n assert b'name=\"random-file-1\"' not in 
post.request.body\n assert b'name=\"random-file-2\"' in post.request.body\n\n def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin):\n\n class TestStream(object):\n def __init__(self, data):\n self.data = data.encode()\n self.length = len(self.data)\n self.index = 0\n\n def __len__(self):\n return self.length\n\n def read(self, size=None):\n if size:\n ret = self.data[self.index:self.index + size]\n self.index += size\n else:\n ret = self.data[self.index:]\n self.index = self.length\n return ret\n\n def tell(self):\n return self.index\n\n def seek(self, offset, where=0):\n if where == 0:\n self.index = offset\n elif where == 1:\n self.index += offset\n elif where == 2:\n self.index = self.length + offset\n\n test = TestStream('test')\n post1 = requests.post(httpbin('post'), data=test)\n assert post1.status_code == 200\n assert post1.json()['data'] == 'test'\n\n test = TestStream('test')\n test.seek(2)\n post2 = requests.post(httpbin('post'), data=test)\n assert post2.status_code == 200\n assert post2.json()['data'] == 'st'\n\n def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, data={'some': 'data'}, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_post_with_custom_mapping(self, httpbin):\n class CustomMapping(MutableMapping):\n def __init__(self, *args, **kwargs):\n self.data = dict(*args, **kwargs)\n\n def __delitem__(self, key):\n del self.data[key]\n\n def __getitem__(self, key):\n return self.data[key]\n\n def __setitem__(self, key, value):\n self.data[key] = value\n\n def __iter__(self):\n return iter(self.data)\n\n def __len__(self):\n return len(self.data)\n\n data = CustomMapping({'some': 'data'})\n url = httpbin('post')\n found_json = requests.post(url, data=data).json().get('form')\n assert found_json == {'some': 'data'}\n\n def test_conflicting_post_params(self, httpbin):\n url = httpbin('post')\n with open('requirements-dev.txt') as f:\n with pytest.raises(ValueError):\n requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})\n with pytest.raises(ValueError):\n requests.post(url, data=u('[{\"some\": \"data\"}]'), files={'some': f})\n\n def test_request_ok_set(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n assert not r.ok\n\n def test_status_raising(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n with pytest.raises(requests.exceptions.HTTPError):\n r.raise_for_status()\n\n r = requests.get(httpbin('status', '500'))\n assert not r.ok\n\n def test_decompress_gzip(self, httpbin):\n r = requests.get(httpbin('gzip'))\n r.content.decode('ascii')\n\n @pytest.mark.parametrize(\n 'url, params', (\n ('/get', {'foo': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'foo': 'foo'}),\n ('ø', {'foo': 'foo'}),\n ))\n def test_unicode_get(self, httpbin, url, params):\n requests.get(httpbin(url), params=params)\n\n def test_unicode_header_name(self, httpbin):\n requests.put(\n httpbin('put'),\n headers={str('Content-Type'): 'application/octet-stream'},\n data='\\xff') # compat.str is unicode.\n\n def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle):\n 
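# A single GET over HTTPS suffices here: following the 301 it gets back\n # exercises the TLS transport (pyOpenSSL when installed) end to end\n # against the test CA bundle.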
requests.get(httpbin_secure('status', '301'), verify=httpbin_ca_bundle)\n\n def test_invalid_ca_certificate_path(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), verify=INVALID_PATH)\n assert str(e.value) == 'Could not find a suitable TLS CA certificate bundle, invalid path: {}'.format(INVALID_PATH)\n\n def test_invalid_ssl_certificate_files(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=INVALID_PATH)\n assert str(e.value) == 'Could not find the TLS certificate file, invalid path: {}'.format(INVALID_PATH)\n\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=('.', INVALID_PATH))\n assert str(e.value) == 'Could not find the TLS key file, invalid path: {}'.format(INVALID_PATH)\n\n def test_http_with_certificate(self, httpbin):\n r = requests.get(httpbin(), cert='.')\n assert r.status_code == 200\n\n def test_https_warnings(self, nosan_server):\n \"\"\"warnings are emitted with requests.get\"\"\"\n host, port, ca_bundle = nosan_server\n if HAS_MODERN_SSL or HAS_PYOPENSSL:\n warnings_expected = ('SubjectAltNameWarning', )\n else:\n warnings_expected = ('SNIMissingWarning',\n 'InsecurePlatformWarning',\n 'SubjectAltNameWarning', )\n\n with pytest.warns(None) as warning_records:\n warnings.simplefilter('always')\n requests.get(\"https://localhost:{}/\".format(port), verify=ca_bundle)\n\n warning_records = [item for item in warning_records\n if item.category.__name__ != 'ResourceWarning']\n\n warnings_category = tuple(\n item.category.__name__ for item in warning_records)\n assert warnings_category == warnings_expected\n\n def test_certificate_failure(self, httpbin_secure):\n \"\"\"\n When underlying SSL problems occur, an SSLError is raised.\n \"\"\"\n with pytest.raises(RequestsSSLError):\n # Our local httpbin does not have a trusted CA, so this call will\n # fail if we use our default trust bundle.\n requests.get(httpbin_secure('status', '200'))\n\n def test_urlencoded_get_query_multivalued_param(self, httpbin):\n\n r = requests.get(httpbin('get'), params={'test': ['foo', 'baz']})\n assert r.status_code == 200\n assert r.url == httpbin('get?test=foo&test=baz')\n\n def test_form_encoded_post_query_multivalued_element(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data=dict(test=['foo', 'baz']))\n prep = r.prepare()\n assert prep.body == 'test=foo&test=baz'\n\n def test_different_encodings_dont_break_post(self, httpbin):\n r = requests.post(httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n params={'blah': 'asdf1234'},\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'data', (\n {'stuff': u('ëlïxr')},\n {'stuff': u('ëlïxr').encode('utf-8')},\n {'stuff': 'elixr'},\n {'stuff': 'elixr'.encode('utf-8')},\n ))\n def test_unicode_multipart_post(self, httpbin, data):\n r = requests.post(httpbin('post'),\n data=data,\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n def test_unicode_multipart_post_fieldnames(self, httpbin):\n filename = os.path.splitext(__file__)[0] + '.py'\n r = requests.Request(\n method='POST', url=httpbin('post'),\n data={'stuff'.encode('utf-8'): 'elixr'},\n files={'file': ('test_requests.py', open(filename, 'rb'))})\n prep = r.prepare()\n assert b'name=\"stuff\"' in prep.body\n assert b'name=\"b\\'stuff\\'\"' not in prep.body\n\n def 
test_unicode_method_name(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.request(\n method=u('POST'), url=httpbin('post'), files=files)\n assert r.status_code == 200\n\n def test_unicode_method_name_with_request_object(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n s = requests.Session()\n req = requests.Request(u('POST'), httpbin('post'), files=files)\n prep = s.prepare_request(req)\n assert isinstance(prep.method, builtin_str)\n assert prep.method == 'POST'\n\n resp = s.send(prep)\n assert resp.status_code == 200\n\n def test_non_prepared_request_error(self):\n s = requests.Session()\n req = requests.Request(u('POST'), '/')\n\n with pytest.raises(ValueError) as e:\n s.send(req)\n assert str(e.value) == 'You can only send PreparedRequests.'\n\n def test_custom_content_type(self, httpbin):\n r = requests.post(\n httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n files={\n 'file1': ('test_requests.py', open(__file__, 'rb')),\n 'file2': ('test_requests', open(__file__, 'rb'),\n 'text/py-content-type')})\n assert r.status_code == 200\n assert b\"text/py-content-type\" in r.request.body\n\n def test_hook_receives_request_arguments(self, httpbin):\n def hook(resp, **kwargs):\n assert resp is not None\n assert kwargs != {}\n\n s = requests.Session()\n r = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = s.prepare_request(r)\n s.send(prep)\n\n def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):\n hook = lambda x, *args, **kwargs: x\n s = requests.Session()\n s.hooks['response'].append(hook)\n r = requests.Request('GET', httpbin())\n prep = s.prepare_request(r)\n assert prep.hooks['response'] != []\n assert prep.hooks['response'] == [hook]\n\n def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):\n hook1 = lambda x, *args, **kwargs: x\n hook2 = lambda x, *args, **kwargs: x\n assert hook1 is not hook2\n s = requests.Session()\n s.hooks['response'].append(hook2)\n r = requests.Request('GET', httpbin(), hooks={'response': [hook1]})\n prep = s.prepare_request(r)\n assert prep.hooks['response'] == [hook1]\n\n def test_prepared_request_hook(self, httpbin):\n def hook(resp, **kwargs):\n resp.hook_working = True\n return resp\n\n req = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = req.prepare()\n\n s = requests.Session()\n s.proxies = getproxies()\n resp = s.send(prep)\n\n assert hasattr(resp, 'hook_working')\n\n def test_prepared_from_session(self, httpbin):\n class DummyAuth(requests.auth.AuthBase):\n def __call__(self, r):\n r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'\n return r\n\n req = requests.Request('GET', httpbin('headers'))\n assert not req.auth\n\n s = requests.Session()\n s.auth = DummyAuth()\n\n prep = s.prepare_request(req)\n resp = s.send(prep)\n\n assert resp.json()['headers'][\n 'Dummy-Auth-Test'] == 'dummy-auth-test-ok'\n\n def test_prepare_request_with_bytestring_url(self):\n req = requests.Request('GET', b'https://httpbin.org/')\n s = requests.Session()\n prep = s.prepare_request(req)\n assert prep.url == \"https://httpbin.org/\"\n\n def test_request_with_bytestring_host(self, httpbin):\n s = requests.Session()\n resp = s.request(\n 'GET',\n httpbin('cookies/set?cookie=value'),\n allow_redirects=False,\n headers={'Host': b'httpbin.org'}\n )\n assert resp.cookies.get('cookie') == 'value'\n\n def test_links(self):\n r = requests.Response()\n r.headers = {\n 'cache-control': 'public, max-age=60, s-maxage=60',\n 'connection': 'keep-alive',\n 
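# Headers modelled on a GitHub API response; only the 'link' entry below\n # matters for the assertion on r.links.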
'content-encoding': 'gzip',\n 'content-type': 'application/json; charset=utf-8',\n 'date': 'Sat, 26 Jan 2013 16:47:56 GMT',\n 'etag': '\"6ff6a73c0e446c1f61614769e3ceb778\"',\n 'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT',\n 'link': ('<https://api.github.com/users/kennethreitz/repos?'\n 'page=2&per_page=10>; rel=\"next\", <https://api.github.'\n 'com/users/kennethreitz/repos?page=7&per_page=10>; '\n ' rel=\"last\"'),\n 'server': 'GitHub.com',\n 'status': '200 OK',\n 'vary': 'Accept',\n 'x-content-type-options': 'nosniff',\n 'x-github-media-type': 'github.beta',\n 'x-ratelimit-limit': '60',\n 'x-ratelimit-remaining': '57'\n }\n assert r.links['next']['rel'] == 'next'\n\n def test_cookie_parameters(self):\n key = 'some_cookie'\n value = 'some_value'\n secure = True\n domain = 'test.com'\n rest = {'HttpOnly': True}\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, secure=secure, domain=domain, rest=rest)\n\n assert len(jar) == 1\n assert 'some_cookie' in jar\n\n cookie = list(jar)[0]\n assert cookie.secure == secure\n assert cookie.domain == domain\n assert cookie._rest['HttpOnly'] == rest['HttpOnly']\n\n def test_cookie_as_dict_keeps_len(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert len(jar) == 2\n assert len(d1) == 2\n assert len(d2) == 2\n assert len(d3) == 2\n\n def test_cookie_as_dict_keeps_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert d1['some_cookie'] == 'some_value'\n assert d2['some_cookie'] == 'some_value'\n assert d3['some_cookie1'] == 'some_value1'\n\n def test_cookie_as_dict_keys(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n keys = jar.keys()\n assert keys == list(keys)\n # make sure one can use keys multiple times\n assert list(keys) == list(keys)\n\n def test_cookie_as_dict_values(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n values = jar.values()\n assert values == list(values)\n # make sure one can use values multiple times\n assert list(values) == list(values)\n\n def test_cookie_as_dict_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n items = jar.items()\n assert items == list(items)\n # make sure one can use items multiple times\n assert list(items) == list(items)\n\n def test_cookie_duplicate_names_different_domains(self):\n key = 'some_cookie'\n value = 'some_value'\n domain1 = 'test1.com'\n domain2 = 'test2.com'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, domain=domain1)\n jar.set(key, value, domain=domain2)\n assert key in jar\n items = jar.items()\n assert len(items) == 2\n\n # Verify that CookieConflictError is raised if domain is not specified\n with 
pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n # Verify that CookieConflictError is not raised if domain is specified\n cookie = jar.get(key, domain=domain1)\n assert cookie == value\n\n def test_cookie_duplicate_names_raises_cookie_conflict_error(self):\n key = 'some_cookie'\n value = 'some_value'\n path = 'some_path'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, path=path)\n jar.set(key, value)\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n def test_cookie_policy_copy(self):\n class MyCookiePolicy(cookielib.DefaultCookiePolicy):\n pass\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set_policy(MyCookiePolicy())\n assert isinstance(jar.copy().get_policy(), MyCookiePolicy)\n\n def test_time_elapsed_blank(self, httpbin):\n r = requests.get(httpbin('get'))\n td = r.elapsed\n total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6)\n assert total_seconds > 0.0\n\n def test_empty_response_has_content_none(self):\n r = requests.Response()\n assert r.content is None\n\n def test_response_is_iterable(self):\n r = requests.Response()\n io = StringIO.StringIO('abc')\n read_ = io.read\n\n def read_mock(amt, decode_content=None):\n return read_(amt)\n setattr(io, 'read', read_mock)\n r.raw = io\n assert next(iter(r))\n io.close()\n\n def test_response_decode_unicode(self):\n \"\"\"When called with decode_unicode, Response.iter_content should always\n return unicode.\n \"\"\"\n r = requests.Response()\n r._content_consumed = True\n r._content = b'the content'\n r.encoding = 'ascii'\n\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n # also for streaming\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n r.encoding = 'ascii'\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n def test_response_reason_unicode(self):\n # check for unicode HTTP status\n r = requests.Response()\n r.url = u'unicode URL'\n r.reason = u'Komponenttia ei löydy'.encode('utf-8')\n r.status_code = 404\n r.encoding = None\n assert not r.ok # old behaviour - crashes here\n\n def test_response_reason_unicode_fallback(self):\n # check raise_status falls back to ISO-8859-1\n r = requests.Response()\n r.url = 'some url'\n reason = u'Komponenttia ei löydy'\n r.reason = reason.encode('latin-1')\n r.status_code = 500\n r.encoding = None\n with pytest.raises(requests.exceptions.HTTPError) as e:\n r.raise_for_status()\n assert reason in e.value.args[0]\n\n def test_response_chunk_size_type(self):\n \"\"\"Ensure that chunk_size is passed as None or an integer, otherwise\n raise a TypeError.\n \"\"\"\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(1)\n assert all(len(chunk) == 1 for chunk in chunks)\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(None)\n assert list(chunks) == [b'the content']\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n with pytest.raises(TypeError):\n chunks = r.iter_content(\"1024\")\n\n @pytest.mark.parametrize(\n 'exception, args, expected', (\n (urllib3.exceptions.ProtocolError, tuple(), ChunkedEncodingError),\n (urllib3.exceptions.DecodeError, tuple(), ContentDecodingError),\n (urllib3.exceptions.ReadTimeoutError, (None, '', ''), ConnectionError),\n (urllib3.exceptions.SSLError, tuple(), RequestsSSLError),\n )\n )\n def test_iter_content_wraps_exceptions(\n self, httpbin, 
mocker, exception, args, expected\n ):\n r = requests.Response()\n r.raw = mocker.Mock()\n # ReadTimeoutError can't be initialized by mock\n # so we'll manually create the instance with args\n r.raw.stream.side_effect = exception(*args)\n\n with pytest.raises(expected):\n next(r.iter_content(1024))\n\n def test_request_and_response_are_pickleable(self, httpbin):\n r = requests.get(httpbin('get'))\n\n # verify we can pickle the original request\n assert pickle.loads(pickle.dumps(r.request))\n\n # verify we can pickle the response and that we have access to\n # the original request.\n pr = pickle.loads(pickle.dumps(r))\n assert r.request.url == pr.request.url\n assert r.request.headers == pr.request.headers\n\n def test_prepared_request_is_pickleable(self, httpbin):\n p = requests.Request('GET', httpbin('get')).prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_file_is_pickleable(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.Request('POST', httpbin('post'), files=files)\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_hook_is_pickleable(self, httpbin):\n r = requests.Request('GET', httpbin('get'), hooks=default_hooks())\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n assert r.hooks == p.hooks\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_cannot_send_unprepared_requests(self, httpbin):\n r = requests.Request(url=httpbin())\n with pytest.raises(ValueError):\n requests.Session().send(r)\n\n def test_http_error(self):\n error = requests.exceptions.HTTPError()\n assert not error.response\n response = requests.Response()\n error = requests.exceptions.HTTPError(response=response)\n assert error.response == response\n error = requests.exceptions.HTTPError('message', response=response)\n assert str(error) == 'message'\n assert error.response == response\n\n def test_session_pickling(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n\n s = pickle.loads(pickle.dumps(s))\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n assert r.status_code == 200\n\n def test_fixes_1329(self, httpbin):\n \"\"\"Ensure that header updates are done case-insensitively.\"\"\"\n s = requests.Session()\n s.headers.update({'ACCEPT': 'BOGUS'})\n s.headers.update({'accept': 'application/json'})\n r = s.get(httpbin('get'))\n headers = r.request.headers\n assert headers['accept'] == 'application/json'\n assert headers['Accept'] == 'application/json'\n assert headers['ACCEPT'] == 'application/json'\n\n def test_uppercase_scheme_redirect(self, httpbin):\n parts = urlparse(httpbin('html'))\n url = \"HTTP://\" + parts.netloc + parts.path\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n assert r.status_code == 200\n 
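# Compare case-insensitively: the redirect target was requested with an\n # uppercase 'HTTP://' scheme, which is normalised on the way through.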
assert r.url.lower() == url.lower()\n\n def test_transport_adapter_ordering(self):\n s = requests.Session()\n order = ['https://', 'http://']\n assert order == list(s.adapters)\n s.mount('http://git', HTTPAdapter())\n s.mount('http://github', HTTPAdapter())\n s.mount('http://github.com', HTTPAdapter())\n s.mount('http://github.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://github.com',\n 'http://github',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s.mount('http://gittip', HTTPAdapter())\n s.mount('http://gittip.com', HTTPAdapter())\n s.mount('http://gittip.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://gittip.com/about/',\n 'http://github.com',\n 'http://gittip.com',\n 'http://github',\n 'http://gittip',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s2 = requests.Session()\n s2.adapters = {'http://': HTTPAdapter()}\n s2.mount('https://', HTTPAdapter())\n assert 'http://' in s2.adapters\n assert 'https://' in s2.adapters\n\n def test_session_get_adapter_prefix_matching(self):\n prefix = 'https://example.com'\n more_specific_prefix = prefix + '/some/path'\n\n url_matching_only_prefix = prefix + '/another/path'\n url_matching_more_specific_prefix = more_specific_prefix + '/longer/path'\n url_not_matching_prefix = 'https://another.example.com/'\n\n s = requests.Session()\n prefix_adapter = HTTPAdapter()\n more_specific_prefix_adapter = HTTPAdapter()\n s.mount(prefix, prefix_adapter)\n s.mount(more_specific_prefix, more_specific_prefix_adapter)\n\n assert s.get_adapter(url_matching_only_prefix) is prefix_adapter\n assert s.get_adapter(url_matching_more_specific_prefix) is more_specific_prefix_adapter\n assert s.get_adapter(url_not_matching_prefix) not in (prefix_adapter, more_specific_prefix_adapter)\n\n def test_session_get_adapter_prefix_matching_mixed_case(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix = mixed_case_prefix + '/full_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix) is my_adapter\n\n def test_session_get_adapter_prefix_matching_is_case_insensitive(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix_with_different_case = 'HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter\n\n def test_header_remove_is_case_insensitive(self, httpbin):\n # From issue #1321\n s = requests.Session()\n s.headers['foo'] = 'bar'\n r = s.get(httpbin('get'), headers={'FOO': None})\n assert 'foo' not in r.request.headers\n\n def test_params_are_merged_case_sensitive(self, httpbin):\n s = requests.Session()\n s.params['foo'] = 'bar'\n r = s.get(httpbin('get'), params={'FOO': 'bar'})\n assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'}\n\n def test_long_authinfo_in_url(self):\n url = 'http://{}:{}@{}:9000/path?query#frag'.format(\n 'E8A3BE87-9E3F-4620-8858-95478E385B5B',\n 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E',\n 'exactly-------------sixty-----------three------------characters',\n )\n r = requests.Request('GET', url).prepare()\n assert r.url == url\n\n def test_header_keys_are_native(self, httpbin):\n headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'}\n r = requests.Request('GET', 
httpbin('get'), headers=headers)\n p = r.prepare()\n\n # This is testing that they are builtin strings. A bit weird, but there\n # we go.\n assert 'unicode' in p.headers.keys()\n assert 'byte' in p.headers.keys()\n\n def test_header_validation(self, httpbin):\n \"\"\"Ensure prepare_headers regex isn't flagging valid header contents.\"\"\"\n headers_ok = {'foo': 'bar baz qux',\n 'bar': u'fbbq'.encode('utf8'),\n 'baz': '',\n 'qux': '1'}\n r = requests.get(httpbin('get'), headers=headers_ok)\n assert r.request.headers['foo'] == headers_ok['foo']\n\n def test_header_value_not_str(self, httpbin):\n \"\"\"Ensure the header value is of type string or bytes as\n per discussion in GH issue #3386\n \"\"\"\n headers_int = {'foo': 3}\n headers_dict = {'bar': {'foo': 'bar'}}\n headers_list = {'baz': ['foo', 'bar']}\n\n # Test for int\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_int)\n assert 'foo' in str(excinfo.value)\n # Test for dict\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_dict)\n assert 'bar' in str(excinfo.value)\n # Test for list\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_list)\n assert 'baz' in str(excinfo.value)\n\n def test_header_no_return_chars(self, httpbin):\n \"\"\"Ensure that a header containing return character sequences raises an\n exception. Otherwise, multiple headers are created from a single string.\n \"\"\"\n headers_ret = {'foo': 'bar\r\nbaz: qux'}\n headers_lf = {'foo': 'bar\nbaz: qux'}\n headers_cr = {'foo': 'bar\rbaz: qux'}\n\n # Test for newline\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_ret)\n # Test for line feed\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_lf)\n # Test for carriage return\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_cr)\n\n def test_header_no_leading_space(self, httpbin):\n \"\"\"Ensure headers containing leading whitespace raise an\n InvalidHeader error before sending.\n \"\"\"\n headers_space = {'foo': ' bar'}\n headers_tab = {'foo': '\tbar'}\n\n # Test for space\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_space)\n # Test for tab\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_tab)\n\n @pytest.mark.parametrize('files', ('foo', b'foo', bytearray(b'foo')))\n def test_can_send_objects_with_files(self, httpbin, files):\n data = {'a': 'this is a string'}\n files = {'b': files}\n r = requests.Request('POST', httpbin('post'), data=data, files=files)\n p = r.prepare()\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_can_send_file_object_with_non_string_filename(self, httpbin):\n f = io.BytesIO()\n f.name = 2\n r = requests.Request('POST', httpbin('post'), files={'f': f})\n p = r.prepare()\n\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_autoset_header_values_are_native(self, httpbin):\n data = 'this is a string'\n length = '16'\n req = requests.Request('POST', httpbin('post'), data=data)\n p = req.prepare()\n\n assert p.headers['Content-Length'] == length\n\n def test_nonhttp_schemes_dont_check_URLs(self):\n test_urls = (\n 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==',\n 'file:///etc/passwd',\n 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431',\n )\n for test_url in test_urls:\n req = 
requests.Request('GET', test_url)\n preq = req.prepare()\n assert test_url == preq.url\n\n def test_auth_is_stripped_on_http_downgrade(self, httpbin, httpbin_secure, httpbin_ca_bundle):\n r = requests.get(\n httpbin_secure('redirect-to'),\n params={'url': httpbin('get')},\n auth=('user', 'pass'),\n verify=httpbin_ca_bundle\n )\n assert r.history[0].request.headers['Authorization']\n assert 'Authorization' not in r.request.headers\n\n def test_auth_is_retained_for_redirect_on_host(self, httpbin):\n r = requests.get(httpbin('redirect/1'), auth=('user', 'pass'))\n h1 = r.history[0].request.headers['Authorization']\n h2 = r.request.headers['Authorization']\n\n assert h1 == h2\n\n def test_should_strip_auth_host_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com/foo', 'http://another.example.com/')\n\n def test_should_strip_auth_http_downgrade(self):\n s = requests.Session()\n assert s.should_strip_auth('https://example.com/foo', 'http://example.com/bar')\n\n def test_should_strip_auth_https_upgrade(self):\n s = requests.Session()\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com:80/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com:443/bar')\n # Non-standard ports should trigger stripping\n assert s.should_strip_auth('http://example.com:8080/foo', 'https://example.com/bar')\n assert s.should_strip_auth('http://example.com/foo', 'https://example.com:8443/bar')\n\n def test_should_strip_auth_port_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com:1234/foo', 'https://example.com:4321/bar')\n\n @pytest.mark.parametrize(\n 'old_uri, new_uri', (\n ('https://example.com:443/foo', 'https://example.com/bar'),\n ('http://example.com:80/foo', 'http://example.com/bar'),\n ('https://example.com/foo', 'https://example.com:443/bar'),\n ('http://example.com/foo', 'http://example.com:80/bar')\n ))\n def test_should_strip_auth_default_port(self, old_uri, new_uri):\n s = requests.Session()\n assert not s.should_strip_auth(old_uri, new_uri)\n\n def test_manual_redirect_with_partial_body_read(self, httpbin):\n s = requests.Session()\n r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True)\n assert r1.is_redirect\n rg = s.resolve_redirects(r1, r1.request, stream=True)\n\n # read only the first eight bytes of the response body,\n # then follow the redirect\n r1.iter_content(8)\n r2 = next(rg)\n assert r2.is_redirect\n\n # read all of the response via iter_content,\n # then follow the redirect\n for _ in r2.iter_content():\n pass\n r3 = next(rg)\n assert not r3.is_redirect\n\n def test_prepare_body_position_non_stream(self):\n data = b'the data'\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is None\n\n def test_rewind_body(self):\n data = io.BytesIO(b'the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n assert prep.body.read() == b'the data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b'the data'\n\n def test_rewind_partially_read_body(self):\n data = io.BytesIO(b'the data')\n data.read(4) # read some data\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 4\n assert prep.body.read() == 
b'data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b'data'\n\n def test_rewind_body_no_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def test_rewind_body_failed_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def seek(self, pos, whence=0):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'error occurred when rewinding request body' in str(e)\n\n def test_rewind_body_failed_tell(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is not None\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def _patch_adapter_gzipped_redirect(self, session, url):\n adapter = session.get_adapter(url=url)\n org_build_response = adapter.build_response\n self._patched_response = False\n\n def build_response(*args, **kwargs):\n resp = org_build_response(*args, **kwargs)\n if not self._patched_response:\n resp.raw.headers['content-encoding'] = 'gzip'\n self._patched_response = True\n return resp\n\n adapter.build_response = build_response\n\n def test_redirect_with_wrong_gzipped_header(self, httpbin):\n s = requests.Session()\n url = httpbin('redirect/1')\n self._patch_adapter_gzipped_redirect(s, url)\n s.get(url)\n\n @pytest.mark.parametrize(\n 'username, password, auth_str', (\n ('test', 'test', 'Basic dGVzdDp0ZXN0'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8'), 'Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA=='),\n ))\n def test_basic_auth_str_is_always_native(self, username, password, auth_str):\n s = _basic_auth_str(username, password)\n assert isinstance(s, builtin_str)\n assert s == auth_str\n\n def test_requests_history_is_saved(self, httpbin):\n r = requests.get(httpbin('redirect/5'))\n total = r.history[-1].history\n i = 0\n for item in r.history:\n assert item.history == total[0:i]\n i += 1\n\n def test_json_param_post_content_type_works(self, httpbin):\n r = requests.post(\n httpbin('post'),\n json={'life': 42}\n )\n assert r.status_code == 200\n assert 'application/json' in r.request.headers['Content-Type']\n assert {'life': 42} == r.json()['json']\n\n def test_json_param_post_should_not_override_data_param(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data={'stuff': 'elixr'},\n json={'music': 'flute'})\n prep = r.prepare()\n assert 'stuff=elixr' == prep.body\n\n def test_response_iter_lines(self, httpbin):\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n it = r.iter_lines()\n next(it)\n assert len(list(it)) == 3\n\n def 
test_response_context_manager(self, httpbin):\n with requests.get(httpbin('stream/4'), stream=True) as response:\n assert isinstance(response, requests.Response)\n\n assert response.raw.closed\n\n def test_unconsumed_session_response_closes_connection(self, httpbin):\n s = requests.session()\n\n with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response:\n pass\n\n assert response._content_consumed is False\n assert response.raw.closed\n\n @pytest.mark.xfail\n def test_response_iter_lines_reentrant(self, httpbin):\n \"\"\"Response.iter_lines() is not reentrant safe\"\"\"\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n next(r.iter_lines())\n assert len(list(r.iter_lines())) == 3\n\n def test_session_close_proxy_clear(self, mocker):\n proxies = {\n 'one': mocker.Mock(),\n 'two': mocker.Mock(),\n }\n session = requests.Session()\n mocker.patch.dict(session.adapters['http://'].proxy_manager, proxies)\n session.close()\n proxies['one'].clear.assert_called_once_with()\n proxies['two'].clear.assert_called_once_with()\n\n def test_proxy_auth(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:pass@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}\n\n def test_proxy_auth_empty_pass(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjo='}\n\n def test_response_json_when_content_is_None(self, httpbin):\n r = requests.get(httpbin('/status/204'))\n # Make sure r.content is None\n r.status_code = 0\n r._content = False\n r._content_consumed = False\n\n assert r.content is None\n with pytest.raises(ValueError):\n r.json()\n\n def test_response_without_release_conn(self):\n \"\"\"Test `close` call for non-urllib3-like raw objects.\n Should work when `release_conn` attr doesn't exist on `response.raw`.\n \"\"\"\n resp = requests.Response()\n resp.raw = StringIO.StringIO('test')\n assert not resp.raw.closed\n resp.close()\n assert resp.raw.closed\n\n def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size > 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'test data')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' not in prepared_request.headers\n assert 'Content-Length' in prepared_request.headers\n\n def test_chunked_upload_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that requests with a generator body use\n Transfer-Encoding: chunked, not a Content-Length header.\n \"\"\"\n data = (i for i in [b'a', b'b', b'c'])\n url = httpbin('post')\n r = requests.Request('POST', url, data=data)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in 
prepared_request.headers\n\n def test_custom_redirect_mixin(self, httpbin):\n \"\"\"Tests a custom mixin to overwrite ``get_redirect_target``.\n\n Ensures a subclassed ``requests.Session`` can handle a certain type of\n malformed redirect responses.\n\n 1. original request receives a proper response: 302 redirect\n 2. following the redirect, a malformed response is given:\n status code = HTTP 200\n location = alternate url\n 3. the custom session catches the edge case and follows the redirect\n \"\"\"\n url_final = httpbin('html')\n querystring_malformed = urlencode({'location': url_final})\n url_redirect_malformed = httpbin('response-headers?%s' % querystring_malformed)\n querystring_redirect = urlencode({'url': url_redirect_malformed})\n url_redirect = httpbin('redirect-to?%s' % querystring_redirect)\n urls_test = [url_redirect,\n url_redirect_malformed,\n url_final,\n ]\n\n class CustomRedirectSession(requests.Session):\n def get_redirect_target(self, resp):\n # default behavior\n if resp.is_redirect:\n return resp.headers['location']\n # edge case - check to see if 'location' is in headers anyways\n location = resp.headers.get('location')\n if location and (location != resp.url):\n return location\n return None\n\n session = CustomRedirectSession()\n r = session.get(urls_test[0])\n assert len(r.history) == 2\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n assert r.history[1].status_code == 200\n assert not r.history[1].is_redirect\n assert r.url == urls_test[2]\n\n\nclass TestCaseInsensitiveDict:\n\n @pytest.mark.parametrize(\n 'cid', (\n CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}),\n CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')]),\n CaseInsensitiveDict(FOO='foo', BAr='bar'),\n ))\n def test_init(self, cid):\n assert len(cid) == 2\n assert 'foo' in cid\n assert 'bar' in cid\n\n def test_docstring_example(self):\n cid = CaseInsensitiveDict()\n cid['Accept'] = 'application/json'\n assert cid['aCCEPT'] == 'application/json'\n assert list(cid) == ['Accept']\n\n def test_len(self):\n cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})\n cid['A'] = 'a'\n assert len(cid) == 2\n\n def test_getitem(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n\n def test_fixes_649(self):\n \"\"\"__setitem__ should behave case-insensitively.\"\"\"\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['Spam'] = 'twoval'\n cid['sPAM'] = 'redval'\n cid['SPAM'] = 'blueval'\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n assert list(cid.keys()) == ['SPAM']\n\n def test_delitem(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n del cid['sPam']\n assert 'spam' not in cid\n assert len(cid) == 0\n\n def test_contains(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n assert 'Spam' in cid\n assert 'spam' in cid\n assert 'SPAM' in cid\n assert 'sPam' in cid\n assert 'notspam' not in cid\n\n def test_get(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['SPAM'] = 'blueval'\n assert cid.get('spam') == 'blueval'\n assert cid.get('SPAM') == 'blueval'\n assert cid.get('sPam') == 'blueval'\n assert cid.get('notspam', 'default') == 'default'\n\n def test_update(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'blueval'\n cid.update({'sPam': 'notblueval'})\n assert cid['spam'] == 'notblueval'\n cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})\n cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'})\n 
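# Both updates matched the existing keys case-insensitively, so the dict\n # still holds exactly two entries, now carrying the replacement values.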
assert len(cid) == 2\n assert cid['foo'] == 'anotherfoo'\n assert cid['bar'] == 'anotherbar'\n\n def test_update_retains_unchanged(self):\n cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})\n cid.update({'foo': 'newfoo'})\n assert cid['bar'] == 'bar'\n\n def test_iter(self):\n cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})\n keys = frozenset(['Spam', 'Eggs'])\n assert frozenset(iter(cid)) == keys\n\n def test_equality(self):\n cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'})\n othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'})\n assert cid == othercid\n del othercid['spam']\n assert cid != othercid\n assert cid == {'spam': 'blueval', 'eggs': 'redval'}\n assert cid != object()\n\n def test_setdefault(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid.setdefault('spam', 'notblueval') == 'blueval'\n assert cid.setdefault('notspam', 'notblueval') == 'notblueval'\n\n def test_lower_items(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())\n lowerkeyset = frozenset(['accept', 'user-agent'])\n assert keyset == lowerkeyset\n\n def test_preserve_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(['Accept', 'user-Agent'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_preserve_last_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid.update({'ACCEPT': 'application/json'})\n cid['USER-AGENT'] = 'requests'\n keyset = frozenset(['ACCEPT', 'USER-AGENT'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_copy(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid_copy = cid.copy()\n assert cid == cid_copy\n cid['changed'] = True\n assert cid != cid_copy\n\n\nclass TestMorselToCookieExpires:\n \"\"\"Tests for morsel_to_cookie when morsel contains expires.\"\"\"\n\n def test_expires_valid_str(self):\n \"\"\"Test case where we convert expires from string time.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires == 1\n\n @pytest.mark.parametrize(\n 'value, exception', (\n (100, TypeError),\n ('woops', ValueError),\n ))\n def test_expires_invalid_int(self, value, exception):\n \"\"\"Test case where an invalid type is passed for expires.\"\"\"\n morsel = Morsel()\n morsel['expires'] = value\n with pytest.raises(exception):\n morsel_to_cookie(morsel)\n\n def test_expires_none(self):\n \"\"\"Test case where expires is None.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = None\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires is None\n\n\nclass TestMorselToCookieMaxAge:\n\n \"\"\"Tests for morsel_to_cookie when morsel contains max-age.\"\"\"\n\n def test_max_age_valid_int(self):\n \"\"\"Test case where a valid max age in seconds is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 60\n cookie = morsel_to_cookie(morsel)\n assert isinstance(cookie.expires, int)\n\n def test_max_age_invalid_str(self):\n \"\"\"Test case where an invalid max age is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 'woops'\n with 
pytest.raises(TypeError):\n morsel_to_cookie(morsel)\n\n\nclass TestTimeout:\n\n def test_stream_timeout(self, httpbin):\n try:\n requests.get(httpbin('delay/10'), timeout=2.0)\n except requests.exceptions.Timeout as e:\n assert 'Read timed out' in e.args[0].args[0]\n\n @pytest.mark.parametrize(\n 'timeout, error_text', (\n ((3, 4, 5), '(connect, read)'),\n ('foo', 'must be an int, float or None'),\n ))\n def test_invalid_timeout(self, httpbin, timeout, error_text):\n with pytest.raises(ValueError) as e:\n requests.get(httpbin('get'), timeout=timeout)\n assert error_text in str(e)\n\n @pytest.mark.parametrize(\n 'timeout', (\n None,\n Urllib3Timeout(connect=None, read=None)\n ))\n def test_none_timeout(self, httpbin, timeout):\n \"\"\"Check that you can set None as a valid timeout value.\n\n To actually test this behavior, we'd want to check that setting the\n timeout to None actually lets the request block past the system default\n timeout. However, this would make the test suite unbearably slow.\n Instead we verify that setting the timeout to None does not prevent the\n request from succeeding.\n \"\"\"\n r = requests.get(httpbin('get'), timeout=timeout)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'timeout', (\n (None, 0.1),\n Urllib3Timeout(connect=None, read=0.1)\n ))\n def test_read_timeout(self, httpbin, timeout):\n try:\n requests.get(httpbin('delay/10'), timeout=timeout)\n pytest.fail('The recv() request should time out.')\n except ReadTimeout:\n pass\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, None),\n Urllib3Timeout(connect=0.1, read=None)\n ))\n def test_connect_timeout(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout as e:\n assert isinstance(e, ConnectionError)\n assert isinstance(e, Timeout)\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, 0.1),\n Urllib3Timeout(connect=0.1, read=0.1)\n ))\n def test_total_timeout_connect(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout:\n pass\n\n def test_encoded_methods(self, httpbin):\n \"\"\"See: https://github.com/psf/requests/issues/2316\"\"\"\n r = requests.request(b'GET', httpbin('get'))\n assert r.ok\n\n\nSendCall = collections.namedtuple('SendCall', ('args', 'kwargs'))\n\n\nclass RedirectSession(SessionRedirectMixin):\n def __init__(self, order_of_redirects):\n self.redirects = order_of_redirects\n self.calls = []\n self.max_redirects = 30\n self.cookies = {}\n self.trust_env = False\n\n def send(self, *args, **kwargs):\n self.calls.append(SendCall(args, kwargs))\n return self.build_response()\n\n def build_response(self):\n request = self.calls[-1].args[0]\n r = requests.Response()\n\n try:\n r.status_code = int(self.redirects.pop(0))\n except IndexError:\n r.status_code = 200\n\n r.headers = CaseInsensitiveDict({'Location': '/'})\n r.raw = self._build_raw()\n r.request = request\n return r\n\n def _build_raw(self):\n string = StringIO.StringIO('')\n setattr(string, 'release_conn', lambda *args: args)\n return string\n\n\ndef test_json_encodes_as_bytes():\n # urllib3 expects bodies as bytes-like objects\n body = {\"key\": \"value\"}\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='https://www.example.com/',\n json=body\n )\n assert isinstance(p.body, bytes)\n\n\ndef test_requests_are_updated_each_time(httpbin):\n session = RedirectSession([303, 307])\n prep = requests.Request('POST', 
httpbin('post')).prepare()\n r0 = session.send(prep)\n assert r0.request.method == 'POST'\n assert session.calls[-1] == SendCall((r0.request,), {})\n redirect_generator = session.resolve_redirects(r0, prep)\n default_keyword_args = {\n 'stream': False,\n 'verify': True,\n 'cert': None,\n 'timeout': None,\n 'allow_redirects': False,\n 'proxies': {},\n }\n for response in redirect_generator:\n assert response.request.method == 'GET'\n send_call = SendCall((response.request,), default_keyword_args)\n assert session.calls[-1] == send_call\n\n\n@pytest.mark.parametrize(\"var,url,proxy\", [\n ('http_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('https_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n])\ndef test_proxy_env_vars_override_default(var, url, proxy):\n session = requests.Session()\n prep = PreparedRequest()\n prep.prepare(method='GET', url=url)\n\n kwargs = {\n var: proxy\n }\n scheme = urlparse(url).scheme\n with override_environ(**kwargs):\n proxies = session.rebuild_proxies(prep, {})\n assert scheme in proxies\n assert proxies[scheme] == proxy\n\n\n@pytest.mark.parametrize(\n 'data', (\n (('a', 'b'), ('c', 'd')),\n (('c', 'd'), ('a', 'b')),\n (('a', 'b'), ('c', 'd'), ('e', 'f')),\n ))\ndef test_data_argument_accepts_tuples(data):\n \"\"\"Ensure that the data argument will accept tuples of strings\n and properly encode them.\n \"\"\"\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='http://www.example.com',\n data=data,\n hooks=default_hooks()\n )\n assert p.body == urlencode(data)\n\n\n@pytest.mark.parametrize(\n 'kwargs', (\n None,\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks()\n },\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks(),\n 'cookies': {'foo': 'bar'}\n },\n {\n 'method': 'GET',\n 'url': u('http://www.example.com/üniçø∂é')\n },\n ))\ndef test_prepared_copy(kwargs):\n p = PreparedRequest()\n if kwargs:\n p.prepare(**kwargs)\n copy = p.copy()\n for attr in ('method', 'url', 'headers', '_cookies', 'body', 'hooks'):\n assert getattr(p, attr) == getattr(copy, attr)\n\n\ndef test_urllib3_retries(httpbin):\n from urllib3.util import Retry\n s = requests.Session()\n s.mount('http://', HTTPAdapter(max_retries=Retry(\n total=2, status_forcelist=[500]\n )))\n\n with pytest.raises(RetryError):\n s.get(httpbin('status/500'))\n\n\ndef test_urllib3_pool_connection_closed(httpbin):\n s = requests.Session()\n s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0))\n\n try:\n s.get(httpbin('status/200'))\n except ConnectionError as e:\n assert u\"Pool is closed.\" in str(e)\n\n\nclass TestPreparingURLs(object):\n @pytest.mark.parametrize(\n 'url,expected',\n (\n ('http://google.com', 'http://google.com/'),\n (u'http://ジェーピーニック.jp', u'http://xn--hckqz9bzb1cyrb.jp/'),\n (u'http://xn--n3h.net/', u'http://xn--n3h.net/'),\n (\n u'http://ジェーピーニック.jp'.encode('utf-8'),\n u'http://xn--hckqz9bzb1cyrb.jp/'\n ),\n (\n u'http://straße.de/straße',\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://straße.de/straße'.encode('utf-8'),\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße',\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße'.encode('utf-8'),\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n 
b'http://xn--n3h.net/',\n u'http://xn--n3h.net/'\n ),\n (\n b'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n ),\n (\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n )\n )\n )\n def test_preparing_url(self, url, expected):\n\n def normalize_percent_encode(x):\n # Helper function that normalizes equivalent \n # percent-encoded bytes before comparisons\n for c in re.findall(r'%[a-fA-F0-9]{2}', x):\n x = x.replace(c, c.upper())\n return x\n \n r = requests.Request('GET', url=url)\n p = r.prepare()\n assert normalize_percent_encode(p.url) == expected\n\n @pytest.mark.parametrize(\n 'url',\n (\n b\"http://*.google.com\",\n b\"http://*\",\n u\"http://*.google.com\",\n u\"http://*\",\n u\"http://☃.net/\"\n )\n )\n def test_preparing_bad_url(self, url):\n r = requests.Request('GET', url=url)\n with pytest.raises(requests.exceptions.InvalidURL):\n r.prepare()\n\n @pytest.mark.parametrize(\n 'url, exception',\n (\n ('http://localhost:-1', InvalidURL),\n )\n )\n def test_redirecting_to_bad_url(self, httpbin, url, exception):\n with pytest.raises(exception):\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n\n @pytest.mark.parametrize(\n 'input, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n b\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n b\"data:SSDimaUgUHl0aG9uIQ==\",\n u\"data:SSDimaUgUHl0aG9uIQ==\",\n )\n )\n )\n def test_url_mutation(self, input, expected):\n \"\"\"\n This test validates that we correctly exclude some URLs from\n preparation, and that we handle others. 
Specifically, it tests that\n any URL whose scheme doesn't begin with \"http\" is left alone, and\n those whose scheme *does* begin with \"http\" are mutated.\n \"\"\"\n r = requests.Request('GET', url=input)\n p = r.prepare()\n assert p.url == expected\n\n @pytest.mark.parametrize(\n 'input, params, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n b\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n )\n )\n def test_parameters_for_nonstandard_schemes(self, input, params, expected):\n \"\"\"\n Setting parameters for nonstandard schemes is allowed if those schemes\n begin with \"http\", and is forbidden otherwise.\n \"\"\"\n r = requests.Request('GET', url=input, params=params)\n p = r.prepare()\n assert p.url == expected\n\n def test_post_json_nan(self, httpbin):\n data = {\"foo\": float(\"nan\")}\n with pytest.raises(requests.exceptions.InvalidJSONError):\n r = requests.post(httpbin('post'), json=data)\n\n def test_json_decode_compatibility(self, httpbin):\n r = requests.get(httpbin('bytes/20'))\n with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:\n r.json()\n assert isinstance(excinfo.value, RequestException)\n assert isinstance(excinfo.value, JSONDecodeError)\n assert r.text not in str(excinfo.value)\n\n @pytest.mark.skipif(not is_py3, reason=\"doc attribute is only present on py3\")\n def test_json_decode_persists_doc_attr(self, httpbin):\n r = requests.get(httpbin('bytes/20'))\n with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:\n r.json()\n assert excinfo.value.doc == r.text\n",
"path": "tests/test_requests.py"
}
] | 13_3 | python | import sys
import pytest
from unittest import mock
# Requests to this URL should always fail with a connection timeout (nothing
# listening on that port)
TARPIT = "http://10.255.255.1"
# This is used instead of TARPIT to avoid waiting out its connection timeout
INVALID_PROXY = "http://localhost:1"
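# NOTE: port 1 is assumed to have no local listener, so connections to
# INVALID_PROXY are refused immediately rather than timing out like TARPIT.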
class TestRequests:
import requests
from requests.exceptions import (
InvalidHeader
)
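    # These imports run while the class body is evaluated, so `requests` is
    # available to the feature-detection try/except blocks below (e.g. the
    # `requests.pyopenssl` probe). Names bound here become class attributes,
    # which is why test methods such as test_env_cert_bundles re-import
    # `requests` locally.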
try:
from ssl import SSLContext
del SSLContext
HAS_MODERN_SSL = True
except ImportError:
HAS_MODERN_SSL = False
try:
requests.pyopenssl
HAS_PYOPENSSL = True
except AttributeError:
HAS_PYOPENSSL = False
try:
from http.server import HTTPServer, SimpleHTTPRequestHandler
except ImportError:
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
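    # Python 2 fallback: BaseHTTPServer and SimpleHTTPServer were merged
    # into http.server in Python 3.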
@staticmethod
def prepare_url(value):
from requests.compat import urljoin
# Issue #1483: Make sure the URL always has a trailing slash
httpbin_url = value.url.rstrip("/") + "/"
def inner(*suffix):
return urljoin(httpbin_url, "/".join(suffix))
return inner
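        # For example (hypothetical address): if value.url is
        # "http://127.0.0.1:5000", then prepare_url(value)("status", "200")
        # returns "http://127.0.0.1:5000/status/200".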
@pytest.fixture
def httpbin(self, httpbin):
return self.prepare_url(httpbin)
@pytest.fixture
def httpbin_secure(self, httpbin_secure):
return self.prepare_url(httpbin_secure)
@pytest.fixture
def nosan_server(self, tmp_path_factory):
        # Delay these imports until the fixture runs, so the test can be
        # deselected from the command line when trustme is not installed.
import trustme
import ssl
import threading
tmpdir = tmp_path_factory.mktemp("certs")
ca = trustme.CA()
# only commonName, no subjectAltName
server_cert = ca.issue_cert(common_name="localhost")
ca_bundle = str(tmpdir / "ca.pem")
ca.cert_pem.write_to_path(ca_bundle)
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
server_cert.configure_cert(context)
server = self.HTTPServer(("localhost", 0), self.SimpleHTTPRequestHandler)
server.socket = context.wrap_socket(server.socket, server_side=True)
server_thread = threading.Thread(target=server.serve_forever)
server_thread.start()
yield "localhost", server.server_address[1], ca_bundle
server.shutdown()
server_thread.join()
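        # Everything above the `yield` is setup; shutdown()/join() run as
        # teardown once the test completes. A test unpacks the fixture as
        #   host, port, ca_bundle = nosan_server
        # and requests https://host:port/ with verify=ca_bundle.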
@pytest.mark.parametrize(
"env, expected",
(
({}, True),
({"REQUESTS_CA_BUNDLE": "/some/path"}, "/some/path"),
({"REQUESTS_CA_BUNDLE": ""}, True),
({"CURL_CA_BUNDLE": "/some/path"}, "/some/path"),
({"CURL_CA_BUNDLE": ""}, True),
({"REQUESTS_CA_BUNDLE": "", "CURL_CA_BUNDLE": ""}, True),
(
{
"REQUESTS_CA_BUNDLE": "/some/path",
"CURL_CA_BUNDLE": "/curl/path",
},
"/some/path",
),
(
{
"REQUESTS_CA_BUNDLE": "",
"CURL_CA_BUNDLE": "/curl/path",
},
"/curl/path",
),
),
)
def test_env_cert_bundles(self, httpbin, env, expected):
import requests
s = requests.Session()
with mock.patch("os.environ", env):
settings = s.merge_environment_settings(
url=httpbin("get"), proxies={}, stream=False, verify=True, cert=None
)
assert settings["verify"] == expected
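    # The parametrization above encodes the expected precedence:
    # REQUESTS_CA_BUNDLE wins over CURL_CA_BUNDLE, and an empty value is
    # treated as unset, falling back to the default verify=True.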
def main():
    # Run the pytest test suite programmatically; pytest is already imported
    # at module scope, so no local import is needed here.
exit_code = pytest.main(["-v", __file__])
# Exit with status code 1 if any test fails, otherwise 0
if exit_code != 0:
sys.exit(1)
else:
sys.exit(0)
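# pytest.main() returns pytest's ExitCode (0 == all tests passed); main()
# collapses any nonzero result to exit status 1 for CI callers.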
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/requests | Your objective is to refine exception handling in the requests library, specifically for JSON decoding errors. Modify the JSONDecodeError class in `requests/exceptions.py` so that it properly inherits from and integrates both InvalidJSONError (from requests) and JSONDecodeError (from the json module), preserving the JSON-specific error message while keeping the requests exception hierarchy intact. | d15a3b6 | -e .[socks]
pytest
pytest-cov
pytest-httpbin==1.0.0
pytest-mock
httpbin==0.7.0
trustme
wheel
chardet>=3.0.2,<3.1.0
idna>=2.5,<2.8
urllib3>=1.21.1,<1.24
certifi>=2017.4.17
# Flask Stack
Flask>1.0,<2.0
markupsafe<2.1
| python3.9 | fa1b0a36 | diff --git a/requests/exceptions.py b/requests/exceptions.py
--- a/requests/exceptions.py
+++ b/requests/exceptions.py
@@ -34,6 +34,16 @@ class InvalidJSONError(RequestException):
class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
"""Couldn't decode the text into json"""
+ def __init__(self, *args, **kwargs):
+ """
+ Construct the JSONDecodeError instance first with all
+ args. Then use its args to construct the IOError so that
+ the json specific args aren't used as IOError specific args
+ and the error message from JSONDecodeError is preserved.
+ """
+ CompatJSONDecodeError.__init__(self, *args)
+ InvalidJSONError.__init__(self, *self.args, **kwargs)
+
class HTTPError(RequestException):
"""An HTTP error occurred."""
diff --git a/tests/test_requests.py b/tests/test_requests.py
--- a/tests/test_requests.py
+++ b/tests/test_requests.py
@@ -23,14 +23,14 @@ from requests.cookies import (
cookiejar_from_dict, morsel_to_cookie)
from requests.exceptions import (
ConnectionError, ConnectTimeout, InvalidSchema, InvalidURL,
- MissingSchema, ReadTimeout, Timeout, RetryError, TooManyRedirects,
+ MissingSchema, ReadTimeout, Timeout, RetryError, RequestException, TooManyRedirects,
ProxyError, InvalidHeader, UnrewindableBodyError, SSLError, InvalidProxyURL, InvalidJSONError)
from requests.models import PreparedRequest
from requests.structures import CaseInsensitiveDict
from requests.sessions import SessionRedirectMixin
from requests.models import urlencode
from requests.hooks import default_hooks
-from requests.compat import MutableMapping
+from requests.compat import JSONDecodeError, is_py3, MutableMapping
from .compat import StringIO, u
from .utils import override_environ
@@ -2585,5 +2585,15 @@ class TestPreparingURLs(object):
def test_json_decode_compatibility(self, httpbin):
r = requests.get(httpbin('bytes/20'))
- with pytest.raises(requests.exceptions.JSONDecodeError):
+ with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:
r.json()
+ assert isinstance(excinfo.value, RequestException)
+ assert isinstance(excinfo.value, JSONDecodeError)
+ assert r.text not in str(excinfo.value)
+
+ @pytest.mark.skipif(not is_py3, reason="doc attribute is only present on py3")
+ def test_json_decode_persists_doc_attr(self, httpbin):
+ r = requests.get(httpbin('bytes/20'))
+ with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:
+ r.json()
+ assert excinfo.value.doc == r.text
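The patch above relies on cooperative multiple inheritance: the
json.JSONDecodeError initializer runs first so it formats (msg, doc, pos)
into a readable message and sets the .msg/.doc/.pos attributes, and the
formatted self.args are then re-fed to the IOError-based parent so the
JSON-specific arguments are never misread as (errno, strerror). A minimal,
self-contained sketch of the same technique (class names here are
illustrative, not part of requests):

import json

class InvalidJSONishError(IOError):
    # Stand-in for requests' RequestException -> InvalidJSONError chain.
    def __init__(self, *args, **kwargs):
        self.response = kwargs.pop("response", None)
        super().__init__(*args)

class DualJSONDecodeError(InvalidJSONishError, json.JSONDecodeError):
    def __init__(self, *args, **kwargs):
        # Let json.JSONDecodeError parse (msg, doc, pos): it builds the
        # detailed message and sets the .msg/.doc/.pos attributes.
        json.JSONDecodeError.__init__(self, *args)
        # Re-feed the already-formatted self.args so IOError never sees
        # (msg, doc, pos) and misreads them as (errno, strerror, ...).
        InvalidJSONishError.__init__(self, *self.args, **kwargs)

try:
    json.loads("not json")
except json.JSONDecodeError as exc:
    err = DualJSONDecodeError(exc.msg, exc.doc, exc.pos)
    assert err.doc == "not json"              # .doc survives
    assert "Expecting value" in str(err)      # formatted message preserved
    assert isinstance(err, IOError)           # exception hierarchy intact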
| [
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.exceptions\n~~~~~~~~~~~~~~~~~~~\n\nThis module contains the set of Requests' exceptions.\n\"\"\"\nfrom urllib3.exceptions import HTTPError as BaseHTTPError\n\nfrom .compat import JSONDecodeError as CompatJSONDecodeError\n\n\nclass RequestException(IOError):\n \"\"\"There was an ambiguous exception that occurred while handling your\n request.\n \"\"\"\n\n def __init__(self, *args, **kwargs):\n \"\"\"Initialize RequestException with `request` and `response` objects.\"\"\"\n response = kwargs.pop('response', None)\n self.response = response\n self.request = kwargs.pop('request', None)\n if (response is not None and not self.request and\n hasattr(response, 'request')):\n self.request = self.response.request\n super(RequestException, self).__init__(*args, **kwargs)\n\n\nclass InvalidJSONError(RequestException):\n \"\"\"A JSON error occurred.\"\"\"\n\n\nclass JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):\n \"\"\"Couldn't decode the text into json\"\"\"\n\n\nclass HTTPError(RequestException):\n \"\"\"An HTTP error occurred.\"\"\"\n\n\nclass ConnectionError(RequestException):\n \"\"\"A Connection error occurred.\"\"\"\n\n\nclass ProxyError(ConnectionError):\n \"\"\"A proxy error occurred.\"\"\"\n\n\nclass SSLError(ConnectionError):\n \"\"\"An SSL error occurred.\"\"\"\n\n\nclass Timeout(RequestException):\n \"\"\"The request timed out.\n\n Catching this error will catch both\n :exc:`~requests.exceptions.ConnectTimeout` and\n :exc:`~requests.exceptions.ReadTimeout` errors.\n \"\"\"\n\n\nclass ConnectTimeout(ConnectionError, Timeout):\n \"\"\"The request timed out while trying to connect to the remote server.\n\n Requests that produced this error are safe to retry.\n \"\"\"\n\n\nclass ReadTimeout(Timeout):\n \"\"\"The server did not send any data in the allotted amount of time.\"\"\"\n\n\nclass URLRequired(RequestException):\n \"\"\"A valid URL is required to make a request.\"\"\"\n\n\nclass TooManyRedirects(RequestException):\n \"\"\"Too many redirects.\"\"\"\n\n\nclass MissingSchema(RequestException, ValueError):\n \"\"\"The URL scheme (e.g. 
http or https) is missing.\"\"\"\n\n\nclass InvalidSchema(RequestException, ValueError):\n \"\"\"The URL scheme provided is either invalid or unsupported.\"\"\"\n\n\nclass InvalidURL(RequestException, ValueError):\n \"\"\"The URL provided was somehow invalid.\"\"\"\n\n\nclass InvalidHeader(RequestException, ValueError):\n \"\"\"The header value provided was somehow invalid.\"\"\"\n\n\nclass InvalidProxyURL(InvalidURL):\n \"\"\"The proxy URL provided is invalid.\"\"\"\n\n\nclass ChunkedEncodingError(RequestException):\n \"\"\"The server declared chunked encoding but sent an invalid chunk.\"\"\"\n\n\nclass ContentDecodingError(RequestException, BaseHTTPError):\n \"\"\"Failed to decode response content.\"\"\"\n\n\nclass StreamConsumedError(RequestException, TypeError):\n \"\"\"The content for this response was already consumed.\"\"\"\n\n\nclass RetryError(RequestException):\n \"\"\"Custom retries logic failed\"\"\"\n\n\nclass UnrewindableBodyError(RequestException):\n \"\"\"Requests encountered an error when trying to rewind a body.\"\"\"\n\n# Warnings\n\n\nclass RequestsWarning(Warning):\n \"\"\"Base warning for Requests.\"\"\"\n\n\nclass FileModeWarning(RequestsWarning, DeprecationWarning):\n \"\"\"A file was opened in text mode, but Requests determined its binary length.\"\"\"\n\n\nclass RequestsDependencyWarning(RequestsWarning):\n \"\"\"An imported dependency doesn't match the expected version range.\"\"\"\n",
"path": "requests/exceptions.py"
},
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Tests for Requests.\"\"\"\n\nfrom __future__ import division\nimport json\nimport os\nimport pickle\nimport collections\nimport contextlib\nimport warnings\nimport re\n\nimport io\nimport requests\nimport pytest\nfrom requests.adapters import HTTPAdapter\nfrom requests.auth import HTTPDigestAuth, _basic_auth_str\nfrom requests.compat import (\n Morsel, cookielib, getproxies, str, urlparse,\n builtin_str)\nfrom requests.cookies import (\n cookiejar_from_dict, morsel_to_cookie)\nfrom requests.exceptions import (\n ConnectionError, ConnectTimeout, InvalidSchema, InvalidURL,\n MissingSchema, ReadTimeout, Timeout, RetryError, TooManyRedirects,\n ProxyError, InvalidHeader, UnrewindableBodyError, SSLError, InvalidProxyURL, InvalidJSONError)\nfrom requests.models import PreparedRequest\nfrom requests.structures import CaseInsensitiveDict\nfrom requests.sessions import SessionRedirectMixin\nfrom requests.models import urlencode\nfrom requests.hooks import default_hooks\nfrom requests.compat import MutableMapping\n\nfrom .compat import StringIO, u\nfrom .utils import override_environ\nfrom urllib3.util import Timeout as Urllib3Timeout\n\n# Requests to this URL should always fail with a connection timeout (nothing\n# listening on that port)\nTARPIT = 'http://10.255.255.1'\n\n# This is to avoid waiting the timeout of using TARPIT\nINVALID_PROXY='http://localhost:1'\n\ntry:\n from ssl import SSLContext\n del SSLContext\n HAS_MODERN_SSL = True\nexcept ImportError:\n HAS_MODERN_SSL = False\n\ntry:\n requests.pyopenssl\n HAS_PYOPENSSL = True\nexcept AttributeError:\n HAS_PYOPENSSL = False\n\n\nclass TestRequests:\n\n digest_auth_algo = ('MD5', 'SHA-256', 'SHA-512')\n\n def test_entry_points(self):\n\n requests.session\n requests.session().get\n requests.session().head\n requests.get\n requests.head\n requests.put\n requests.patch\n requests.post\n # Not really an entry point, but people rely on it.\n from requests.packages.urllib3.poolmanager import PoolManager\n\n @pytest.mark.parametrize(\n 'exception, url', (\n (MissingSchema, 'hiwpefhipowhefopw'),\n (InvalidSchema, 'localhost:3128'),\n (InvalidSchema, 'localhost.localdomain:3128/'),\n (InvalidSchema, '10.122.1.1:3128/'),\n (InvalidURL, 'http://'),\n (InvalidURL, 'http://*example.com'),\n (InvalidURL, 'http://.example.com'),\n ))\n def test_invalid_url(self, exception, url):\n with pytest.raises(exception):\n requests.get(url)\n\n def test_basic_building(self):\n req = requests.Request()\n req.url = 'http://kennethreitz.org/'\n req.data = {'life': '42'}\n\n pr = req.prepare()\n assert pr.url == req.url\n assert pr.body == 'life=42'\n\n @pytest.mark.parametrize('method', ('GET', 'HEAD'))\n def test_no_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert 'Content-Length' not in req.headers\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_no_body_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert req.headers['Content-Length'] == '0'\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_empty_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower()), data='').prepare()\n assert req.headers['Content-Length'] == '0'\n\n def test_override_content_length(self, httpbin):\n headers = {\n 'Content-Length': 'not zero'\n }\n r = requests.Request('POST', httpbin('post'), 
headers=headers).prepare()\n assert 'Content-Length' in r.headers\n assert r.headers['Content-Length'] == 'not zero'\n\n def test_path_is_not_double_encoded(self):\n request = requests.Request('GET', \"http://0.0.0.0/get/test case\").prepare()\n\n assert request.path_url == '/get/test%20case'\n\n @pytest.mark.parametrize(\n 'url, expected', (\n ('http://example.com/path#fragment', 'http://example.com/path?a=b#fragment'),\n ('http://example.com/path?key=value#fragment', 'http://example.com/path?key=value&a=b#fragment')\n ))\n def test_params_are_added_before_fragment(self, url, expected):\n request = requests.Request('GET', url, params={\"a\": \"b\"}).prepare()\n assert request.url == expected\n\n def test_params_original_order_is_preserved_by_default(self):\n param_ordered_dict = collections.OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1)))\n session = requests.Session()\n request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict)\n prep = session.prepare_request(request)\n assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1'\n\n def test_params_bytes_are_encoded(self):\n request = requests.Request('GET', 'http://example.com',\n params=b'test=foo').prepare()\n assert request.url == 'http://example.com/?test=foo'\n\n def test_binary_put(self):\n request = requests.Request('PUT', 'http://example.com',\n data=u\"ööö\".encode(\"utf-8\")).prepare()\n assert isinstance(request.body, bytes)\n\n def test_whitespaces_are_removed_from_url(self):\n # Test for issue #3696\n request = requests.Request('GET', ' http://example.com').prepare()\n assert request.url == 'http://example.com/'\n\n @pytest.mark.parametrize('scheme', ('http://', 'HTTP://', 'hTTp://', 'HttP://'))\n def test_mixed_case_scheme_acceptable(self, httpbin, scheme):\n s = requests.Session()\n s.proxies = getproxies()\n parts = urlparse(httpbin('get'))\n url = scheme + parts.netloc + parts.path\n r = requests.Request('GET', url)\n r = s.send(r.prepare())\n assert r.status_code == 200, 'failed for scheme {}'.format(scheme)\n\n def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n\n assert r.status_code == 200\n\n def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):\n r = requests.get(httpbin('redirect', '1'))\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin):\n r = requests.post(httpbin('redirect-to'), data='test', params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == 'test'\n\n def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin):\n byte_str = b'test'\n r = requests.post(httpbin('redirect-to'), data=io.BytesIO(byte_str), params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == byte_str.decode('utf-8')\n\n def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin):\n try:\n requests.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '20')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 30\n else:\n pytest.fail('Expected redirect to raise TooManyRedirects but it did not')\n\n def 
test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin):\n s = requests.session()\n s.max_redirects = 5\n try:\n s.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '45')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 5\n else:\n pytest.fail('Expected custom max number of redirects to be respected but was not')\n\n def test_http_301_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '301'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_301_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '301'), allow_redirects=True)\n print(r.content)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_302_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '302'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_302_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '302'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_303_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '303'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_http_303_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '303'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_header_and_body_removal_on_redirect(self, httpbin):\n purged_headers = ('Content-Length', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data={'test': 'data'})\n prep = ses.prepare_request(req)\n resp = ses.send(prep)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = 'get'\n\n # Run request through resolve_redirects\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_transfer_enc_removal_on_redirect(self, httpbin):\n purged_headers = ('Transfer-Encoding', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data=(b'x' for x in range(1)))\n prep = ses.prepare_request(req)\n assert 'Transfer-Encoding' in prep.headers\n\n # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33\n resp = requests.Response()\n resp.raw = io.BytesIO(b'the content')\n resp.request = prep\n setattr(resp.raw, 'release_conn', lambda *args: args)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n\n # Run request through resolve_redirect\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_fragment_maintained_on_redirect(self, httpbin):\n fragment = \"#view=edit&token=hunter2\"\n r = 
requests.get(httpbin('redirect-to?url=get')+fragment)\n\n assert len(r.history) > 0\n assert r.history[0].request.url == httpbin('redirect-to?url=get')+fragment\n assert r.url == httpbin('get')+fragment\n\n def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n\n assert heads['User-agent'] in r.text\n assert r.status_code == 200\n\n def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)\n assert r.status_code == 200\n\n def test_set_cookie_on_301(self, httpbin):\n s = requests.session()\n url = httpbin('cookies/set?foo=bar')\n s.get(url)\n assert s.cookies['foo'] == 'bar'\n\n def test_cookie_sent_on_redirect(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')\n assert 'Cookie' in r.json()['headers']\n\n def test_cookie_removed_on_expire(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n assert s.cookies['foo'] == 'bar'\n s.get(\n httpbin('response-headers'),\n params={\n 'Set-Cookie':\n 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT'\n }\n )\n assert 'foo' not in s.cookies\n\n def test_cookie_quote_wrapped(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=\"bar:baz\"'))\n assert s.cookies['foo'] == '\"bar:baz\"'\n\n def test_cookie_persists_via_api(self, httpbin):\n s = requests.session()\n r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})\n assert 'foo' in r.request.headers['Cookie']\n assert 'foo' in r.history[0].request.headers['Cookie']\n\n def test_request_cookie_overrides_session_cookie(self, httpbin):\n s = requests.session()\n s.cookies['foo'] = 'bar'\n r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n assert r.json()['cookies']['foo'] == 'baz'\n # Session cookie should not be modified\n assert s.cookies['foo'] == 'bar'\n\n def test_request_cookies_not_persisted(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n # Sending a request with cookies should not add cookies to the session\n assert not s.cookies\n\n def test_generic_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n s.cookies = cj\n r = s.get(httpbin('cookies'))\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n # Make sure the session cj is still the custom one\n assert s.cookies is cj\n\n def test_param_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n r = s.get(httpbin('cookies'), cookies=cj)\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n\n def test_cookielib_cookiejar_on_redirect(self, httpbin):\n \"\"\"Tests resolve_redirect doesn't fail when merging cookies\n with non-RequestsCookieJar cookiejar.\n\n See GH #3579\n \"\"\"\n cj = cookiejar_from_dict({'foo': 'bar'}, cookielib.CookieJar())\n s = requests.Session()\n s.cookies = cookiejar_from_dict({'cookie': 'tasty'})\n\n # Prepare request without using Session\n req = requests.Request('GET', httpbin('headers'), cookies=cj)\n prep_req = req.prepare()\n\n # Send request and simulate redirect\n resp = s.send(prep_req)\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n redirects = 
s.resolve_redirects(resp, prep_req)\n resp = next(redirects)\n\n # Verify CookieJar isn't being converted to RequestsCookieJar\n assert isinstance(prep_req._cookies, cookielib.CookieJar)\n assert isinstance(resp.request._cookies, cookielib.CookieJar)\n assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar)\n\n cookies = {}\n for c in resp.request._cookies:\n cookies[c.name] = c.value\n assert cookies['foo'] == 'bar'\n assert cookies['cookie'] == 'tasty'\n\n def test_requests_in_history_are_not_overridden(self, httpbin):\n resp = requests.get(httpbin('redirect/3'))\n urls = [r.url for r in resp.history]\n req_urls = [r.request.url for r in resp.history]\n assert urls == req_urls\n\n def test_history_is_always_a_list(self, httpbin):\n \"\"\"Show that even with redirects, Response.history is always a list.\"\"\"\n resp = requests.get(httpbin('get'))\n assert isinstance(resp.history, list)\n resp = requests.get(httpbin('redirect/1'))\n assert isinstance(resp.history, list)\n assert not isinstance(resp.history, tuple)\n\n def test_headers_on_session_with_None_are_not_sent(self, httpbin):\n \"\"\"Do not send headers in Session.headers with None values.\"\"\"\n ses = requests.Session()\n ses.headers['Accept-Encoding'] = None\n req = requests.Request('GET', httpbin('get'))\n prep = ses.prepare_request(req)\n assert 'Accept-Encoding' not in prep.headers\n\n def test_headers_preserve_order(self, httpbin):\n \"\"\"Preserve order when headers provided as OrderedDict.\"\"\"\n ses = requests.Session()\n ses.headers = collections.OrderedDict()\n ses.headers['Accept-Encoding'] = 'identity'\n ses.headers['First'] = '1'\n ses.headers['Second'] = '2'\n headers = collections.OrderedDict([('Third', '3'), ('Fourth', '4')])\n headers['Fifth'] = '5'\n headers['Second'] = '222'\n req = requests.Request('GET', httpbin('get'), headers=headers)\n prep = ses.prepare_request(req)\n items = list(prep.headers.items())\n assert items[0] == ('Accept-Encoding', 'identity')\n assert items[1] == ('First', '1')\n assert items[2] == ('Second', '222')\n assert items[3] == ('Third', '3')\n assert items[4] == ('Fourth', '4')\n assert items[5] == ('Fifth', '5')\n\n @pytest.mark.parametrize('key', ('User-agent', 'user-agent'))\n def test_user_agent_transfers(self, httpbin, key):\n\n heads = {key: 'Mozilla/5.0 (github.com/psf/requests)'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n assert heads[key] in r.text\n\n def test_HTTP_200_OK_HEAD(self, httpbin):\n r = requests.head(httpbin('get'))\n assert r.status_code == 200\n\n def test_HTTP_200_OK_PUT(self, httpbin):\n r = requests.put(httpbin('put'))\n assert r.status_code == 200\n\n def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):\n auth = ('user', 'pass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'username, password', (\n ('user', 'pass'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8')),\n (42, 42),\n (None, None),\n ))\n def test_set_basicauth(self, httpbin, username, password):\n auth = (username, password)\n url = httpbin('get')\n\n r = requests.Request('GET', url, auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == _basic_auth_str(username, password)\n\n def test_basicauth_encodes_byte_strings(self):\n \"\"\"Ensure b'test' formats as the byte string \"test\" 
rather\n than the unicode string \"b'test'\" in Python 3.\n \"\"\"\n auth = (b'\\xc5\\xafsername', b'test\\xc6\\xb6')\n r = requests.Request('GET', 'http://localhost', auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == 'Basic xa9zZXJuYW1lOnRlc3TGtg=='\n\n @pytest.mark.parametrize(\n 'url, exception', (\n # Connecting to an unknown domain should raise a ConnectionError\n ('http://doesnotexist.google.com', ConnectionError),\n # Connecting to an invalid port should raise a ConnectionError\n ('http://localhost:1', ConnectionError),\n # Inputing a URL that cannot be parsed should raise an InvalidURL error\n ('http://fe80::5054:ff:fe5a:fc0', InvalidURL)\n ))\n def test_errors(self, url, exception):\n with pytest.raises(exception):\n requests.get(url, timeout=1)\n\n def test_proxy_error(self):\n # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError\n with pytest.raises(ProxyError):\n requests.get('http://localhost:1', proxies={'http': 'non-resolvable-address'})\n\n def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure):\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'http:/badproxyurl:3128'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http://:8080'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'https://'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http:///example.com:8080'})\n\n def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n prepared = session.prepare_request(request)\n session.send(prepared)\n\n def test_respect_proxy_env_on_send_with_redirects(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n url = httpbin('redirect/1')\n print(url)\n request = requests.Request('GET', url)\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_get(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.get(httpbin())\n\n def test_respect_proxy_env_on_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.request(method='GET', url=httpbin())\n\n def test_proxy_authorization_preserved_on_request(self, httpbin):\n proxy_auth_value = \"Bearer XXX\"\n session = requests.Session()\n session.headers.update({\"Proxy-Authorization\": proxy_auth_value})\n resp = session.request(method='GET', url=httpbin('get'))\n sent_headers = resp.json().get('headers', {})\n\n assert sent_headers.get(\"Proxy-Authorization\") == proxy_auth_value\n\n def test_basicauth_with_netrc(self, httpbin):\n auth = ('user', 'pass')\n wrong_auth = ('wronguser', 'wrongpass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n old_auth = requests.sessions.get_netrc_auth\n\n try:\n def get_netrc_auth_mock(url):\n return auth\n requests.sessions.get_netrc_auth = 
get_netrc_auth_mock\n\n # Should use netrc and work.\n r = requests.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n r = requests.get(url, auth=wrong_auth)\n assert r.status_code == 401\n\n s = requests.session()\n\n # Should use netrc and work.\n r = s.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n s.auth = wrong_auth\n r = s.get(url)\n assert r.status_code == 401\n finally:\n requests.sessions.get_netrc_auth = old_auth\n\n def test_DIGEST_HTTP_200_OK_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype, 'never')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n print(r.headers['WWW-Authenticate'])\n\n s = requests.session()\n s.auth = HTTPDigestAuth('user', 'pass')\n r = s.get(url)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n r = requests.get(url)\n assert r.cookies['fake'] == 'fake_value'\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n s = requests.Session()\n s.get(url, auth=auth)\n assert s.cookies['fake'] == 'fake_value'\n\n def test_DIGEST_STREAM(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth, stream=True)\n assert r.raw.read() != b''\n\n r = requests.get(url, auth=auth, stream=False)\n assert r.raw.read() == b''\n\n def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'wrongpass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 401\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 401\n\n def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert '\"auth\"' in r.request.headers['Authorization']\n\n def test_POSTBIN_GET_POST_FILES(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_invalid_files_input(self, httpbin):\n\n url = httpbin('post')\n post = requests.post(url,\n files={\"random-file-1\": None, \"random-file-2\": 1})\n assert b'name=\"random-file-1\"' not in post.request.body\n assert b'name=\"random-file-2\"' in post.request.body\n\n def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin):\n\n 
class TestStream(object):\n def __init__(self, data):\n self.data = data.encode()\n self.length = len(self.data)\n self.index = 0\n\n def __len__(self):\n return self.length\n\n def read(self, size=None):\n if size:\n ret = self.data[self.index:self.index + size]\n self.index += size\n else:\n ret = self.data[self.index:]\n self.index = self.length\n return ret\n\n def tell(self):\n return self.index\n\n def seek(self, offset, where=0):\n if where == 0:\n self.index = offset\n elif where == 1:\n self.index += offset\n elif where == 2:\n self.index = self.length + offset\n\n test = TestStream('test')\n post1 = requests.post(httpbin('post'), data=test)\n assert post1.status_code == 200\n assert post1.json()['data'] == 'test'\n\n test = TestStream('test')\n test.seek(2)\n post2 = requests.post(httpbin('post'), data=test)\n assert post2.status_code == 200\n assert post2.json()['data'] == 'st'\n\n def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, data={'some': 'data'}, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_post_with_custom_mapping(self, httpbin):\n class CustomMapping(MutableMapping):\n def __init__(self, *args, **kwargs):\n self.data = dict(*args, **kwargs)\n\n def __delitem__(self, key):\n del self.data[key]\n\n def __getitem__(self, key):\n return self.data[key]\n\n def __setitem__(self, key, value):\n self.data[key] = value\n\n def __iter__(self):\n return iter(self.data)\n\n def __len__(self):\n return len(self.data)\n\n data = CustomMapping({'some': 'data'})\n url = httpbin('post')\n found_json = requests.post(url, data=data).json().get('form')\n assert found_json == {'some': 'data'}\n\n def test_conflicting_post_params(self, httpbin):\n url = httpbin('post')\n with open('requirements-dev.txt') as f:\n with pytest.raises(ValueError):\n requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})\n with pytest.raises(ValueError):\n requests.post(url, data=u('[{\"some\": \"data\"}]'), files={'some': f})\n\n def test_request_ok_set(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n assert not r.ok\n\n def test_status_raising(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n with pytest.raises(requests.exceptions.HTTPError):\n r.raise_for_status()\n\n r = requests.get(httpbin('status', '500'))\n assert not r.ok\n\n def test_decompress_gzip(self, httpbin):\n r = requests.get(httpbin('gzip'))\n r.content.decode('ascii')\n\n @pytest.mark.parametrize(\n 'url, params', (\n ('/get', {'foo': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'foo': 'foo'}),\n ('ø', {'foo': 'foo'}),\n ))\n def test_unicode_get(self, httpbin, url, params):\n requests.get(httpbin(url), params=params)\n\n def test_unicode_header_name(self, httpbin):\n requests.put(\n httpbin('put'),\n headers={str('Content-Type'): 'application/octet-stream'},\n data='\\xff') # compat.str is unicode.\n\n def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle):\n requests.get(httpbin_secure('status', '301'), verify=httpbin_ca_bundle)\n\n def test_invalid_ca_certificate_path(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with 
pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), verify=INVALID_PATH)\n assert str(e.value) == 'Could not find a suitable TLS CA certificate bundle, invalid path: {}'.format(INVALID_PATH)\n\n def test_invalid_ssl_certificate_files(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=INVALID_PATH)\n assert str(e.value) == 'Could not find the TLS certificate file, invalid path: {}'.format(INVALID_PATH)\n\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=('.', INVALID_PATH))\n assert str(e.value) == 'Could not find the TLS key file, invalid path: {}'.format(INVALID_PATH)\n\n def test_http_with_certificate(self, httpbin):\n r = requests.get(httpbin(), cert='.')\n assert r.status_code == 200\n\n def test_https_warnings(self, nosan_server):\n \"\"\"warnings are emitted with requests.get\"\"\"\n host, port, ca_bundle = nosan_server\n if HAS_MODERN_SSL or HAS_PYOPENSSL:\n warnings_expected = ('SubjectAltNameWarning', )\n else:\n warnings_expected = ('SNIMissingWarning',\n 'InsecurePlatformWarning',\n 'SubjectAltNameWarning', )\n\n with pytest.warns(None) as warning_records:\n warnings.simplefilter('always')\n requests.get(\"https://localhost:{}/\".format(port), verify=ca_bundle)\n\n warning_records = [item for item in warning_records\n if item.category.__name__ != 'ResourceWarning']\n\n warnings_category = tuple(\n item.category.__name__ for item in warning_records)\n assert warnings_category == warnings_expected\n\n def test_certificate_failure(self, httpbin_secure):\n \"\"\"\n When underlying SSL problems occur, an SSLError is raised.\n \"\"\"\n with pytest.raises(SSLError):\n # Our local httpbin does not have a trusted CA, so this call will\n # fail if we use our default trust bundle.\n requests.get(httpbin_secure('status', '200'))\n\n def test_urlencoded_get_query_multivalued_param(self, httpbin):\n\n r = requests.get(httpbin('get'), params={'test': ['foo', 'baz']})\n assert r.status_code == 200\n assert r.url == httpbin('get?test=foo&test=baz')\n\n def test_form_encoded_post_query_multivalued_element(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data=dict(test=['foo', 'baz']))\n prep = r.prepare()\n assert prep.body == 'test=foo&test=baz'\n\n def test_different_encodings_dont_break_post(self, httpbin):\n r = requests.post(httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n params={'blah': 'asdf1234'},\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'data', (\n {'stuff': u('ëlïxr')},\n {'stuff': u('ëlïxr').encode('utf-8')},\n {'stuff': 'elixr'},\n {'stuff': 'elixr'.encode('utf-8')},\n ))\n def test_unicode_multipart_post(self, httpbin, data):\n r = requests.post(httpbin('post'),\n data=data,\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n def test_unicode_multipart_post_fieldnames(self, httpbin):\n filename = os.path.splitext(__file__)[0] + '.py'\n r = requests.Request(\n method='POST', url=httpbin('post'),\n data={'stuff'.encode('utf-8'): 'elixr'},\n files={'file': ('test_requests.py', open(filename, 'rb'))})\n prep = r.prepare()\n assert b'name=\"stuff\"' in prep.body\n assert b'name=\"b\\'stuff\\'\"' not in prep.body\n\n def test_unicode_method_name(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.request(\n method=u('POST'), url=httpbin('post'), files=files)\n assert r.status_code == 200\n\n def 
test_unicode_method_name_with_request_object(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n s = requests.Session()\n req = requests.Request(u('POST'), httpbin('post'), files=files)\n prep = s.prepare_request(req)\n assert isinstance(prep.method, builtin_str)\n assert prep.method == 'POST'\n\n resp = s.send(prep)\n assert resp.status_code == 200\n\n def test_non_prepared_request_error(self):\n s = requests.Session()\n req = requests.Request(u('POST'), '/')\n\n with pytest.raises(ValueError) as e:\n s.send(req)\n assert str(e.value) == 'You can only send PreparedRequests.'\n\n def test_custom_content_type(self, httpbin):\n r = requests.post(\n httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n files={\n 'file1': ('test_requests.py', open(__file__, 'rb')),\n 'file2': ('test_requests', open(__file__, 'rb'),\n 'text/py-content-type')})\n assert r.status_code == 200\n assert b\"text/py-content-type\" in r.request.body\n\n def test_hook_receives_request_arguments(self, httpbin):\n def hook(resp, **kwargs):\n assert resp is not None\n assert kwargs != {}\n\n s = requests.Session()\n r = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = s.prepare_request(r)\n s.send(prep)\n\n def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):\n hook = lambda x, *args, **kwargs: x\n s = requests.Session()\n s.hooks['response'].append(hook)\n r = requests.Request('GET', httpbin())\n prep = s.prepare_request(r)\n assert prep.hooks['response'] != []\n assert prep.hooks['response'] == [hook]\n\n def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):\n hook1 = lambda x, *args, **kwargs: x\n hook2 = lambda x, *args, **kwargs: x\n assert hook1 is not hook2\n s = requests.Session()\n s.hooks['response'].append(hook2)\n r = requests.Request('GET', httpbin(), hooks={'response': [hook1]})\n prep = s.prepare_request(r)\n assert prep.hooks['response'] == [hook1]\n\n def test_prepared_request_hook(self, httpbin):\n def hook(resp, **kwargs):\n resp.hook_working = True\n return resp\n\n req = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = req.prepare()\n\n s = requests.Session()\n s.proxies = getproxies()\n resp = s.send(prep)\n\n assert hasattr(resp, 'hook_working')\n\n def test_prepared_from_session(self, httpbin):\n class DummyAuth(requests.auth.AuthBase):\n def __call__(self, r):\n r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'\n return r\n\n req = requests.Request('GET', httpbin('headers'))\n assert not req.auth\n\n s = requests.Session()\n s.auth = DummyAuth()\n\n prep = s.prepare_request(req)\n resp = s.send(prep)\n\n assert resp.json()['headers'][\n 'Dummy-Auth-Test'] == 'dummy-auth-test-ok'\n\n def test_prepare_request_with_bytestring_url(self):\n req = requests.Request('GET', b'https://httpbin.org/')\n s = requests.Session()\n prep = s.prepare_request(req)\n assert prep.url == \"https://httpbin.org/\"\n\n def test_request_with_bytestring_host(self, httpbin):\n s = requests.Session()\n resp = s.request(\n 'GET',\n httpbin('cookies/set?cookie=value'),\n allow_redirects=False,\n headers={'Host': b'httpbin.org'}\n )\n assert resp.cookies.get('cookie') == 'value'\n\n def test_links(self):\n r = requests.Response()\n r.headers = {\n 'cache-control': 'public, max-age=60, s-maxage=60',\n 'connection': 'keep-alive',\n 'content-encoding': 'gzip',\n 'content-type': 'application/json; charset=utf-8',\n 'date': 'Sat, 26 Jan 2013 16:47:56 GMT',\n 'etag': '\"6ff6a73c0e446c1f61614769e3ceb778\"',\n 'last-modified': 'Sat, 26 Jan 2013 
16:22:39 GMT',\n 'link': ('<https://api.github.com/users/kennethreitz/repos?'\n 'page=2&per_page=10>; rel=\"next\", <https://api.github.'\n 'com/users/kennethreitz/repos?page=7&per_page=10>; '\n ' rel=\"last\"'),\n 'server': 'GitHub.com',\n 'status': '200 OK',\n 'vary': 'Accept',\n 'x-content-type-options': 'nosniff',\n 'x-github-media-type': 'github.beta',\n 'x-ratelimit-limit': '60',\n 'x-ratelimit-remaining': '57'\n }\n assert r.links['next']['rel'] == 'next'\n\n def test_cookie_parameters(self):\n key = 'some_cookie'\n value = 'some_value'\n secure = True\n domain = 'test.com'\n rest = {'HttpOnly': True}\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, secure=secure, domain=domain, rest=rest)\n\n assert len(jar) == 1\n assert 'some_cookie' in jar\n\n cookie = list(jar)[0]\n assert cookie.secure == secure\n assert cookie.domain == domain\n assert cookie._rest['HttpOnly'] == rest['HttpOnly']\n\n def test_cookie_as_dict_keeps_len(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert len(jar) == 2\n assert len(d1) == 2\n assert len(d2) == 2\n assert len(d3) == 2\n\n def test_cookie_as_dict_keeps_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert d1['some_cookie'] == 'some_value'\n assert d2['some_cookie'] == 'some_value'\n assert d3['some_cookie1'] == 'some_value1'\n\n def test_cookie_as_dict_keys(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n keys = jar.keys()\n assert keys == list(keys)\n # make sure one can use keys multiple times\n assert list(keys) == list(keys)\n\n def test_cookie_as_dict_values(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n values = jar.values()\n assert values == list(values)\n # make sure one can use values multiple times\n assert list(values) == list(values)\n\n def test_cookie_as_dict_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n items = jar.items()\n assert items == list(items)\n # make sure one can use items multiple times\n assert list(items) == list(items)\n\n def test_cookie_duplicate_names_different_domains(self):\n key = 'some_cookie'\n value = 'some_value'\n domain1 = 'test1.com'\n domain2 = 'test2.com'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, domain=domain1)\n jar.set(key, value, domain=domain2)\n assert key in jar\n items = jar.items()\n assert len(items) == 2\n\n # Verify that CookieConflictError is raised if domain is not specified\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n # Verify that CookieConflictError is not raised if domain is specified\n cookie = jar.get(key, domain=domain1)\n assert cookie == value\n\n def 
test_cookie_duplicate_names_raises_cookie_conflict_error(self):\n key = 'some_cookie'\n value = 'some_value'\n path = 'some_path'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, path=path)\n jar.set(key, value)\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n def test_cookie_policy_copy(self):\n class MyCookiePolicy(cookielib.DefaultCookiePolicy):\n pass\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set_policy(MyCookiePolicy())\n assert isinstance(jar.copy().get_policy(), MyCookiePolicy)\n\n def test_time_elapsed_blank(self, httpbin):\n r = requests.get(httpbin('get'))\n td = r.elapsed\n total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6)\n assert total_seconds > 0.0\n\n def test_empty_response_has_content_none(self):\n r = requests.Response()\n assert r.content is None\n\n def test_response_is_iterable(self):\n r = requests.Response()\n io = StringIO.StringIO('abc')\n read_ = io.read\n\n def read_mock(amt, decode_content=None):\n return read_(amt)\n setattr(io, 'read', read_mock)\n r.raw = io\n assert next(iter(r))\n io.close()\n\n def test_response_decode_unicode(self):\n \"\"\"When called with decode_unicode, Response.iter_content should always\n return unicode.\n \"\"\"\n r = requests.Response()\n r._content_consumed = True\n r._content = b'the content'\n r.encoding = 'ascii'\n\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n # also for streaming\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n r.encoding = 'ascii'\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n def test_response_reason_unicode(self):\n # check for unicode HTTP status\n r = requests.Response()\n r.url = u'unicode URL'\n r.reason = u'Komponenttia ei löydy'.encode('utf-8')\n r.status_code = 404\n r.encoding = None\n assert not r.ok # old behaviour - crashes here\n\n def test_response_reason_unicode_fallback(self):\n # check raise_status falls back to ISO-8859-1\n r = requests.Response()\n r.url = 'some url'\n reason = u'Komponenttia ei löydy'\n r.reason = reason.encode('latin-1')\n r.status_code = 500\n r.encoding = None\n with pytest.raises(requests.exceptions.HTTPError) as e:\n r.raise_for_status()\n assert reason in e.value.args[0]\n\n def test_response_chunk_size_type(self):\n \"\"\"Ensure that chunk_size is passed as None or an integer, otherwise\n raise a TypeError.\n \"\"\"\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(1)\n assert all(len(chunk) == 1 for chunk in chunks)\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(None)\n assert list(chunks) == [b'the content']\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n with pytest.raises(TypeError):\n chunks = r.iter_content(\"1024\")\n\n def test_request_and_response_are_pickleable(self, httpbin):\n r = requests.get(httpbin('get'))\n\n # verify we can pickle the original request\n assert pickle.loads(pickle.dumps(r.request))\n\n # verify we can pickle the response and that we have access to\n # the original request.\n pr = pickle.loads(pickle.dumps(r))\n assert r.request.url == pr.request.url\n assert r.request.headers == pr.request.headers\n\n def test_prepared_request_is_pickleable(self, httpbin):\n p = requests.Request('GET', httpbin('get')).prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = 
pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_file_is_pickleable(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.Request('POST', httpbin('post'), files=files)\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_hook_is_pickleable(self, httpbin):\n r = requests.Request('GET', httpbin('get'), hooks=default_hooks())\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n assert r.hooks == p.hooks\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_cannot_send_unprepared_requests(self, httpbin):\n r = requests.Request(url=httpbin())\n with pytest.raises(ValueError):\n requests.Session().send(r)\n\n def test_http_error(self):\n error = requests.exceptions.HTTPError()\n assert not error.response\n response = requests.Response()\n error = requests.exceptions.HTTPError(response=response)\n assert error.response == response\n error = requests.exceptions.HTTPError('message', response=response)\n assert str(error) == 'message'\n assert error.response == response\n\n def test_session_pickling(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n\n s = pickle.loads(pickle.dumps(s))\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n assert r.status_code == 200\n\n def test_fixes_1329(self, httpbin):\n \"\"\"Ensure that header updates are done case-insensitively.\"\"\"\n s = requests.Session()\n s.headers.update({'ACCEPT': 'BOGUS'})\n s.headers.update({'accept': 'application/json'})\n r = s.get(httpbin('get'))\n headers = r.request.headers\n assert headers['accept'] == 'application/json'\n assert headers['Accept'] == 'application/json'\n assert headers['ACCEPT'] == 'application/json'\n\n def test_uppercase_scheme_redirect(self, httpbin):\n parts = urlparse(httpbin('html'))\n url = \"HTTP://\" + parts.netloc + parts.path\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n assert r.status_code == 200\n assert r.url.lower() == url.lower()\n\n def test_transport_adapter_ordering(self):\n s = requests.Session()\n order = ['https://', 'http://']\n assert order == list(s.adapters)\n s.mount('http://git', HTTPAdapter())\n s.mount('http://github', HTTPAdapter())\n s.mount('http://github.com', HTTPAdapter())\n s.mount('http://github.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://github.com',\n 'http://github',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s.mount('http://gittip', HTTPAdapter())\n s.mount('http://gittip.com', HTTPAdapter())\n s.mount('http://gittip.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://gittip.com/about/',\n 'http://github.com',\n 'http://gittip.com',\n 'http://github',\n 'http://gittip',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == 
list(s.adapters)\n s2 = requests.Session()\n s2.adapters = {'http://': HTTPAdapter()}\n s2.mount('https://', HTTPAdapter())\n assert 'http://' in s2.adapters\n assert 'https://' in s2.adapters\n\n def test_session_get_adapter_prefix_matching(self):\n prefix = 'https://example.com'\n more_specific_prefix = prefix + '/some/path'\n\n url_matching_only_prefix = prefix + '/another/path'\n url_matching_more_specific_prefix = more_specific_prefix + '/longer/path'\n url_not_matching_prefix = 'https://another.example.com/'\n\n s = requests.Session()\n prefix_adapter = HTTPAdapter()\n more_specific_prefix_adapter = HTTPAdapter()\n s.mount(prefix, prefix_adapter)\n s.mount(more_specific_prefix, more_specific_prefix_adapter)\n\n assert s.get_adapter(url_matching_only_prefix) is prefix_adapter\n assert s.get_adapter(url_matching_more_specific_prefix) is more_specific_prefix_adapter\n assert s.get_adapter(url_not_matching_prefix) not in (prefix_adapter, more_specific_prefix_adapter)\n\n def test_session_get_adapter_prefix_matching_mixed_case(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix = mixed_case_prefix + '/full_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix) is my_adapter\n\n def test_session_get_adapter_prefix_matching_is_case_insensitive(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix_with_different_case = 'HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter\n\n def test_header_remove_is_case_insensitive(self, httpbin):\n # From issue #1321\n s = requests.Session()\n s.headers['foo'] = 'bar'\n r = s.get(httpbin('get'), headers={'FOO': None})\n assert 'foo' not in r.request.headers\n\n def test_params_are_merged_case_sensitive(self, httpbin):\n s = requests.Session()\n s.params['foo'] = 'bar'\n r = s.get(httpbin('get'), params={'FOO': 'bar'})\n assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'}\n\n def test_long_authinfo_in_url(self):\n url = 'http://{}:{}@{}:9000/path?query#frag'.format(\n 'E8A3BE87-9E3F-4620-8858-95478E385B5B',\n 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E',\n 'exactly-------------sixty-----------three------------characters',\n )\n r = requests.Request('GET', url).prepare()\n assert r.url == url\n\n def test_header_keys_are_native(self, httpbin):\n headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'}\n r = requests.Request('GET', httpbin('get'), headers=headers)\n p = r.prepare()\n\n # This is testing that they are builtin strings. 
A bit weird, but there\n # we go.\n assert 'unicode' in p.headers.keys()\n assert 'byte' in p.headers.keys()\n\n def test_header_validation(self, httpbin):\n \"\"\"Ensure prepare_headers regex isn't flagging valid header contents.\"\"\"\n headers_ok = {'foo': 'bar baz qux',\n 'bar': u'fbbq'.encode('utf8'),\n 'baz': '',\n 'qux': '1'}\n r = requests.get(httpbin('get'), headers=headers_ok)\n assert r.request.headers['foo'] == headers_ok['foo']\n\n def test_header_value_not_str(self, httpbin):\n \"\"\"Ensure the header value is of type string or bytes as\n per discussion in GH issue #3386\n \"\"\"\n headers_int = {'foo': 3}\n headers_dict = {'bar': {'foo': 'bar'}}\n headers_list = {'baz': ['foo', 'bar']}\n\n # Test for int\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_int)\n assert 'foo' in str(excinfo.value)\n # Test for dict\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_dict)\n assert 'bar' in str(excinfo.value)\n # Test for list\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_list)\n assert 'baz' in str(excinfo.value)\n\n def test_header_no_return_chars(self, httpbin):\n \"\"\"Ensure that a header containing return character sequences raise an\n exception. Otherwise, multiple headers are created from single string.\n \"\"\"\n headers_ret = {'foo': 'bar\\r\\nbaz: qux'}\n headers_lf = {'foo': 'bar\\nbaz: qux'}\n headers_cr = {'foo': 'bar\\rbaz: qux'}\n\n # Test for newline\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_ret)\n # Test for line feed\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_lf)\n # Test for carriage return\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_cr)\n\n def test_header_no_leading_space(self, httpbin):\n \"\"\"Ensure headers containing leading whitespace raise\n InvalidHeader Error before sending.\n \"\"\"\n headers_space = {'foo': ' bar'}\n headers_tab = {'foo': ' bar'}\n\n # Test for whitespace\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_space)\n # Test for tab\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_tab)\n\n @pytest.mark.parametrize('files', ('foo', b'foo', bytearray(b'foo')))\n def test_can_send_objects_with_files(self, httpbin, files):\n data = {'a': 'this is a string'}\n files = {'b': files}\n r = requests.Request('POST', httpbin('post'), data=data, files=files)\n p = r.prepare()\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_can_send_file_object_with_non_string_filename(self, httpbin):\n f = io.BytesIO()\n f.name = 2\n r = requests.Request('POST', httpbin('post'), files={'f': f})\n p = r.prepare()\n\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_autoset_header_values_are_native(self, httpbin):\n data = 'this is a string'\n length = '16'\n req = requests.Request('POST', httpbin('post'), data=data)\n p = req.prepare()\n\n assert p.headers['Content-Length'] == length\n\n def test_nonhttp_schemes_dont_check_URLs(self):\n test_urls = (\n 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==',\n 'file:///etc/passwd',\n 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431',\n )\n for test_url in test_urls:\n req = requests.Request('GET', test_url)\n preq = req.prepare()\n assert test_url == preq.url\n\n def 
test_auth_is_stripped_on_http_downgrade(self, httpbin, httpbin_secure, httpbin_ca_bundle):\n r = requests.get(\n httpbin_secure('redirect-to'),\n params={'url': httpbin('get')},\n auth=('user', 'pass'),\n verify=httpbin_ca_bundle\n )\n assert r.history[0].request.headers['Authorization']\n assert 'Authorization' not in r.request.headers\n\n def test_auth_is_retained_for_redirect_on_host(self, httpbin):\n r = requests.get(httpbin('redirect/1'), auth=('user', 'pass'))\n h1 = r.history[0].request.headers['Authorization']\n h2 = r.request.headers['Authorization']\n\n assert h1 == h2\n\n def test_should_strip_auth_host_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com/foo', 'http://another.example.com/')\n\n def test_should_strip_auth_http_downgrade(self):\n s = requests.Session()\n assert s.should_strip_auth('https://example.com/foo', 'http://example.com/bar')\n\n def test_should_strip_auth_https_upgrade(self):\n s = requests.Session()\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com:80/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com:443/bar')\n # Non-standard ports should trigger stripping\n assert s.should_strip_auth('http://example.com:8080/foo', 'https://example.com/bar')\n assert s.should_strip_auth('http://example.com/foo', 'https://example.com:8443/bar')\n\n def test_should_strip_auth_port_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com:1234/foo', 'https://example.com:4321/bar')\n\n @pytest.mark.parametrize(\n 'old_uri, new_uri', (\n ('https://example.com:443/foo', 'https://example.com/bar'),\n ('http://example.com:80/foo', 'http://example.com/bar'),\n ('https://example.com/foo', 'https://example.com:443/bar'),\n ('http://example.com/foo', 'http://example.com:80/bar')\n ))\n def test_should_strip_auth_default_port(self, old_uri, new_uri):\n s = requests.Session()\n assert not s.should_strip_auth(old_uri, new_uri)\n\n def test_manual_redirect_with_partial_body_read(self, httpbin):\n s = requests.Session()\n r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True)\n assert r1.is_redirect\n rg = s.resolve_redirects(r1, r1.request, stream=True)\n\n # read only the first eight bytes of the response body,\n # then follow the redirect\n r1.iter_content(8)\n r2 = next(rg)\n assert r2.is_redirect\n\n # read all of the response via iter_content,\n # then follow the redirect\n for _ in r2.iter_content():\n pass\n r3 = next(rg)\n assert not r3.is_redirect\n\n def test_prepare_body_position_non_stream(self):\n data = b'the data'\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is None\n\n def test_rewind_body(self):\n data = io.BytesIO(b'the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n assert prep.body.read() == b'the data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b'the data'\n\n def test_rewind_partially_read_body(self):\n data = io.BytesIO(b'the data')\n data.read(4) # read some data\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 4\n assert prep.body.read() == b'data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n 
requests.utils.rewind_body(prep)\n assert prep.body.read() == b'data'\n\n def test_rewind_body_no_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def test_rewind_body_failed_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def seek(self, pos, whence=0):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'error occurred when rewinding request body' in str(e)\n\n def test_rewind_body_failed_tell(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is not None\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def _patch_adapter_gzipped_redirect(self, session, url):\n adapter = session.get_adapter(url=url)\n org_build_response = adapter.build_response\n self._patched_response = False\n\n def build_response(*args, **kwargs):\n resp = org_build_response(*args, **kwargs)\n if not self._patched_response:\n resp.raw.headers['content-encoding'] = 'gzip'\n self._patched_response = True\n return resp\n\n adapter.build_response = build_response\n\n def test_redirect_with_wrong_gzipped_header(self, httpbin):\n s = requests.Session()\n url = httpbin('redirect/1')\n self._patch_adapter_gzipped_redirect(s, url)\n s.get(url)\n\n @pytest.mark.parametrize(\n 'username, password, auth_str', (\n ('test', 'test', 'Basic dGVzdDp0ZXN0'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8'), 'Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA=='),\n ))\n def test_basic_auth_str_is_always_native(self, username, password, auth_str):\n s = _basic_auth_str(username, password)\n assert isinstance(s, builtin_str)\n assert s == auth_str\n\n def test_requests_history_is_saved(self, httpbin):\n r = requests.get(httpbin('redirect/5'))\n total = r.history[-1].history\n i = 0\n for item in r.history:\n assert item.history == total[0:i]\n i += 1\n\n def test_json_param_post_content_type_works(self, httpbin):\n r = requests.post(\n httpbin('post'),\n json={'life': 42}\n )\n assert r.status_code == 200\n assert 'application/json' in r.request.headers['Content-Type']\n assert {'life': 42} == r.json()['json']\n\n def test_json_param_post_should_not_override_data_param(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data={'stuff': 'elixr'},\n json={'music': 'flute'})\n prep = r.prepare()\n assert 'stuff=elixr' == prep.body\n\n def test_response_iter_lines(self, httpbin):\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n it = r.iter_lines()\n next(it)\n assert len(list(it)) == 3\n\n def test_response_context_manager(self, httpbin):\n with requests.get(httpbin('stream/4'), stream=True) as 
response:\n assert isinstance(response, requests.Response)\n\n assert response.raw.closed\n\n def test_unconsumed_session_response_closes_connection(self, httpbin):\n s = requests.session()\n\n with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response:\n pass\n\n assert response._content_consumed is False\n assert response.raw.closed\n\n @pytest.mark.xfail\n def test_response_iter_lines_reentrant(self, httpbin):\n \"\"\"Response.iter_lines() is not reentrant safe\"\"\"\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n next(r.iter_lines())\n assert len(list(r.iter_lines())) == 3\n\n def test_session_close_proxy_clear(self, mocker):\n proxies = {\n 'one': mocker.Mock(),\n 'two': mocker.Mock(),\n }\n session = requests.Session()\n mocker.patch.dict(session.adapters['http://'].proxy_manager, proxies)\n session.close()\n proxies['one'].clear.assert_called_once_with()\n proxies['two'].clear.assert_called_once_with()\n\n def test_proxy_auth(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:pass@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}\n\n def test_proxy_auth_empty_pass(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjo='}\n\n def test_response_json_when_content_is_None(self, httpbin):\n r = requests.get(httpbin('/status/204'))\n # Make sure r.content is None\n r.status_code = 0\n r._content = False\n r._content_consumed = False\n\n assert r.content is None\n with pytest.raises(ValueError):\n r.json()\n\n def test_response_without_release_conn(self):\n \"\"\"Test `close` call for non-urllib3-like raw objects.\n Should work when `release_conn` attr doesn't exist on `response.raw`.\n \"\"\"\n resp = requests.Response()\n resp.raw = StringIO.StringIO('test')\n assert not resp.raw.closed\n resp.close()\n assert resp.raw.closed\n\n def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size > 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'test data')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' not in prepared_request.headers\n assert 'Content-Length' in prepared_request.headers\n\n def test_chunked_upload_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that requests with a generator body stream using\n Transfer-Encoding: chunked, not a Content-Length header.\n \"\"\"\n data = (i for i in [b'a', b'b', b'c'])\n url = httpbin('post')\n r = requests.Request('POST', url, data=data)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_custom_redirect_mixin(self, httpbin):\n \"\"\"Tests a custom mixin to overwrite 
``get_redirect_target``.\n\n Ensures a subclassed ``requests.Session`` can handle a certain type of\n malformed redirect responses.\n\n 1. original request receives a proper response: 302 redirect\n 2. following the redirect, a malformed response is given:\n status code = HTTP 200\n location = alternate url\n 3. the custom session catches the edge case and follows the redirect\n \"\"\"\n url_final = httpbin('html')\n querystring_malformed = urlencode({'location': url_final})\n url_redirect_malformed = httpbin('response-headers?%s' % querystring_malformed)\n querystring_redirect = urlencode({'url': url_redirect_malformed})\n url_redirect = httpbin('redirect-to?%s' % querystring_redirect)\n urls_test = [url_redirect,\n url_redirect_malformed,\n url_final,\n ]\n\n class CustomRedirectSession(requests.Session):\n def get_redirect_target(self, resp):\n # default behavior\n if resp.is_redirect:\n return resp.headers['location']\n # edge case - check to see if 'location' is in headers anyways\n location = resp.headers.get('location')\n if location and (location != resp.url):\n return location\n return None\n\n session = CustomRedirectSession()\n r = session.get(urls_test[0])\n assert len(r.history) == 2\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n assert r.history[1].status_code == 200\n assert not r.history[1].is_redirect\n assert r.url == urls_test[2]\n\n\nclass TestCaseInsensitiveDict:\n\n @pytest.mark.parametrize(\n 'cid', (\n CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}),\n CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')]),\n CaseInsensitiveDict(FOO='foo', BAr='bar'),\n ))\n def test_init(self, cid):\n assert len(cid) == 2\n assert 'foo' in cid\n assert 'bar' in cid\n\n def test_docstring_example(self):\n cid = CaseInsensitiveDict()\n cid['Accept'] = 'application/json'\n assert cid['aCCEPT'] == 'application/json'\n assert list(cid) == ['Accept']\n\n def test_len(self):\n cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})\n cid['A'] = 'a'\n assert len(cid) == 2\n\n def test_getitem(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n\n def test_fixes_649(self):\n \"\"\"__setitem__ should behave case-insensitively.\"\"\"\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['Spam'] = 'twoval'\n cid['sPAM'] = 'redval'\n cid['SPAM'] = 'blueval'\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n assert list(cid.keys()) == ['SPAM']\n\n def test_delitem(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n del cid['sPam']\n assert 'spam' not in cid\n assert len(cid) == 0\n\n def test_contains(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n assert 'Spam' in cid\n assert 'spam' in cid\n assert 'SPAM' in cid\n assert 'sPam' in cid\n assert 'notspam' not in cid\n\n def test_get(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['SPAM'] = 'blueval'\n assert cid.get('spam') == 'blueval'\n assert cid.get('SPAM') == 'blueval'\n assert cid.get('sPam') == 'blueval'\n assert cid.get('notspam', 'default') == 'default'\n\n def test_update(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'blueval'\n cid.update({'sPam': 'notblueval'})\n assert cid['spam'] == 'notblueval'\n cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})\n cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'})\n assert len(cid) == 2\n assert cid['foo'] == 'anotherfoo'\n assert cid['bar'] == 'anotherbar'\n\n def 
test_update_retains_unchanged(self):\n cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})\n cid.update({'foo': 'newfoo'})\n assert cid['bar'] == 'bar'\n\n def test_iter(self):\n cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})\n keys = frozenset(['Spam', 'Eggs'])\n assert frozenset(iter(cid)) == keys\n\n def test_equality(self):\n cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'})\n othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'})\n assert cid == othercid\n del othercid['spam']\n assert cid != othercid\n assert cid == {'spam': 'blueval', 'eggs': 'redval'}\n assert cid != object()\n\n def test_setdefault(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid.setdefault('spam', 'notblueval') == 'blueval'\n assert cid.setdefault('notspam', 'notblueval') == 'notblueval'\n\n def test_lower_items(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())\n lowerkeyset = frozenset(['accept', 'user-agent'])\n assert keyset == lowerkeyset\n\n def test_preserve_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(['Accept', 'user-Agent'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_preserve_last_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid.update({'ACCEPT': 'application/json'})\n cid['USER-AGENT'] = 'requests'\n keyset = frozenset(['ACCEPT', 'USER-AGENT'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_copy(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid_copy = cid.copy()\n assert cid == cid_copy\n cid['changed'] = True\n assert cid != cid_copy\n\n\nclass TestMorselToCookieExpires:\n \"\"\"Tests for morsel_to_cookie when morsel contains expires.\"\"\"\n\n def test_expires_valid_str(self):\n \"\"\"Test case where we convert expires from string time.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires == 1\n\n @pytest.mark.parametrize(\n 'value, exception', (\n (100, TypeError),\n ('woops', ValueError),\n ))\n def test_expires_invalid_int(self, value, exception):\n \"\"\"Test case where an invalid type is passed for expires.\"\"\"\n morsel = Morsel()\n morsel['expires'] = value\n with pytest.raises(exception):\n morsel_to_cookie(morsel)\n\n def test_expires_none(self):\n \"\"\"Test case where expires is None.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = None\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires is None\n\n\nclass TestMorselToCookieMaxAge:\n\n \"\"\"Tests for morsel_to_cookie when morsel contains max-age.\"\"\"\n\n def test_max_age_valid_int(self):\n \"\"\"Test case where a valid max age in seconds is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 60\n cookie = morsel_to_cookie(morsel)\n assert isinstance(cookie.expires, int)\n\n def test_max_age_invalid_str(self):\n \"\"\"Test case where a invalid max age is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 'woops'\n with pytest.raises(TypeError):\n morsel_to_cookie(morsel)\n\n\nclass TestTimeout:\n\n def test_stream_timeout(self, httpbin):\n 
try:\n requests.get(httpbin('delay/10'), timeout=2.0)\n except requests.exceptions.Timeout as e:\n assert 'Read timed out' in e.args[0].args[0]\n\n @pytest.mark.parametrize(\n 'timeout, error_text', (\n ((3, 4, 5), '(connect, read)'),\n ('foo', 'must be an int, float or None'),\n ))\n def test_invalid_timeout(self, httpbin, timeout, error_text):\n with pytest.raises(ValueError) as e:\n requests.get(httpbin('get'), timeout=timeout)\n assert error_text in str(e)\n\n @pytest.mark.parametrize(\n 'timeout', (\n None,\n Urllib3Timeout(connect=None, read=None)\n ))\n def test_none_timeout(self, httpbin, timeout):\n \"\"\"Check that you can set None as a valid timeout value.\n\n To actually test this behavior, we'd want to check that setting the\n timeout to None actually lets the request block past the system default\n timeout. However, this would make the test suite unbearably slow.\n Instead we verify that setting the timeout to None does not prevent the\n request from succeeding.\n \"\"\"\n r = requests.get(httpbin('get'), timeout=timeout)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'timeout', (\n (None, 0.1),\n Urllib3Timeout(connect=None, read=0.1)\n ))\n def test_read_timeout(self, httpbin, timeout):\n try:\n requests.get(httpbin('delay/10'), timeout=timeout)\n pytest.fail('The recv() request should time out.')\n except ReadTimeout:\n pass\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, None),\n Urllib3Timeout(connect=0.1, read=None)\n ))\n def test_connect_timeout(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout as e:\n assert isinstance(e, ConnectionError)\n assert isinstance(e, Timeout)\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, 0.1),\n Urllib3Timeout(connect=0.1, read=0.1)\n ))\n def test_total_timeout_connect(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout:\n pass\n\n def test_encoded_methods(self, httpbin):\n \"\"\"See: https://github.com/psf/requests/issues/2316\"\"\"\n r = requests.request(b'GET', httpbin('get'))\n assert r.ok\n\n\nSendCall = collections.namedtuple('SendCall', ('args', 'kwargs'))\n\n\nclass RedirectSession(SessionRedirectMixin):\n def __init__(self, order_of_redirects):\n self.redirects = order_of_redirects\n self.calls = []\n self.max_redirects = 30\n self.cookies = {}\n self.trust_env = False\n\n def send(self, *args, **kwargs):\n self.calls.append(SendCall(args, kwargs))\n return self.build_response()\n\n def build_response(self):\n request = self.calls[-1].args[0]\n r = requests.Response()\n\n try:\n r.status_code = int(self.redirects.pop(0))\n except IndexError:\n r.status_code = 200\n\n r.headers = CaseInsensitiveDict({'Location': '/'})\n r.raw = self._build_raw()\n r.request = request\n return r\n\n def _build_raw(self):\n string = StringIO.StringIO('')\n setattr(string, 'release_conn', lambda *args: args)\n return string\n\n\ndef test_json_encodes_as_bytes():\n # urllib3 expects bodies as bytes-like objects\n body = {\"key\": \"value\"}\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='https://www.example.com/',\n json=body\n )\n assert isinstance(p.body, bytes)\n\n\ndef test_requests_are_updated_each_time(httpbin):\n session = RedirectSession([303, 307])\n prep = requests.Request('POST', httpbin('post')).prepare()\n r0 = session.send(prep)\n assert r0.request.method == 'POST'\n assert session.calls[-1] == SendCall((r0.request,), 
{})\n redirect_generator = session.resolve_redirects(r0, prep)\n default_keyword_args = {\n 'stream': False,\n 'verify': True,\n 'cert': None,\n 'timeout': None,\n 'allow_redirects': False,\n 'proxies': {},\n }\n for response in redirect_generator:\n assert response.request.method == 'GET'\n send_call = SendCall((response.request,), default_keyword_args)\n assert session.calls[-1] == send_call\n\n\n@pytest.mark.parametrize(\"var,url,proxy\", [\n ('http_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('https_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n])\ndef test_proxy_env_vars_override_default(var, url, proxy):\n session = requests.Session()\n prep = PreparedRequest()\n prep.prepare(method='GET', url=url)\n\n kwargs = {\n var: proxy\n }\n scheme = urlparse(url).scheme\n with override_environ(**kwargs):\n proxies = session.rebuild_proxies(prep, {})\n assert scheme in proxies\n assert proxies[scheme] == proxy\n\n\n@pytest.mark.parametrize(\n 'data', (\n (('a', 'b'), ('c', 'd')),\n (('c', 'd'), ('a', 'b')),\n (('a', 'b'), ('c', 'd'), ('e', 'f')),\n ))\ndef test_data_argument_accepts_tuples(data):\n \"\"\"Ensure that the data argument will accept tuples of strings\n and properly encode them.\n \"\"\"\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='http://www.example.com',\n data=data,\n hooks=default_hooks()\n )\n assert p.body == urlencode(data)\n\n\n@pytest.mark.parametrize(\n 'kwargs', (\n None,\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks()\n },\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks(),\n 'cookies': {'foo': 'bar'}\n },\n {\n 'method': 'GET',\n 'url': u('http://www.example.com/üniçø∂é')\n },\n ))\ndef test_prepared_copy(kwargs):\n p = PreparedRequest()\n if kwargs:\n p.prepare(**kwargs)\n copy = p.copy()\n for attr in ('method', 'url', 'headers', '_cookies', 'body', 'hooks'):\n assert getattr(p, attr) == getattr(copy, attr)\n\n\ndef test_urllib3_retries(httpbin):\n from urllib3.util import Retry\n s = requests.Session()\n s.mount('http://', HTTPAdapter(max_retries=Retry(\n total=2, status_forcelist=[500]\n )))\n\n with pytest.raises(RetryError):\n s.get(httpbin('status/500'))\n\n\ndef test_urllib3_pool_connection_closed(httpbin):\n s = requests.Session()\n s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0))\n\n try:\n s.get(httpbin('status/200'))\n except ConnectionError as e:\n assert u\"Pool is closed.\" in str(e)\n\n\nclass TestPreparingURLs(object):\n @pytest.mark.parametrize(\n 'url,expected',\n (\n ('http://google.com', 'http://google.com/'),\n (u'http://ジェーピーニック.jp', u'http://xn--hckqz9bzb1cyrb.jp/'),\n (u'http://xn--n3h.net/', u'http://xn--n3h.net/'),\n (\n u'http://ジェーピーニック.jp'.encode('utf-8'),\n u'http://xn--hckqz9bzb1cyrb.jp/'\n ),\n (\n u'http://straße.de/straße',\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://straße.de/straße'.encode('utf-8'),\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße',\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße'.encode('utf-8'),\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n b'http://xn--n3h.net/',\n u'http://xn--n3h.net/'\n ),\n (\n b'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n 
u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n ),\n (\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n )\n )\n )\n def test_preparing_url(self, url, expected):\n\n def normalize_percent_encode(x):\n # Helper function that normalizes equivalent \n # percent-encoded bytes before comparisons\n for c in re.findall(r'%[a-fA-F0-9]{2}', x):\n x = x.replace(c, c.upper())\n return x\n \n r = requests.Request('GET', url=url)\n p = r.prepare()\n assert normalize_percent_encode(p.url) == expected\n\n @pytest.mark.parametrize(\n 'url',\n (\n b\"http://*.google.com\",\n b\"http://*\",\n u\"http://*.google.com\",\n u\"http://*\",\n u\"http://☃.net/\"\n )\n )\n def test_preparing_bad_url(self, url):\n r = requests.Request('GET', url=url)\n with pytest.raises(requests.exceptions.InvalidURL):\n r.prepare()\n\n @pytest.mark.parametrize(\n 'url, exception',\n (\n ('http://localhost:-1', InvalidURL),\n )\n )\n def test_redirecting_to_bad_url(self, httpbin, url, exception):\n with pytest.raises(exception):\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n\n @pytest.mark.parametrize(\n 'input, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n b\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n b\"data:SSDimaUgUHl0aG9uIQ==\",\n u\"data:SSDimaUgUHl0aG9uIQ==\",\n )\n )\n )\n def test_url_mutation(self, input, expected):\n \"\"\"\n This test validates that we correctly exclude some URLs from\n preparation, and that we handle others. Specifically, it tests that\n any URL whose scheme doesn't begin with \"http\" is left alone, and\n those whose scheme *does* begin with \"http\" are mutated.\n \"\"\"\n r = requests.Request('GET', url=input)\n p = r.prepare()\n assert p.url == expected\n\n @pytest.mark.parametrize(\n 'input, params, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n b\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n )\n )\n def test_parameters_for_nonstandard_schemes(self, input, params, expected):\n \"\"\"\n Setting parameters for nonstandard schemes is allowed if those schemes\n begin with \"http\", and is forbidden otherwise.\n \"\"\"\n r = requests.Request('GET', url=input, params=params)\n p = r.prepare()\n assert p.url == expected\n\n def test_post_json_nan(self, httpbin):\n data = {\"foo\": float(\"nan\")}\n with pytest.raises(requests.exceptions.InvalidJSONError):\n r = requests.post(httpbin('post'), json=data)\n\n def test_json_decode_compatibility(self, httpbin):\n r = requests.get(httpbin('bytes/20'))\n with pytest.raises(requests.exceptions.JSONDecodeError):\n r.json()\n",
"path": "tests/test_requests.py"
}
] | 13_4 | python | import sys
import pytest
# Requests to this URL should always fail with a connection timeout (nothing
# listening on that port)
TARPIT = "http://10.255.255.1"
# Using this instead of TARPIT gives an immediate connection failure, avoiding the wait for a full timeout
INVALID_PROXY = "http://localhost:1"
class TestRequests:
from requests.exceptions import (
InvalidHeader
)
try:
from ssl import SSLContext
del SSLContext
HAS_MODERN_SSL = True
except ImportError:
HAS_MODERN_SSL = False
try:
import requests
requests.pyopenssl
HAS_PYOPENSSL = True
except AttributeError:
HAS_PYOPENSSL = False
try:
from http.server import HTTPServer, SimpleHTTPRequestHandler
except ImportError:
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
@staticmethod
def prepare_url(value):
from requests.compat import urljoin
# Issue #1483: Make sure the URL always has a trailing slash
httpbin_url = value.url.rstrip("/") + "/"
def inner(*suffix):
return urljoin(httpbin_url, "/".join(suffix))
return inner
@pytest.fixture
def httpbin(self, httpbin):
return self.prepare_url(httpbin)
@pytest.fixture
def httpbin_secure(self, httpbin_secure):
return self.prepare_url(httpbin_secure)
@pytest.fixture
def nosan_server(self, tmp_path_factory):
# delay importing until the fixture in order to make it possible
# to deselect the test via command-line when trustme is not available
import trustme
import ssl
import threading
tmpdir = tmp_path_factory.mktemp("certs")
ca = trustme.CA()
# only commonName, no subjectAltName
server_cert = ca.issue_cert(common_name="localhost")
ca_bundle = str(tmpdir / "ca.pem")
ca.cert_pem.write_to_path(ca_bundle)
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
server_cert.configure_cert(context)
server = self.HTTPServer(("localhost", 0), self.SimpleHTTPRequestHandler)
server.socket = context.wrap_socket(server.socket, server_side=True)
server_thread = threading.Thread(target=server.serve_forever)
server_thread.start()
yield "localhost", server.server_address[1], ca_bundle
server.shutdown()
server_thread.join()
def test_json_decode_compatibility(self, httpbin):
import requests
from requests.exceptions import (
RequestException,
)
from requests.compat import (
JSONDecodeError
)
r = requests.get(httpbin("bytes/20"))
with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:
r.json()
assert isinstance(excinfo.value, RequestException)
assert isinstance(excinfo.value, JSONDecodeError)
assert r.text not in str(excinfo.value)
def test_json_decode_persists_doc_attr(self, httpbin):
import requests
r = requests.get(httpbin("bytes/20"))
with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:
r.json()
assert excinfo.value.doc == r.text
def main():
import pytest
# Run the pytest tests programmatically
exit_code = pytest.main(["-v", __file__])
# Exit with status code 1 if any test fails, otherwise 0
if exit_code != 0:
sys.exit(1)
else:
sys.exit(0)
if __name__ == '__main__':
main()
|
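The test script above leans on `requests.exceptions.JSONDecodeError` being catchable both as a `RequestException` and as the compat-layer `JSONDecodeError`. A minimal sketch of that hierarchy, simplified from requests/exceptions.py (the real classes also define an `__init__` that records the offending document, which is what the `.doc` assertion exercises):

# Simplified sketch of the exception hierarchy the assertions above rely on;
# see requests/exceptions.py for the full definitions.
from requests.compat import JSONDecodeError as CompatJSONDecodeError


class RequestException(IOError):
    """Base class for all exceptions explicitly raised by requests."""


class InvalidJSONError(RequestException):
    """The JSON body sent or received is invalid."""


class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
    """Raised by Response.json() for non-JSON bodies; the dual inheritance
    is what lets a single pytest.raises assert both identities above."""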
https://github.com/teamqurrent/requests | Your objective is to improve proxy handling in the requests library. Start by creating a `resolve_proxies` function in `requests/utils.py` that resolves the proxy mapping for a request from an explicit proxies dict and, when the environment is trusted, from environment settings such as NO_PROXY. Then, in `requests/sessions.py`, update the `Session.send` method to determine its proxies argument with this new function rather than with `Session.rebuild_proxies`, so that a Proxy-Authorization header already set on the session is no longer stripped from outgoing requests; `rebuild_proxies` itself should delegate its proxy resolution to the new helper. A repro sketch follows the requirements list below. | 590350f | -e .[socks]
pytest
pytest-cov
pytest-httpbin==1.0.0
pytest-mock
httpbin==0.7.0
trustme
wheel
chardet>=3.0.2,<3.1.0
idna>=2.5,<2.8
urllib3>=1.21.1,<1.24
certifi>=2017.4.17
# Flask Stack
Flask>1.0,<2.0
markupsafe<2.1
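To make the goal above concrete, here is a hypothetical repro (the URL is a placeholder for any reachable httpbin-style echo endpoint; it mirrors the `test_proxy_authorization_preserved_on_request` test added in the solution patch below):

# Hypothetical repro sketch; the endpoint URL is a placeholder.
import requests

session = requests.Session()
session.headers["Proxy-Authorization"] = "Bearer XXX"

resp = session.get("http://localhost:8080/get")  # placeholder httpbin-style URL
sent = resp.json().get("headers", {})

# Before the change, Session.send resolved proxies via rebuild_proxies, which
# deletes any Proxy-Authorization header; afterwards send() uses
# resolve_proxies and the header reaches the server intact.
assert sent.get("Proxy-Authorization") == "Bearer XXX"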
| python3.9 | 99b3b49 | diff --git a/requests/sessions.py b/requests/sessions.py
--- a/requests/sessions.py
+++ b/requests/sessions.py
@@ -29,7 +29,7 @@ from .adapters import HTTPAdapter
from .utils import (
requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
- get_auth_from_url, rewind_body
+ get_auth_from_url, rewind_body, resolve_proxies
)
from .status_codes import codes
@@ -269,7 +269,6 @@ class SessionRedirectMixin(object):
if new_auth is not None:
prepared_request.prepare_auth(new_auth)
-
def rebuild_proxies(self, prepared_request, proxies):
"""This method re-evaluates the proxy configuration by considering the
environment variables. If we are redirected to a URL covered by
@@ -282,21 +281,9 @@ class SessionRedirectMixin(object):
:rtype: dict
"""
- proxies = proxies if proxies is not None else {}
headers = prepared_request.headers
- url = prepared_request.url
- scheme = urlparse(url).scheme
- new_proxies = proxies.copy()
- no_proxy = proxies.get('no_proxy')
-
- bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)
- if self.trust_env and not bypass_proxy:
- environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
-
- proxy = environ_proxies.get(scheme, environ_proxies.get('all'))
-
- if proxy:
- new_proxies.setdefault(scheme, proxy)
+ scheme = urlparse(prepared_request.url).scheme
+ new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)
if 'Proxy-Authorization' in headers:
del headers['Proxy-Authorization']
@@ -633,7 +620,10 @@ class Session(SessionRedirectMixin):
kwargs.setdefault('stream', self.stream)
kwargs.setdefault('verify', self.verify)
kwargs.setdefault('cert', self.cert)
- kwargs.setdefault('proxies', self.rebuild_proxies(request, self.proxies))
+ if 'proxies' not in kwargs:
+ kwargs['proxies'] = resolve_proxies(
+ request, self.proxies, self.trust_env
+ )
# It's possible that users might accidentally send a Request object.
# Guard against that specific failure case.
diff --git a/requests/utils.py b/requests/utils.py
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -830,6 +830,34 @@ def select_proxy(url, proxies):
return proxy
+def resolve_proxies(request, proxies, trust_env=True):
+ """This method takes proxy information from a request and configuration
+ input to resolve a mapping of target proxies. This will consider settings
+ such as NO_PROXY to strip proxy configurations.
+
+ :param request: Request or PreparedRequest
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
+ :param trust_env: Boolean declaring whether to trust environment configs
+
+ :rtype: dict
+ """
+ proxies = proxies if proxies is not None else {}
+ url = request.url
+ scheme = urlparse(url).scheme
+ no_proxy = proxies.get('no_proxy')
+ new_proxies = proxies.copy()
+
+ bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)
+ if trust_env and not bypass_proxy:
+ environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
+
+ proxy = environ_proxies.get(scheme, environ_proxies.get('all'))
+
+ if proxy:
+ new_proxies.setdefault(scheme, proxy)
+ return new_proxies
+
+
def default_user_agent(name="python-requests"):
"""
Return a string representing the default user agent.
diff --git a/tests/test_requests.py b/tests/test_requests.py
--- a/tests/test_requests.py
+++ b/tests/test_requests.py
@@ -590,6 +590,15 @@ class TestRequests:
session = requests.Session()
session.request(method='GET', url=httpbin())
+ def test_proxy_authorization_preserved_on_request(self, httpbin):
+ proxy_auth_value = "Bearer XXX"
+ session = requests.Session()
+ session.headers.update({"Proxy-Authorization": proxy_auth_value})
+ resp = session.request(method='GET', url=httpbin('get'))
+ sent_headers = resp.json().get('headers', {})
+
+ assert sent_headers.get("Proxy-Authorization") == proxy_auth_value
+
def test_basicauth_with_netrc(self, httpbin):
auth = ('user', 'pass')
wrong_auth = ('wronguser', 'wrongpass')
@@ -2575,4 +2584,4 @@ class TestPreparingURLs(object):
def test_json_decode_compatibility(self, httpbin):
r = requests.get(httpbin('bytes/20'))
with pytest.raises(requests.exceptions.JSONDecodeError):
- r.json()
\ No newline at end of file
+ r.json()
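For reference, a short usage sketch of the `resolve_proxies` helper the patch introduces (assumes a requests checkout with the patch applied; example.com and the proxy address are placeholders):

# Usage sketch for the resolve_proxies helper added above.
from requests import PreparedRequest
from requests.utils import resolve_proxies

req = PreparedRequest()
req.prepare(method="GET", url="http://example.com/")

# With trust_env=False the explicit mapping is simply copied through.
assert resolve_proxies(req, {"http": "http://proxy:3128"}, trust_env=False) == {
    "http": "http://proxy:3128"
}

# A matching no_proxy entry bypasses environment lookups entirely, so the
# helper again returns just a copy of what it was given.
assert resolve_proxies(req, {"no_proxy": "example.com"}, trust_env=True) == {
    "no_proxy": "example.com"
}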
| [
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.sessions\n~~~~~~~~~~~~~~~~~\n\nThis module provides a Session object to manage and persist settings across\nrequests (cookies, auth, proxies).\n\"\"\"\nimport os\nimport sys\nimport time\nfrom datetime import timedelta\nfrom collections import OrderedDict\n\nfrom .auth import _basic_auth_str\nfrom .compat import cookielib, is_py3, urljoin, urlparse, Mapping\nfrom .cookies import (\n cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)\nfrom .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT\nfrom .hooks import default_hooks, dispatch_hook\nfrom ._internal_utils import to_native_string\nfrom .utils import to_key_val_list, default_headers, DEFAULT_PORTS\nfrom .exceptions import (\n TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)\n\nfrom .structures import CaseInsensitiveDict\nfrom .adapters import HTTPAdapter\n\nfrom .utils import (\n requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,\n get_auth_from_url, rewind_body\n)\n\nfrom .status_codes import codes\n\n# formerly defined here, reexposed here for backward compatibility\nfrom .models import REDIRECT_STATI\n\n# Preferred clock, based on which one is more accurate on a given system.\nif sys.platform == 'win32':\n try: # Python 3.4+\n preferred_clock = time.perf_counter\n except AttributeError: # Earlier than Python 3.\n preferred_clock = time.clock\nelse:\n preferred_clock = time.time\n\n\ndef merge_setting(request_setting, session_setting, dict_class=OrderedDict):\n \"\"\"Determines appropriate setting for a given request, taking into account\n the explicit setting on that request, and the setting in the session. If a\n setting is a dictionary, they will be merged together using `dict_class`\n \"\"\"\n\n if session_setting is None:\n return request_setting\n\n if request_setting is None:\n return session_setting\n\n # Bypass if not a dictionary (e.g. verify)\n if not (\n isinstance(session_setting, Mapping) and\n isinstance(request_setting, Mapping)\n ):\n return request_setting\n\n merged_setting = dict_class(to_key_val_list(session_setting))\n merged_setting.update(to_key_val_list(request_setting))\n\n # Remove keys that are set to None. Extract keys first to avoid altering\n # the dictionary during iteration.\n none_keys = [k for (k, v) in merged_setting.items() if v is None]\n for key in none_keys:\n del merged_setting[key]\n\n return merged_setting\n\n\ndef merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):\n \"\"\"Properly merges both requests and session hooks.\n\n This is necessary because when request_hooks == {'response': []}, the\n merge breaks Session hooks entirely.\n \"\"\"\n if session_hooks is None or session_hooks.get('response') == []:\n return request_hooks\n\n if request_hooks is None or request_hooks.get('response') == []:\n return session_hooks\n\n return merge_setting(request_hooks, session_hooks, dict_class)\n\n\nclass SessionRedirectMixin(object):\n\n def get_redirect_target(self, resp):\n \"\"\"Receives a Response. 
Returns a redirect URI or ``None``\"\"\"\n # Due to the nature of how requests processes redirects this method will\n # be called at least once upon the original response and at least twice\n # on each subsequent redirect response (if any).\n # If a custom mixin is used to handle this logic, it may be advantageous\n # to cache the redirect location onto the response object as a private\n # attribute.\n if resp.is_redirect:\n location = resp.headers['location']\n # Currently the underlying http module on py3 decode headers\n # in latin1, but empirical evidence suggests that latin1 is very\n # rarely used with non-ASCII characters in HTTP headers.\n # It is more likely to get UTF8 header rather than latin1.\n # This causes incorrect handling of UTF8 encoded location headers.\n # To solve this, we re-encode the location in latin1.\n if is_py3:\n location = location.encode('latin1')\n return to_native_string(location, 'utf8')\n return None\n\n def should_strip_auth(self, old_url, new_url):\n \"\"\"Decide whether Authorization header should be removed when redirecting\"\"\"\n old_parsed = urlparse(old_url)\n new_parsed = urlparse(new_url)\n if old_parsed.hostname != new_parsed.hostname:\n return True\n # Special case: allow http -> https redirect when using the standard\n # ports. This isn't specified by RFC 7235, but is kept to avoid\n # breaking backwards compatibility with older versions of requests\n # that allowed any redirects on the same host.\n if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)\n and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):\n return False\n\n # Handle default port usage corresponding to scheme.\n changed_port = old_parsed.port != new_parsed.port\n changed_scheme = old_parsed.scheme != new_parsed.scheme\n default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)\n if (not changed_scheme and old_parsed.port in default_port\n and new_parsed.port in default_port):\n return False\n\n # Standard case: root URI must match\n return changed_port or changed_scheme\n\n def resolve_redirects(self, resp, req, stream=False, timeout=None,\n verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):\n \"\"\"Receives a Response. 
Returns a generator of Responses or Requests.\"\"\"\n\n hist = [] # keep track of history\n\n url = self.get_redirect_target(resp)\n previous_fragment = urlparse(req.url).fragment\n while url:\n prepared_request = req.copy()\n\n # Update history and keep track of redirects.\n # resp.history must ignore the original request in this loop\n hist.append(resp)\n resp.history = hist[1:]\n\n try:\n resp.content # Consume socket so it can be released\n except (ChunkedEncodingError, ContentDecodingError, RuntimeError):\n resp.raw.read(decode_content=False)\n\n if len(resp.history) >= self.max_redirects:\n raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)\n\n # Release the connection back into the pool.\n resp.close()\n\n # Handle redirection without scheme (see: RFC 1808 Section 4)\n if url.startswith('//'):\n parsed_rurl = urlparse(resp.url)\n url = ':'.join([to_native_string(parsed_rurl.scheme), url])\n\n # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)\n parsed = urlparse(url)\n if parsed.fragment == '' and previous_fragment:\n parsed = parsed._replace(fragment=previous_fragment)\n elif parsed.fragment:\n previous_fragment = parsed.fragment\n url = parsed.geturl()\n\n # Facilitate relative 'location' headers, as allowed by RFC 7231.\n # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')\n # Compliant with RFC3986, we percent encode the url.\n if not parsed.netloc:\n url = urljoin(resp.url, requote_uri(url))\n else:\n url = requote_uri(url)\n\n prepared_request.url = to_native_string(url)\n\n self.rebuild_method(prepared_request, resp)\n\n # https://github.com/psf/requests/issues/1084\n if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):\n # https://github.com/psf/requests/issues/3490\n purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')\n for header in purged_headers:\n prepared_request.headers.pop(header, None)\n prepared_request.body = None\n\n headers = prepared_request.headers\n headers.pop('Cookie', None)\n\n # Extract any cookies sent on the response to the cookiejar\n # in the new request. Because we've mutated our copied prepared\n # request, use the old one that we haven't yet touched.\n extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)\n merge_cookies(prepared_request._cookies, self.cookies)\n prepared_request.prepare_cookies(prepared_request._cookies)\n\n # Rebuild auth and proxy information.\n proxies = self.rebuild_proxies(prepared_request, proxies)\n self.rebuild_auth(prepared_request, resp)\n\n # A failed tell() sets `_body_position` to `object()`. 
This non-None\n # value ensures `rewindable` will be True, allowing us to raise an\n # UnrewindableBodyError, instead of hanging the connection.\n rewindable = (\n prepared_request._body_position is not None and\n ('Content-Length' in headers or 'Transfer-Encoding' in headers)\n )\n\n # Attempt to rewind consumed file-like object.\n if rewindable:\n rewind_body(prepared_request)\n\n # Override the original request.\n req = prepared_request\n\n if yield_requests:\n yield req\n else:\n\n resp = self.send(\n req,\n stream=stream,\n timeout=timeout,\n verify=verify,\n cert=cert,\n proxies=proxies,\n allow_redirects=False,\n **adapter_kwargs\n )\n\n extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)\n\n # extract redirect url, if any, for the next loop\n url = self.get_redirect_target(resp)\n yield resp\n\n def rebuild_auth(self, prepared_request, response):\n \"\"\"When being redirected we may want to strip authentication from the\n request to avoid leaking credentials. This method intelligently removes\n and reapplies authentication where possible to avoid credential loss.\n \"\"\"\n headers = prepared_request.headers\n url = prepared_request.url\n\n if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):\n # If we get redirected to a new host, we should strip out any\n # authentication headers.\n del headers['Authorization']\n\n # .netrc might have more auth for us on our new host.\n new_auth = get_netrc_auth(url) if self.trust_env else None\n if new_auth is not None:\n prepared_request.prepare_auth(new_auth)\n\n\n def rebuild_proxies(self, prepared_request, proxies):\n \"\"\"This method re-evaluates the proxy configuration by considering the\n environment variables. If we are redirected to a URL covered by\n NO_PROXY, we strip the proxy configuration. 
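A sketch of why the rewind logic above exists: a seekable body consumed by the first request must be seeked back before the redirected request is re-sent. `_body_position` is an internal attribute, inspected here only for illustration; the URL is hypothetical and nothing is sent over the network.

```python
import io
import requests
from requests.utils import rewind_body

body = io.BytesIO(b'payload')
req = requests.Request('POST', 'http://example.com/submit', data=body)
prep = requests.Session().prepare_request(req)
assert prep._body_position == 0   # starting offset recorded at prepare time

body.read()                       # simulate the adapter consuming the body
rewind_body(prep)                 # seeks the stream back to _body_position
assert body.tell() == 0
```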
Otherwise, we set missing\n proxy keys for this URL (in case they were stripped by a previous\n redirect).\n\n This method also replaces the Proxy-Authorization header where\n necessary.\n\n :rtype: dict\n \"\"\"\n proxies = proxies if proxies is not None else {}\n headers = prepared_request.headers\n url = prepared_request.url\n scheme = urlparse(url).scheme\n new_proxies = proxies.copy()\n no_proxy = proxies.get('no_proxy')\n\n bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)\n if self.trust_env and not bypass_proxy:\n environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n\n proxy = environ_proxies.get(scheme, environ_proxies.get('all'))\n\n if proxy:\n new_proxies.setdefault(scheme, proxy)\n\n if 'Proxy-Authorization' in headers:\n del headers['Proxy-Authorization']\n\n try:\n username, password = get_auth_from_url(new_proxies[scheme])\n except KeyError:\n username, password = None, None\n\n if username and password:\n headers['Proxy-Authorization'] = _basic_auth_str(username, password)\n\n return new_proxies\n\n def rebuild_method(self, prepared_request, response):\n \"\"\"When being redirected we may want to change the method of the request\n based on certain specs or browser behavior.\n \"\"\"\n method = prepared_request.method\n\n # https://tools.ietf.org/html/rfc7231#section-6.4.4\n if response.status_code == codes.see_other and method != 'HEAD':\n method = 'GET'\n\n # Do what the browsers do, despite standards...\n # First, turn 302s into GETs.\n if response.status_code == codes.found and method != 'HEAD':\n method = 'GET'\n\n # Second, if a POST is responded to with a 301, turn it into a GET.\n # This bizarre behaviour is explained in Issue 1704.\n if response.status_code == codes.moved and method == 'POST':\n method = 'GET'\n\n prepared_request.method = method\n\n\nclass Session(SessionRedirectMixin):\n \"\"\"A Requests session.\n\n Provides cookie persistence, connection-pooling, and configuration.\n\n Basic Usage::\n\n >>> import requests\n >>> s = requests.Session()\n >>> s.get('https://httpbin.org/get')\n <Response [200]>\n\n Or as a context manager::\n\n >>> with requests.Session() as s:\n ... s.get('https://httpbin.org/get')\n <Response [200]>\n \"\"\"\n\n __attrs__ = [\n 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',\n 'cert', 'adapters', 'stream', 'trust_env',\n 'max_redirects',\n ]\n\n def __init__(self):\n\n #: A case-insensitive dictionary of headers to be sent on each\n #: :class:`Request <Request>` sent from this\n #: :class:`Session <Session>`.\n self.headers = default_headers()\n\n #: Default Authentication tuple or object to attach to\n #: :class:`Request <Request>`.\n self.auth = None\n\n #: Dictionary mapping protocol or protocol and host to the URL of the proxy\n #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to\n #: be used on each :class:`Request <Request>`.\n self.proxies = {}\n\n #: Event-handling hooks.\n self.hooks = default_hooks()\n\n #: Dictionary of querystring data to attach to each\n #: :class:`Request <Request>`. 
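The net effect of `rebuild_method` is a small mapping, which a few asserts can pin down. A minimal sketch using bare `PreparedRequest`/`Response` objects, with no network involved:

```python
# Minimal check of the method-rewriting rules in rebuild_method() above.
import requests
from requests.models import PreparedRequest

def redirected_method(method, status_code):
    prep = PreparedRequest()
    prep.method = method
    resp = requests.Response()
    resp.status_code = status_code
    requests.Session().rebuild_method(prep, resp)
    return prep.method

assert redirected_method('POST', 303) == 'GET'   # RFC 7231 see-other
assert redirected_method('POST', 302) == 'GET'   # browser-compatible
assert redirected_method('POST', 301) == 'GET'   # see issue 1704
assert redirected_method('POST', 307) == 'POST'  # 307 preserves the method
assert redirected_method('HEAD', 302) == 'HEAD'  # HEAD is never rewritten
```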
The dictionary values may be lists for\n #: representing multivalued query parameters.\n self.params = {}\n\n #: Stream response content default.\n self.stream = False\n\n #: SSL Verification default.\n #: Defaults to `True`, requiring requests to verify the TLS certificate at the\n #: remote end.\n #: If verify is set to `False`, requests will accept any TLS certificate\n #: presented by the server, and will ignore hostname mismatches and/or\n #: expired certificates, which will make your application vulnerable to\n #: man-in-the-middle (MitM) attacks.\n #: Only set this to `False` for testing.\n self.verify = True\n\n #: SSL client certificate default, if String, path to ssl client\n #: cert file (.pem). If Tuple, ('cert', 'key') pair.\n self.cert = None\n\n #: Maximum number of redirects allowed. If the request exceeds this\n #: limit, a :class:`TooManyRedirects` exception is raised.\n #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is\n #: 30.\n self.max_redirects = DEFAULT_REDIRECT_LIMIT\n\n #: Trust environment settings for proxy configuration, default\n #: authentication and similar.\n self.trust_env = True\n\n #: A CookieJar containing all currently outstanding cookies set on this\n #: session. By default it is a\n #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but\n #: may be any other ``cookielib.CookieJar`` compatible object.\n self.cookies = cookiejar_from_dict({})\n\n # Default connection adapters.\n self.adapters = OrderedDict()\n self.mount('https://', HTTPAdapter())\n self.mount('http://', HTTPAdapter())\n\n def __enter__(self):\n return self\n\n def __exit__(self, *args):\n self.close()\n\n def prepare_request(self, request):\n \"\"\"Constructs a :class:`PreparedRequest <PreparedRequest>` for\n transmission and returns it. 
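Each of the attributes initialised above can be overridden per session before any request goes out. A short usage sketch; the header value and CA-bundle path are made up:

```python
import requests

with requests.Session() as s:
    s.headers.update({'X-Team': 'docs'})        # merged into every request
    s.params = {'source': 'cli'}                # appended to every query string
    s.max_redirects = 5                         # tighter than the default 30
    s.verify = '/etc/ssl/certs/ca-bundle.crt'   # assumed CA bundle path
    # s.get('https://example.com/')             # would use all of the above
```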
The :class:`PreparedRequest` has settings\n merged from the :class:`Request <Request>` instance and those of the\n :class:`Session`.\n\n :param request: :class:`Request` instance to prepare with this\n session's settings.\n :rtype: requests.PreparedRequest\n \"\"\"\n cookies = request.cookies or {}\n\n # Bootstrap CookieJar.\n if not isinstance(cookies, cookielib.CookieJar):\n cookies = cookiejar_from_dict(cookies)\n\n # Merge with session cookies\n merged_cookies = merge_cookies(\n merge_cookies(RequestsCookieJar(), self.cookies), cookies)\n\n # Set environment's basic authentication if not explicitly set.\n auth = request.auth\n if self.trust_env and not auth and not self.auth:\n auth = get_netrc_auth(request.url)\n\n p = PreparedRequest()\n p.prepare(\n method=request.method.upper(),\n url=request.url,\n files=request.files,\n data=request.data,\n json=request.json,\n headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),\n params=merge_setting(request.params, self.params),\n auth=merge_setting(auth, self.auth),\n cookies=merged_cookies,\n hooks=merge_hooks(request.hooks, self.hooks),\n )\n return p\n\n def request(self, method, url,\n params=None, data=None, headers=None, cookies=None, files=None,\n auth=None, timeout=None, allow_redirects=True, proxies=None,\n hooks=None, stream=None, verify=None, cert=None, json=None):\n \"\"\"Constructs a :class:`Request <Request>`, prepares it and sends it.\n Returns :class:`Response <Response>` object.\n\n :param method: method for the new :class:`Request` object.\n :param url: URL for the new :class:`Request` object.\n :param params: (optional) Dictionary or bytes to be sent in the query\n string for the :class:`Request`.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the\n :class:`Request`.\n :param headers: (optional) Dictionary of HTTP Headers to send with the\n :class:`Request`.\n :param cookies: (optional) Dict or CookieJar object to send with the\n :class:`Request`.\n :param files: (optional) Dictionary of ``'filename': file-like-objects``\n for multipart encoding upload.\n :param auth: (optional) Auth tuple or callable to enable\n Basic/Digest/Custom HTTP Auth.\n :param timeout: (optional) How long to wait for the server to send\n data before giving up, as a float, or a :ref:`(connect timeout,\n read timeout) <timeouts>` tuple.\n :type timeout: float or tuple\n :param allow_redirects: (optional) Set to True by default.\n :type allow_redirects: bool\n :param proxies: (optional) Dictionary mapping protocol or protocol and\n hostname to the URL of the proxy.\n :param stream: (optional) whether to immediately download the response\n content. Defaults to ``False``.\n :param verify: (optional) Either a boolean, in which case it controls whether we verify\n the server's TLS certificate, or a string, in which case it must be a path\n to a CA bundle to use. Defaults to ``True``. When set to\n ``False``, requests will accept any TLS certificate presented by\n the server, and will ignore hostname mismatches and/or expired\n certificates, which will make your application vulnerable to\n man-in-the-middle (MitM) attacks. 
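`prepare_request` is the hook to use when the exact outgoing request needs to be inspected or tweaked before sending. A sketch of the prepare-then-send flow (hypothetical URL; the send call is left commented out to keep this network-free):

```python
import requests

s = requests.Session()
req = requests.Request('GET', 'http://example.com/', params={'q': 'x'})
prep = s.prepare_request(req)        # session headers/params/cookies merged here
prep.headers['X-Trace'] = 'abc123'   # mutate the prepared request directly
assert prep.url == 'http://example.com/?q=x'
# resp = s.send(prep, timeout=(3.05, 27))   # (connect, read) timeout tuple
```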
Setting verify to ``False`` \n may be useful during local development or testing.\n :param cert: (optional) if String, path to ssl client cert file (.pem).\n If Tuple, ('cert', 'key') pair.\n :rtype: requests.Response\n \"\"\"\n # Create the Request.\n req = Request(\n method=method.upper(),\n url=url,\n headers=headers,\n files=files,\n data=data or {},\n json=json,\n params=params or {},\n auth=auth,\n cookies=cookies,\n hooks=hooks,\n )\n prep = self.prepare_request(req)\n\n proxies = proxies or {}\n\n settings = self.merge_environment_settings(\n prep.url, proxies, stream, verify, cert\n )\n\n # Send the request.\n send_kwargs = {\n 'timeout': timeout,\n 'allow_redirects': allow_redirects,\n }\n send_kwargs.update(settings)\n resp = self.send(prep, **send_kwargs)\n\n return resp\n\n def get(self, url, **kwargs):\n r\"\"\"Sends a GET request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', True)\n return self.request('GET', url, **kwargs)\n\n def options(self, url, **kwargs):\n r\"\"\"Sends a OPTIONS request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', True)\n return self.request('OPTIONS', url, **kwargs)\n\n def head(self, url, **kwargs):\n r\"\"\"Sends a HEAD request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n kwargs.setdefault('allow_redirects', False)\n return self.request('HEAD', url, **kwargs)\n\n def post(self, url, data=None, json=None, **kwargs):\n r\"\"\"Sends a POST request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param json: (optional) json to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('POST', url, data=data, json=json, **kwargs)\n\n def put(self, url, data=None, **kwargs):\n r\"\"\"Sends a PUT request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('PUT', url, data=data, **kwargs)\n\n def patch(self, url, data=None, **kwargs):\n r\"\"\"Sends a PATCH request. Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param data: (optional) Dictionary, list of tuples, bytes, or file-like\n object to send in the body of the :class:`Request`.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('PATCH', url, data=data, **kwargs)\n\n def delete(self, url, **kwargs):\n r\"\"\"Sends a DELETE request. 
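A quick reference for the helper methods defined above; the hostnames are hypothetical and the calls are left unsent:

```python
# The helpers all funnel into Session.request(); the one behavioural
# difference worth remembering is the allow_redirects default (False for
# head(), True for the others).
import requests

s = requests.Session()
# s.post('https://api.example.com/items', json={'name': 'widget'})
# s.request('POST', 'https://api.example.com/items', json={'name': 'widget'})  # equivalent
# s.head('https://api.example.com/items', allow_redirects=True)  # opt back in
```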
Returns :class:`Response` object.\n\n :param url: URL for the new :class:`Request` object.\n :param \\*\\*kwargs: Optional arguments that ``request`` takes.\n :rtype: requests.Response\n \"\"\"\n\n return self.request('DELETE', url, **kwargs)\n\n def send(self, request, **kwargs):\n \"\"\"Send a given PreparedRequest.\n\n :rtype: requests.Response\n \"\"\"\n # Set defaults that the hooks can utilize to ensure they always have\n # the correct parameters to reproduce the previous request.\n kwargs.setdefault('stream', self.stream)\n kwargs.setdefault('verify', self.verify)\n kwargs.setdefault('cert', self.cert)\n kwargs.setdefault('proxies', self.rebuild_proxies(request, self.proxies))\n\n # It's possible that users might accidentally send a Request object.\n # Guard against that specific failure case.\n if isinstance(request, Request):\n raise ValueError('You can only send PreparedRequests.')\n\n # Set up variables needed for resolve_redirects and dispatching of hooks\n allow_redirects = kwargs.pop('allow_redirects', True)\n stream = kwargs.get('stream')\n hooks = request.hooks\n\n # Get the appropriate adapter to use\n adapter = self.get_adapter(url=request.url)\n\n # Start time (approximately) of the request\n start = preferred_clock()\n\n # Send the request\n r = adapter.send(request, **kwargs)\n\n # Total elapsed time of the request (approximately)\n elapsed = preferred_clock() - start\n r.elapsed = timedelta(seconds=elapsed)\n\n # Response manipulation hooks\n r = dispatch_hook('response', hooks, r, **kwargs)\n\n # Persist cookies\n if r.history:\n\n # If the hooks create history then we want those cookies too\n for resp in r.history:\n extract_cookies_to_jar(self.cookies, resp.request, resp.raw)\n\n extract_cookies_to_jar(self.cookies, request, r.raw)\n\n # Resolve redirects if allowed.\n if allow_redirects:\n # Redirect resolving generator.\n gen = self.resolve_redirects(r, request, **kwargs)\n history = [resp for resp in gen]\n else:\n history = []\n\n # Shuffle things around if there's history.\n if history:\n # Insert the first (original) request at the start\n history.insert(0, r)\n # Get the last request made\n r = history.pop()\n r.history = history\n\n # If redirects aren't being followed, store the response on the Request for Response.next().\n if not allow_redirects:\n try:\n r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs))\n except StopIteration:\n pass\n\n if not stream:\n r.content\n\n return r\n\n def merge_environment_settings(self, url, proxies, stream, verify, cert):\n \"\"\"\n Check the environment and merge it with some settings.\n\n :rtype: dict\n \"\"\"\n # Gather clues from the surrounding environment.\n if self.trust_env:\n # Set environment's proxies.\n no_proxy = proxies.get('no_proxy') if proxies is not None else None\n env_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n for (k, v) in env_proxies.items():\n proxies.setdefault(k, v)\n\n # Look for requests environment configuration and be compatible\n # with cURL.\n if verify is True or verify is None:\n verify = (os.environ.get('REQUESTS_CA_BUNDLE') or\n os.environ.get('CURL_CA_BUNDLE'))\n\n # Merge all the kwargs.\n proxies = merge_setting(proxies, self.proxies)\n stream = merge_setting(stream, self.stream)\n verify = merge_setting(verify, self.verify)\n cert = merge_setting(cert, self.cert)\n\n return {'verify': verify, 'proxies': proxies, 'stream': stream,\n 'cert': cert}\n\n def get_adapter(self, url):\n \"\"\"\n Returns the appropriate connection adapter for the 
given URL.\n\n :rtype: requests.adapters.BaseAdapter\n \"\"\"\n for (prefix, adapter) in self.adapters.items():\n\n if url.lower().startswith(prefix.lower()):\n return adapter\n\n # Nothing matches :-/\n raise InvalidSchema(\"No connection adapters were found for {!r}\".format(url))\n\n def close(self):\n \"\"\"Closes all adapters and as such the session\"\"\"\n for v in self.adapters.values():\n v.close()\n\n def mount(self, prefix, adapter):\n \"\"\"Registers a connection adapter to a prefix.\n\n Adapters are sorted in descending order by prefix length.\n \"\"\"\n self.adapters[prefix] = adapter\n keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]\n\n for key in keys_to_move:\n self.adapters[key] = self.adapters.pop(key)\n\n def __getstate__(self):\n state = {attr: getattr(self, attr, None) for attr in self.__attrs__}\n return state\n\n def __setstate__(self, state):\n for attr, value in state.items():\n setattr(self, attr, value)\n\n\ndef session():\n \"\"\"\n Returns a :class:`Session` for context-management.\n\n .. deprecated:: 1.0.0\n\n This method has been deprecated since version 1.0.0 and is only kept for\n backwards compatibility. New code should use :class:`~requests.sessions.Session`\n to create a session. This may be removed at a future date.\n\n :rtype: Session\n \"\"\"\n return Session()\n",
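`mount` keeps adapters ordered longest-prefix-first, so `get_adapter` returns the most specific match. A sketch mounting a retrying adapter for a single (hypothetical) host:

```python
import requests
from requests.adapters import HTTPAdapter

s = requests.Session()
retrying = HTTPAdapter(max_retries=3)
s.mount('https://flaky.example.com/', retrying)

# The longer prefix wins over the default 'https://' adapter...
assert s.get_adapter('https://flaky.example.com/api') is retrying
# ...while other hosts still get the default adapter.
assert s.get_adapter('https://other.example.com/') is not retrying
```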
"path": "requests/sessions.py"
},
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.utils\n~~~~~~~~~~~~~~\n\nThis module provides utility functions that are used within Requests\nthat are also useful for external consumption.\n\"\"\"\n\nimport codecs\nimport contextlib\nimport io\nimport os\nimport re\nimport socket\nimport struct\nimport sys\nimport tempfile\nimport warnings\nimport zipfile\nfrom collections import OrderedDict\nfrom urllib3.util import make_headers\n\nfrom .__version__ import __version__\nfrom . import certs\n# to_native_string is unused here, but imported here for backwards compatibility\nfrom ._internal_utils import to_native_string\nfrom .compat import parse_http_list as _parse_list_header\nfrom .compat import (\n quote, urlparse, bytes, str, unquote, getproxies,\n proxy_bypass, urlunparse, basestring, integer_types, is_py3,\n proxy_bypass_environment, getproxies_environment, Mapping)\nfrom .cookies import cookiejar_from_dict\nfrom .structures import CaseInsensitiveDict\nfrom .exceptions import (\n InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)\n\nNETRC_FILES = ('.netrc', '_netrc')\n\nDEFAULT_CA_BUNDLE_PATH = certs.where()\n\nDEFAULT_PORTS = {'http': 80, 'https': 443}\n\n# Ensure that ', ' is used to preserve previous delimiter behavior.\nDEFAULT_ACCEPT_ENCODING = \", \".join(\n re.split(r\",\\s*\", make_headers(accept_encoding=True)[\"accept-encoding\"])\n)\n\n\nif sys.platform == 'win32':\n # provide a proxy_bypass version on Windows without DNS lookups\n\n def proxy_bypass_registry(host):\n try:\n if is_py3:\n import winreg\n else:\n import _winreg as winreg\n except ImportError:\n return False\n\n try:\n internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n r'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings')\n # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it\n proxyEnable = int(winreg.QueryValueEx(internetSettings,\n 'ProxyEnable')[0])\n # ProxyOverride is almost always a string\n proxyOverride = winreg.QueryValueEx(internetSettings,\n 'ProxyOverride')[0]\n except OSError:\n return False\n if not proxyEnable or not proxyOverride:\n return False\n\n # make a check value list from the registry entry: replace the\n # '<local>' string by the localhost entry and the corresponding\n # canonical entry.\n proxyOverride = proxyOverride.split(';')\n # now check if we match one of the registry values.\n for test in proxyOverride:\n if test == '<local>':\n if '.' 
not in host:\n return True\n test = test.replace(\".\", r\"\\.\") # mask dots\n test = test.replace(\"*\", r\".*\") # change glob sequence\n test = test.replace(\"?\", r\".\") # change glob char\n if re.match(test, host, re.I):\n return True\n return False\n\n def proxy_bypass(host): # noqa\n \"\"\"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n \"\"\"\n if getproxies_environment():\n return proxy_bypass_environment(host)\n else:\n return proxy_bypass_registry(host)\n\n\ndef dict_to_sequence(d):\n \"\"\"Returns an internal sequence dictionary update.\"\"\"\n\n if hasattr(d, 'items'):\n d = d.items()\n\n return d\n\n\ndef super_len(o):\n total_length = None\n current_position = 0\n\n if hasattr(o, '__len__'):\n total_length = len(o)\n\n elif hasattr(o, 'len'):\n total_length = o.len\n\n elif hasattr(o, 'fileno'):\n try:\n fileno = o.fileno()\n except io.UnsupportedOperation:\n pass\n else:\n total_length = os.fstat(fileno).st_size\n\n # Having used fstat to determine the file length, we need to\n # confirm that this file was opened up in binary mode.\n if 'b' not in o.mode:\n warnings.warn((\n \"Requests has determined the content-length for this \"\n \"request using the binary size of the file: however, the \"\n \"file has been opened in text mode (i.e. without the 'b' \"\n \"flag in the mode). This may lead to an incorrect \"\n \"content-length. In Requests 3.0, support will be removed \"\n \"for files in text mode.\"),\n FileModeWarning\n )\n\n if hasattr(o, 'tell'):\n try:\n current_position = o.tell()\n except (OSError, IOError):\n # This can happen in some weird situations, such as when the file\n # is actually a special file descriptor like stdin. In this\n # instance, we don't know what the length is, so set it to zero and\n # let requests chunk it instead.\n if total_length is not None:\n current_position = total_length\n else:\n if hasattr(o, 'seek') and total_length is None:\n # StringIO and BytesIO have seek but no usable fileno\n try:\n # seek to end of file\n o.seek(0, 2)\n total_length = o.tell()\n\n # seek back to current position to support\n # partially read file-like objects\n o.seek(current_position or 0)\n except (OSError, IOError):\n total_length = 0\n\n if total_length is None:\n total_length = 0\n\n return max(0, total_length - current_position)\n\n\ndef get_netrc_auth(url, raise_errors=False):\n \"\"\"Returns the Requests tuple auth for a given url from netrc.\"\"\"\n\n netrc_file = os.environ.get('NETRC')\n if netrc_file is not None:\n netrc_locations = (netrc_file,)\n else:\n netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)\n\n try:\n from netrc import netrc, NetrcParseError\n\n netrc_path = None\n\n for f in netrc_locations:\n try:\n loc = os.path.expanduser(f)\n except KeyError:\n # os.path.expanduser can fail when $HOME is undefined and\n # getpwuid fails. See https://bugs.python.org/issue20164 &\n # https://github.com/psf/requests/issues/1846\n return\n\n if os.path.exists(loc):\n netrc_path = loc\n break\n\n # Abort early if there isn't one.\n if netrc_path is None:\n return\n\n ri = urlparse(url)\n\n # Strip port numbers from netloc. 
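The interplay of `tell()` and `seek()` above means `super_len` reports the bytes remaining from the current position, which is what a correct `Content-Length` needs after a partial read. A quick behavioural sketch (`super_len` is an internal helper, imported here only for illustration):

```python
import io
from requests.utils import super_len

buf = io.BytesIO(b'abcdef')
assert super_len(buf) == 6
buf.read(2)                  # advance the read position
assert super_len(buf) == 4   # only the unread tail is counted
assert super_len(b'abcdef') == 6
```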
This weird `if...encode`` dance is\n # used for Python 3.2, which doesn't support unicode literals.\n splitstr = b':'\n if isinstance(url, str):\n splitstr = splitstr.decode('ascii')\n host = ri.netloc.split(splitstr)[0]\n\n try:\n _netrc = netrc(netrc_path).authenticators(host)\n if _netrc:\n # Return with login / password\n login_i = (0 if _netrc[0] else 1)\n return (_netrc[login_i], _netrc[2])\n except (NetrcParseError, IOError):\n # If there was a parsing error or a permissions issue reading the file,\n # we'll just skip netrc auth unless explicitly asked to raise errors.\n if raise_errors:\n raise\n\n # App Engine hackiness.\n except (ImportError, AttributeError):\n pass\n\n\ndef guess_filename(obj):\n \"\"\"Tries to guess the filename of the given object.\"\"\"\n name = getattr(obj, 'name', None)\n if (name and isinstance(name, basestring) and name[0] != '<' and\n name[-1] != '>'):\n return os.path.basename(name)\n\n\ndef extract_zipped_paths(path):\n \"\"\"Replace nonexistent paths that look like they refer to a member of a zip\n archive with the location of an extracted copy of the target, or else\n just return the provided path unchanged.\n \"\"\"\n if os.path.exists(path):\n # this is already a valid path, no need to do anything further\n return path\n\n # find the first valid part of the provided path and treat that as a zip archive\n # assume the rest of the path is the name of a member in the archive\n archive, member = os.path.split(path)\n while archive and not os.path.exists(archive):\n archive, prefix = os.path.split(archive)\n if not prefix:\n # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),\n # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users\n break\n member = '/'.join([prefix, member])\n\n if not zipfile.is_zipfile(archive):\n return path\n\n zip_file = zipfile.ZipFile(archive)\n if member not in zip_file.namelist():\n return path\n\n # we have a valid zip archive and a valid member of that archive\n tmp = tempfile.gettempdir()\n extracted_path = os.path.join(tmp, member.split('/')[-1])\n if not os.path.exists(extracted_path):\n # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition\n with atomic_open(extracted_path) as file_handler:\n file_handler.write(zip_file.read(member))\n return extracted_path\n\n\n@contextlib.contextmanager\ndef atomic_open(filename):\n \"\"\"Write a file to the disk in an atomic fashion\"\"\"\n replacer = os.rename if sys.version_info[0] == 2 else os.replace\n tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))\n try:\n with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:\n yield tmp_handler\n replacer(tmp_name, filename)\n except BaseException:\n os.remove(tmp_name)\n raise\n\n\ndef from_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. 
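A short usage sketch for `atomic_open` (the target filename is made up): the bytes go to a sibling temporary file that is renamed into place only if the write completes, so a concurrent reader never observes a half-written file.

```python
import os
import tempfile
from requests.utils import atomic_open

target = os.path.join(tempfile.gettempdir(), 'requests-demo.bin')  # assumed path
with atomic_open(target) as fh:
    fh.write(b'complete payload or nothing')
with open(target, 'rb') as fh:
    assert fh.read() == b'complete payload or nothing'
```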
Unless it can not be represented as such, return an\n OrderedDict, e.g.,\n\n ::\n\n >>> from_key_val_list([('key', 'val')])\n OrderedDict([('key', 'val')])\n >>> from_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n >>> from_key_val_list({'key': 'val'})\n OrderedDict([('key', 'val')])\n\n :rtype: OrderedDict\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n return OrderedDict(value)\n\n\ndef to_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. If it can be, return a list of tuples, e.g.,\n\n ::\n\n >>> to_key_val_list([('key', 'val')])\n [('key', 'val')]\n >>> to_key_val_list({'key': 'val'})\n [('key', 'val')]\n >>> to_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n\n :rtype: list\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n if isinstance(value, Mapping):\n value = value.items()\n\n return list(value)\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_list_header(value):\n \"\"\"Parse lists as described by RFC 2068 Section 2.\n\n In particular, parse comma-separated lists where the elements of\n the list may include quoted-strings. A quoted-string could\n contain a comma. A non-quoted string could have quotes in the\n middle. Quotes are removed automatically after parsing.\n\n It basically works like :func:`parse_set_header` just that items\n may appear multiple times and case sensitivity is preserved.\n\n The return value is a standard :class:`list`:\n\n >>> parse_list_header('token, \"quoted value\"')\n ['token', 'quoted value']\n\n To create a header from the :class:`list` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a list header.\n :return: :class:`list`\n :rtype: list\n \"\"\"\n result = []\n for item in _parse_list_header(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_dict_header(value):\n \"\"\"Parse lists of key, value pairs as described by RFC 2068 Section 2 and\n convert them into a python dict:\n\n >>> d = parse_dict_header('foo=\"is a fish\", bar=\"as well\"')\n >>> type(d) is dict\n True\n >>> sorted(d.items())\n [('bar', 'as well'), ('foo', 'is a fish')]\n\n If there is no value for a key it will be `None`:\n\n >>> parse_dict_header('key_without_value')\n {'key_without_value': None}\n\n To create a header from the :class:`dict` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a dict header.\n :return: :class:`dict`\n :rtype: dict\n \"\"\"\n result = {}\n for item in _parse_list_header(value):\n if '=' not in item:\n result[item] = None\n continue\n name, value = item.split('=', 1)\n if value[:1] == value[-1:] == '\"':\n value = unquote_header_value(value[1:-1])\n result[name] = value\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef unquote_header_value(value, is_filename=False):\n r\"\"\"Unquotes a header value. 
(Reversal of :func:`quote_header_value`).\n This does not use the real unquoting but what browsers are actually\n using for quoting.\n\n :param value: the header value to unquote.\n :rtype: str\n \"\"\"\n if value and value[0] == value[-1] == '\"':\n # this is not the real unquoting, but fixing this so that the\n # RFC is met will result in bugs with internet explorer and\n # probably some other browsers as well. IE for example is\n # uploading files with \"C:\\foo\\bar.txt\" as filename\n value = value[1:-1]\n\n # if this is a filename and the starting characters look like\n # a UNC path, then just return the value without quotes. Using the\n # replace sequence below on a UNC path has the effect of turning\n # the leading double slash into a single slash and then\n # _fix_ie_filename() doesn't work correctly. See #458.\n if not is_filename or value[:2] != '\\\\\\\\':\n return value.replace('\\\\\\\\', '\\\\').replace('\\\\\"', '\"')\n return value\n\n\ndef dict_from_cookiejar(cj):\n \"\"\"Returns a key/value dictionary from a CookieJar.\n\n :param cj: CookieJar object to extract cookies from.\n :rtype: dict\n \"\"\"\n\n cookie_dict = {}\n\n for cookie in cj:\n cookie_dict[cookie.name] = cookie.value\n\n return cookie_dict\n\n\ndef add_dict_to_cookiejar(cj, cookie_dict):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cj: CookieJar to insert cookies into.\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :rtype: CookieJar\n \"\"\"\n\n return cookiejar_from_dict(cookie_dict, cj)\n\n\ndef get_encodings_from_content(content):\n \"\"\"Returns encodings from given content string.\n\n :param content: bytestring to extract encodings from.\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_encodings_from_content will be removed. For '\n 'more information, please see the discussion on issue #2266. 
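The two cookie helpers above round-trip cleanly with `cookiejar_from_dict`; a minimal sketch:

```python
from requests.cookies import cookiejar_from_dict
from requests.utils import add_dict_to_cookiejar, dict_from_cookiejar

jar = cookiejar_from_dict({'session': 'abc'})
add_dict_to_cookiejar(jar, {'theme': 'dark'})
assert dict_from_cookiejar(jar) == {'session': 'abc', 'theme': 'dark'}
```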
(This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n charset_re = re.compile(r'<meta.*?charset=[\"\\']*(.+?)[\"\\'>]', flags=re.I)\n pragma_re = re.compile(r'<meta.*?content=[\"\\']*;?charset=(.+?)[\"\\'>]', flags=re.I)\n xml_re = re.compile(r'^<\\?xml.*?encoding=[\"\\']*(.+?)[\"\\'>]')\n\n return (charset_re.findall(content) +\n pragma_re.findall(content) +\n xml_re.findall(content))\n\n\ndef _parse_content_type_header(header):\n \"\"\"Returns content type and parameters from given header\n\n :param header: string\n :return: tuple containing content type and dictionary of\n parameters\n \"\"\"\n\n tokens = header.split(';')\n content_type, params = tokens[0].strip(), tokens[1:]\n params_dict = {}\n items_to_strip = \"\\\"' \"\n\n for param in params:\n param = param.strip()\n if param:\n key, value = param, True\n index_of_equals = param.find(\"=\")\n if index_of_equals != -1:\n key = param[:index_of_equals].strip(items_to_strip)\n value = param[index_of_equals + 1:].strip(items_to_strip)\n params_dict[key.lower()] = value\n return content_type, params_dict\n\n\ndef get_encoding_from_headers(headers):\n \"\"\"Returns encodings from given HTTP Header Dict.\n\n :param headers: dictionary to extract encoding from.\n :rtype: str\n \"\"\"\n\n content_type = headers.get('content-type')\n\n if not content_type:\n return None\n\n content_type, params = _parse_content_type_header(content_type)\n\n if 'charset' in params:\n return params['charset'].strip(\"'\\\"\")\n\n if 'text' in content_type:\n return 'ISO-8859-1'\n\n if 'application/json' in content_type:\n # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset\n return 'utf-8'\n\n\ndef stream_decode_response_unicode(iterator, r):\n \"\"\"Stream decodes a iterator.\"\"\"\n\n if r.encoding is None:\n for item in iterator:\n yield item\n return\n\n decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')\n for chunk in iterator:\n rv = decoder.decode(chunk)\n if rv:\n yield rv\n rv = decoder.decode(b'', final=True)\n if rv:\n yield rv\n\n\ndef iter_slices(string, slice_length):\n \"\"\"Iterate over slices of a string.\"\"\"\n pos = 0\n if slice_length is None or slice_length <= 0:\n slice_length = len(string)\n while pos < len(string):\n yield string[pos:pos + slice_length]\n pos += slice_length\n\n\ndef get_unicode_from_response(r):\n \"\"\"Returns the requested content back in unicode.\n\n :param r: Response object to get unicode content from.\n\n Tried:\n\n 1. charset from content-type\n 2. fall back and replace all unicode characters\n\n :rtype: str\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_unicode_from_response will be removed. For '\n 'more information, please see the discussion on issue #2266. (This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n tried_encodings = []\n\n # Try charset from content-type\n encoding = get_encoding_from_headers(r.headers)\n\n if encoding:\n try:\n return str(r.content, encoding)\n except UnicodeError:\n tried_encodings.append(encoding)\n\n # Fall back:\n try:\n return str(r.content, encoding, errors='replace')\n except TypeError:\n return r.content\n\n\n# The unreserved URI characters (RFC 3986)\nUNRESERVED_SET = frozenset(\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\" + \"0123456789-._~\")\n\n\ndef unquote_unreserved(uri):\n \"\"\"Un-escape any percent-escape sequences in a URI that are unreserved\n characters. 
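The precedence implemented in `get_encoding_from_headers` above (explicit charset, then the RFC 2616 `text/*` default, then the RFC 4627 JSON assumption) can be pinned down with a few asserts:

```python
from requests.utils import get_encoding_from_headers

assert get_encoding_from_headers({'content-type': 'text/html; charset=UTF-8'}) == 'UTF-8'
assert get_encoding_from_headers({'content-type': 'text/html'}) == 'ISO-8859-1'
assert get_encoding_from_headers({'content-type': 'application/json'}) == 'utf-8'
assert get_encoding_from_headers({}) is None
```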
This leaves all reserved, illegal and non-ASCII bytes encoded.\n\n :rtype: str\n \"\"\"\n parts = uri.split('%')\n for i in range(1, len(parts)):\n h = parts[i][0:2]\n if len(h) == 2 and h.isalnum():\n try:\n c = chr(int(h, 16))\n except ValueError:\n raise InvalidURL(\"Invalid percent-escape sequence: '%s'\" % h)\n\n if c in UNRESERVED_SET:\n parts[i] = c + parts[i][2:]\n else:\n parts[i] = '%' + parts[i]\n else:\n parts[i] = '%' + parts[i]\n return ''.join(parts)\n\n\ndef requote_uri(uri):\n \"\"\"Re-quote the given URI.\n\n This function passes the given URI through an unquote/quote cycle to\n ensure that it is fully and consistently quoted.\n\n :rtype: str\n \"\"\"\n safe_with_percent = \"!#$%&'()*+,/:;=?@[]~\"\n safe_without_percent = \"!#$&'()*+,/:;=?@[]~\"\n try:\n # Unquote only the unreserved characters\n # Then quote only illegal characters (do not quote reserved,\n # unreserved, or '%')\n return quote(unquote_unreserved(uri), safe=safe_with_percent)\n except InvalidURL:\n # We couldn't unquote the given URI, so let's try quoting it, but\n # there may be unquoted '%'s in the URI. We need to make sure they're\n # properly quoted so they do not cause issues elsewhere.\n return quote(uri, safe=safe_without_percent)\n\n\ndef address_in_network(ip, net):\n \"\"\"This function allows you to check if an IP belongs to a network subnet\n\n Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24\n returns False if ip = 192.168.1.1 and net = 192.168.100.0/24\n\n :rtype: bool\n \"\"\"\n ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]\n netaddr, bits = net.split('/')\n netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]\n network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask\n return (ipaddr & netmask) == (network & netmask)\n\n\ndef dotted_netmask(mask):\n \"\"\"Converts mask from /xx format to xxx.xxx.xxx.xxx\n\n Example: if mask is 24 function returns 255.255.255.0\n\n :rtype: str\n \"\"\"\n bits = 0xffffffff ^ (1 << 32 - mask) - 1\n return socket.inet_ntoa(struct.pack('>I', bits))\n\n\ndef is_ipv4_address(string_ip):\n \"\"\"\n :rtype: bool\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except socket.error:\n return False\n return True\n\n\ndef is_valid_cidr(string_network):\n \"\"\"\n Very simple check of the cidr format in no_proxy variable.\n\n :rtype: bool\n \"\"\"\n if string_network.count('/') == 1:\n try:\n mask = int(string_network.split('/')[1])\n except ValueError:\n return False\n\n if mask < 1 or mask > 32:\n return False\n\n try:\n socket.inet_aton(string_network.split('/')[0])\n except socket.error:\n return False\n else:\n return False\n return True\n\n\n@contextlib.contextmanager\ndef set_environ(env_name, value):\n \"\"\"Set the environment variable 'env_name' to 'value'\n\n Save previous value, yield, and then restore the previous value stored in\n the environment variable 'env_name'.\n\n If 'value' is None, do nothing\"\"\"\n value_changed = value is not None\n if value_changed:\n old_value = os.environ.get(env_name)\n os.environ[env_name] = value\n try:\n yield\n finally:\n if value_changed:\n if old_value is None:\n del os.environ[env_name]\n else:\n os.environ[env_name] = old_value\n\n\ndef should_bypass_proxies(url, no_proxy):\n \"\"\"\n Returns whether we should bypass proxies or not.\n\n :rtype: bool\n \"\"\"\n # Prioritize lowercase environment variables over uppercase\n # to keep a consistent behaviour with other http projects (curl, wget).\n get_proxy = lambda k: os.environ.get(k) or 
os.environ.get(k.upper())\n\n # First check whether no_proxy is defined. If it is, check that the URL\n # we're getting isn't in the no_proxy list.\n no_proxy_arg = no_proxy\n if no_proxy is None:\n no_proxy = get_proxy('no_proxy')\n parsed = urlparse(url)\n\n if parsed.hostname is None:\n # URLs don't always have hostnames, e.g. file:/// urls.\n return True\n\n if no_proxy:\n # We need to check whether we match here. We need to see if we match\n # the end of the hostname, both with and without the port.\n no_proxy = (\n host for host in no_proxy.replace(' ', '').split(',') if host\n )\n\n if is_ipv4_address(parsed.hostname):\n for proxy_ip in no_proxy:\n if is_valid_cidr(proxy_ip):\n if address_in_network(parsed.hostname, proxy_ip):\n return True\n elif parsed.hostname == proxy_ip:\n # If no_proxy ip was defined in plain IP notation instead of cidr notation &\n # matches the IP of the index\n return True\n else:\n host_with_port = parsed.hostname\n if parsed.port:\n host_with_port += ':{}'.format(parsed.port)\n\n for host in no_proxy:\n if parsed.hostname.endswith(host) or host_with_port.endswith(host):\n # The URL does match something in no_proxy, so we don't want\n # to apply the proxies on this URL.\n return True\n\n with set_environ('no_proxy', no_proxy_arg):\n # parsed.hostname can be `None` in cases such as a file URI.\n try:\n bypass = proxy_bypass(parsed.hostname)\n except (TypeError, socket.gaierror):\n bypass = False\n\n if bypass:\n return True\n\n return False\n\n\ndef get_environ_proxies(url, no_proxy=None):\n \"\"\"\n Return a dict of environment proxies.\n\n :rtype: dict\n \"\"\"\n if should_bypass_proxies(url, no_proxy=no_proxy):\n return {}\n else:\n return getproxies()\n\n\ndef select_proxy(url, proxies):\n \"\"\"Select a proxy for the url, if applicable.\n\n :param url: The url being for the request\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n \"\"\"\n proxies = proxies or {}\n urlparts = urlparse(url)\n if urlparts.hostname is None:\n return proxies.get(urlparts.scheme, proxies.get('all'))\n\n proxy_keys = [\n urlparts.scheme + '://' + urlparts.hostname,\n urlparts.scheme,\n 'all://' + urlparts.hostname,\n 'all',\n ]\n proxy = None\n for proxy_key in proxy_keys:\n if proxy_key in proxies:\n proxy = proxies[proxy_key]\n break\n\n return proxy\n\n\ndef default_user_agent(name=\"python-requests\"):\n \"\"\"\n Return a string representing the default user agent.\n\n :rtype: str\n \"\"\"\n return '%s/%s' % (name, __version__)\n\n\ndef default_headers():\n \"\"\"\n :rtype: requests.structures.CaseInsensitiveDict\n \"\"\"\n return CaseInsensitiveDict({\n 'User-Agent': default_user_agent(),\n 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,\n 'Accept': '*/*',\n 'Connection': 'keep-alive',\n })\n\n\ndef parse_header_links(value):\n \"\"\"Return a list of parsed link headers proxies.\n\n i.e. 
Link: <http:/.../front.jpeg>; rel=front; type=\"image/jpeg\",<http://.../back.jpeg>; rel=back;type=\"image/jpeg\"\n\n :rtype: list\n \"\"\"\n\n links = []\n\n replace_chars = ' \\'\"'\n\n value = value.strip(replace_chars)\n if not value:\n return links\n\n for val in re.split(', *<', value):\n try:\n url, params = val.split(';', 1)\n except ValueError:\n url, params = val, ''\n\n link = {'url': url.strip('<> \\'\"')}\n\n for param in params.split(';'):\n try:\n key, value = param.split('=')\n except ValueError:\n break\n\n link[key.strip(replace_chars)] = value.strip(replace_chars)\n\n links.append(link)\n\n return links\n\n\n# Null bytes; no need to recreate these on each call to guess_json_utf\n_null = '\\x00'.encode('ascii') # encoding to ASCII for Python 3\n_null2 = _null * 2\n_null3 = _null * 3\n\n\ndef guess_json_utf(data):\n \"\"\"\n :rtype: str\n \"\"\"\n # JSON always starts with two ASCII characters, so detection is as\n # easy as counting the nulls and from their location and count\n # determine the encoding. Also detect a BOM, if present.\n sample = data[:4]\n if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):\n return 'utf-32' # BOM included\n if sample[:3] == codecs.BOM_UTF8:\n return 'utf-8-sig' # BOM included, MS style (discouraged)\n if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):\n return 'utf-16' # BOM included\n nullcount = sample.count(_null)\n if nullcount == 0:\n return 'utf-8'\n if nullcount == 2:\n if sample[::2] == _null2: # 1st and 3rd are null\n return 'utf-16-be'\n if sample[1::2] == _null2: # 2nd and 4th are null\n return 'utf-16-le'\n # Did not detect 2 valid UTF-16 ascii-range characters\n if nullcount == 3:\n if sample[:3] == _null3:\n return 'utf-32-be'\n if sample[1:] == _null3:\n return 'utf-32-le'\n # Did not detect a valid UTF-32 ascii-range character\n return None\n\n\ndef prepend_scheme_if_needed(url, new_scheme):\n \"\"\"Given a URL that may or may not have a scheme, prepend the given scheme.\n Does not replace a present scheme with the one provided as an argument.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)\n\n # urlparse is a finicky beast, and sometimes decides that there isn't a\n # netloc present. Assume that it's being over-cautious, and switch netloc\n # and path if urlparse decided there was no netloc.\n if not netloc:\n netloc, path = path, netloc\n\n return urlunparse((scheme, netloc, path, params, query, fragment))\n\n\ndef get_auth_from_url(url):\n \"\"\"Given a url with authentication components, extract them into a tuple of\n username,password.\n\n :rtype: (str,str)\n \"\"\"\n parsed = urlparse(url)\n\n try:\n auth = (unquote(parsed.username), unquote(parsed.password))\n except (AttributeError, TypeError):\n auth = ('', '')\n\n return auth\n\n\n# Moved outside of function to avoid recompile every call\n_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\\\S[^\\\\r\\\\n]*$|^$')\n_CLEAN_HEADER_REGEX_STR = re.compile(r'^\\S[^\\r\\n]*$|^$')\n\n\ndef check_header_validity(header):\n \"\"\"Verifies that header value is a string which doesn't contain\n leading whitespace or return characters. 
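Because JSON must begin with two ASCII characters, the positions of null bytes in the first four bytes identify the UTF flavour, which is exactly what `guess_json_utf` above exploits. A quick check:

```python
import codecs
from requests.utils import guess_json_utf

assert guess_json_utf(b'{"a": 1}') == 'utf-8'                         # no nulls
assert guess_json_utf('{"a": 1}'.encode('utf-16-le')) == 'utf-16-le'  # 2nd/4th bytes null
assert guess_json_utf(codecs.BOM_UTF8 + b'{}') == 'utf-8-sig'         # BOM wins
```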
This prevents unintended\n header injection.\n\n :param header: tuple, in the format (name, value).\n \"\"\"\n name, value = header\n\n if isinstance(value, bytes):\n pat = _CLEAN_HEADER_REGEX_BYTE\n else:\n pat = _CLEAN_HEADER_REGEX_STR\n try:\n if not pat.match(value):\n raise InvalidHeader(\"Invalid return character or leading space in header: %s\" % name)\n except TypeError:\n raise InvalidHeader(\"Value for header {%s: %s} must be of type str or \"\n \"bytes, not %s\" % (name, value, type(value)))\n\n\ndef urldefragauth(url):\n \"\"\"\n Given a url remove the fragment and the authentication part.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url)\n\n # see func:`prepend_scheme_if_needed`\n if not netloc:\n netloc, path = path, netloc\n\n netloc = netloc.rsplit('@', 1)[-1]\n\n return urlunparse((scheme, netloc, path, params, query, ''))\n\n\ndef rewind_body(prepared_request):\n \"\"\"Move file pointer back to its recorded starting position\n so it can be read again on redirect.\n \"\"\"\n body_seek = getattr(prepared_request.body, 'seek', None)\n if body_seek is not None and isinstance(prepared_request._body_position, integer_types):\n try:\n body_seek(prepared_request._body_position)\n except (IOError, OSError):\n raise UnrewindableBodyError(\"An error occurred when rewinding request \"\n \"body for redirect.\")\n else:\n raise UnrewindableBodyError(\"Unable to rewind request body for redirect.\")\n",
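Two of the helpers above lend themselves to one-line demonstrations: `urldefragauth` drops both the userinfo and the fragment, and `check_header_validity` rejects values that would permit header injection.

```python
from requests.exceptions import InvalidHeader
from requests.utils import check_header_validity, urldefragauth

assert urldefragauth('https://user:pass@example.com/path#frag') == 'https://example.com/path'

check_header_validity(('X-Ok', 'value'))        # passes silently
try:
    check_header_validity(('X-Bad', 'a\r\nb'))  # CR/LF would enable injection
except InvalidHeader:
    pass
```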
"path": "requests/utils.py"
},
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Tests for Requests.\"\"\"\n\nfrom __future__ import division\nimport json\nimport os\nimport pickle\nimport collections\nimport contextlib\nimport warnings\nimport re\n\nimport io\nimport requests\nimport pytest\nfrom requests.adapters import HTTPAdapter\nfrom requests.auth import HTTPDigestAuth, _basic_auth_str\nfrom requests.compat import (\n Morsel, cookielib, getproxies, str, urlparse,\n builtin_str)\nfrom requests.cookies import (\n cookiejar_from_dict, morsel_to_cookie)\nfrom requests.exceptions import (\n ConnectionError, ConnectTimeout, InvalidSchema, InvalidURL,\n MissingSchema, ReadTimeout, Timeout, RetryError, TooManyRedirects,\n ProxyError, InvalidHeader, UnrewindableBodyError, SSLError, InvalidProxyURL, InvalidJSONError)\nfrom requests.models import PreparedRequest\nfrom requests.structures import CaseInsensitiveDict\nfrom requests.sessions import SessionRedirectMixin\nfrom requests.models import urlencode\nfrom requests.hooks import default_hooks\nfrom requests.compat import MutableMapping\n\nfrom .compat import StringIO, u\nfrom .utils import override_environ\nfrom urllib3.util import Timeout as Urllib3Timeout\n\n# Requests to this URL should always fail with a connection timeout (nothing\n# listening on that port)\nTARPIT = 'http://10.255.255.1'\n\n# This is to avoid waiting the timeout of using TARPIT\nINVALID_PROXY='http://localhost:1'\n\ntry:\n from ssl import SSLContext\n del SSLContext\n HAS_MODERN_SSL = True\nexcept ImportError:\n HAS_MODERN_SSL = False\n\ntry:\n requests.pyopenssl\n HAS_PYOPENSSL = True\nexcept AttributeError:\n HAS_PYOPENSSL = False\n\n\nclass TestRequests:\n\n digest_auth_algo = ('MD5', 'SHA-256', 'SHA-512')\n\n def test_entry_points(self):\n\n requests.session\n requests.session().get\n requests.session().head\n requests.get\n requests.head\n requests.put\n requests.patch\n requests.post\n # Not really an entry point, but people rely on it.\n from requests.packages.urllib3.poolmanager import PoolManager\n\n @pytest.mark.parametrize(\n 'exception, url', (\n (MissingSchema, 'hiwpefhipowhefopw'),\n (InvalidSchema, 'localhost:3128'),\n (InvalidSchema, 'localhost.localdomain:3128/'),\n (InvalidSchema, '10.122.1.1:3128/'),\n (InvalidURL, 'http://'),\n ))\n def test_invalid_url(self, exception, url):\n with pytest.raises(exception):\n requests.get(url)\n\n def test_basic_building(self):\n req = requests.Request()\n req.url = 'http://kennethreitz.org/'\n req.data = {'life': '42'}\n\n pr = req.prepare()\n assert pr.url == req.url\n assert pr.body == 'life=42'\n\n @pytest.mark.parametrize('method', ('GET', 'HEAD'))\n def test_no_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert 'Content-Length' not in req.headers\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_no_body_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower())).prepare()\n assert req.headers['Content-Length'] == '0'\n\n @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS'))\n def test_empty_content_length(self, httpbin, method):\n req = requests.Request(method, httpbin(method.lower()), data='').prepare()\n assert req.headers['Content-Length'] == '0'\n\n def test_override_content_length(self, httpbin):\n headers = {\n 'Content-Length': 'not zero'\n }\n r = requests.Request('POST', httpbin('post'), headers=headers).prepare()\n assert 'Content-Length' in r.headers\n assert 
r.headers['Content-Length'] == 'not zero'\n\n def test_path_is_not_double_encoded(self):\n request = requests.Request('GET', \"http://0.0.0.0/get/test case\").prepare()\n\n assert request.path_url == '/get/test%20case'\n\n @pytest.mark.parametrize(\n 'url, expected', (\n ('http://example.com/path#fragment', 'http://example.com/path?a=b#fragment'),\n ('http://example.com/path?key=value#fragment', 'http://example.com/path?key=value&a=b#fragment')\n ))\n def test_params_are_added_before_fragment(self, url, expected):\n request = requests.Request('GET', url, params={\"a\": \"b\"}).prepare()\n assert request.url == expected\n\n def test_params_original_order_is_preserved_by_default(self):\n param_ordered_dict = collections.OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1)))\n session = requests.Session()\n request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict)\n prep = session.prepare_request(request)\n assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1'\n\n def test_params_bytes_are_encoded(self):\n request = requests.Request('GET', 'http://example.com',\n params=b'test=foo').prepare()\n assert request.url == 'http://example.com/?test=foo'\n\n def test_binary_put(self):\n request = requests.Request('PUT', 'http://example.com',\n data=u\"ööö\".encode(\"utf-8\")).prepare()\n assert isinstance(request.body, bytes)\n\n def test_whitespaces_are_removed_from_url(self):\n # Test for issue #3696\n request = requests.Request('GET', ' http://example.com').prepare()\n assert request.url == 'http://example.com/'\n\n @pytest.mark.parametrize('scheme', ('http://', 'HTTP://', 'hTTp://', 'HttP://'))\n def test_mixed_case_scheme_acceptable(self, httpbin, scheme):\n s = requests.Session()\n s.proxies = getproxies()\n parts = urlparse(httpbin('get'))\n url = scheme + parts.netloc + parts.path\n r = requests.Request('GET', url)\n r = s.send(r.prepare())\n assert r.status_code == 200, 'failed for scheme {}'.format(scheme)\n\n def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n\n assert r.status_code == 200\n\n def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):\n r = requests.get(httpbin('redirect', '1'))\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin):\n r = requests.post(httpbin('redirect-to'), data='test', params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == 'test'\n\n def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin):\n byte_str = b'test'\n r = requests.post(httpbin('redirect-to'), data=io.BytesIO(byte_str), params={'url': 'post', 'status_code': 307})\n assert r.status_code == 200\n assert r.history[0].status_code == 307\n assert r.history[0].is_redirect\n assert r.json()['data'] == byte_str.decode('utf-8')\n\n def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin):\n try:\n requests.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '20')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 30\n else:\n pytest.fail('Expected redirect to raise TooManyRedirects but it did not')\n\n def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin):\n s = requests.session()\n s.max_redirects = 5\n try:\n 
s.get(httpbin('relative-redirect', '50'))\n except TooManyRedirects as e:\n url = httpbin('relative-redirect', '45')\n assert e.request.url == url\n assert e.response.url == url\n assert len(e.response.history) == 5\n else:\n pytest.fail('Expected custom max number of redirects to be respected but was not')\n\n def test_http_301_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '301'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_301_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '301'), allow_redirects=True)\n print(r.content)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 301\n assert r.history[0].is_redirect\n\n def test_http_302_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '302'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_302_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '302'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n\n def test_http_303_changes_post_to_get(self, httpbin):\n r = requests.post(httpbin('status', '303'))\n assert r.status_code == 200\n assert r.request.method == 'GET'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_http_303_doesnt_change_head_to_get(self, httpbin):\n r = requests.head(httpbin('status', '303'), allow_redirects=True)\n assert r.status_code == 200\n assert r.request.method == 'HEAD'\n assert r.history[0].status_code == 303\n assert r.history[0].is_redirect\n\n def test_header_and_body_removal_on_redirect(self, httpbin):\n purged_headers = ('Content-Length', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data={'test': 'data'})\n prep = ses.prepare_request(req)\n resp = ses.send(prep)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = 'get'\n\n # Run request through resolve_redirects\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_transfer_enc_removal_on_redirect(self, httpbin):\n purged_headers = ('Transfer-Encoding', 'Content-Type')\n ses = requests.Session()\n req = requests.Request('POST', httpbin('post'), data=(b'x' for x in range(1)))\n prep = ses.prepare_request(req)\n assert 'Transfer-Encoding' in prep.headers\n\n # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33\n resp = requests.Response()\n resp.raw = io.BytesIO(b'the content')\n resp.request = prep\n setattr(resp.raw, 'release_conn', lambda *args: args)\n\n # Mimic a redirect response\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n\n # Run request through resolve_redirect\n next_resp = next(ses.resolve_redirects(resp, prep))\n assert next_resp.request.body is None\n for header in purged_headers:\n assert header not in next_resp.request.headers\n\n def test_fragment_maintained_on_redirect(self, httpbin):\n fragment = \"#view=edit&token=hunter2\"\n r = requests.get(httpbin('redirect-to?url=get')+fragment)\n\n assert len(r.history) > 0\n assert r.history[0].request.url == 
httpbin('redirect-to?url=get')+fragment\n assert r.url == httpbin('get')+fragment\n\n def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n\n assert heads['User-agent'] in r.text\n assert r.status_code == 200\n\n def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):\n heads = {'User-agent': 'Mozilla/5.0'}\n\n r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)\n assert r.status_code == 200\n\n def test_set_cookie_on_301(self, httpbin):\n s = requests.session()\n url = httpbin('cookies/set?foo=bar')\n s.get(url)\n assert s.cookies['foo'] == 'bar'\n\n def test_cookie_sent_on_redirect(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')\n assert 'Cookie' in r.json()['headers']\n\n def test_cookie_removed_on_expire(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=bar'))\n assert s.cookies['foo'] == 'bar'\n s.get(\n httpbin('response-headers'),\n params={\n 'Set-Cookie':\n 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT'\n }\n )\n assert 'foo' not in s.cookies\n\n def test_cookie_quote_wrapped(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies/set?foo=\"bar:baz\"'))\n assert s.cookies['foo'] == '\"bar:baz\"'\n\n def test_cookie_persists_via_api(self, httpbin):\n s = requests.session()\n r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})\n assert 'foo' in r.request.headers['Cookie']\n assert 'foo' in r.history[0].request.headers['Cookie']\n\n def test_request_cookie_overrides_session_cookie(self, httpbin):\n s = requests.session()\n s.cookies['foo'] = 'bar'\n r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n assert r.json()['cookies']['foo'] == 'baz'\n # Session cookie should not be modified\n assert s.cookies['foo'] == 'bar'\n\n def test_request_cookies_not_persisted(self, httpbin):\n s = requests.session()\n s.get(httpbin('cookies'), cookies={'foo': 'baz'})\n # Sending a request with cookies should not add cookies to the session\n assert not s.cookies\n\n def test_generic_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n s.cookies = cj\n r = s.get(httpbin('cookies'))\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n # Make sure the session cj is still the custom one\n assert s.cookies is cj\n\n def test_param_cookiejar_works(self, httpbin):\n cj = cookielib.CookieJar()\n cookiejar_from_dict({'foo': 'bar'}, cj)\n s = requests.session()\n r = s.get(httpbin('cookies'), cookies=cj)\n # Make sure the cookie was sent\n assert r.json()['cookies']['foo'] == 'bar'\n\n def test_cookielib_cookiejar_on_redirect(self, httpbin):\n \"\"\"Tests resolve_redirect doesn't fail when merging cookies\n with non-RequestsCookieJar cookiejar.\n\n See GH #3579\n \"\"\"\n cj = cookiejar_from_dict({'foo': 'bar'}, cookielib.CookieJar())\n s = requests.Session()\n s.cookies = cookiejar_from_dict({'cookie': 'tasty'})\n\n # Prepare request without using Session\n req = requests.Request('GET', httpbin('headers'), cookies=cj)\n prep_req = req.prepare()\n\n # Send request and simulate redirect\n resp = s.send(prep_req)\n resp.status_code = 302\n resp.headers['location'] = httpbin('get')\n redirects = s.resolve_redirects(resp, prep_req)\n resp = next(redirects)\n\n # Verify CookieJar isn't being converted to RequestsCookieJar\n assert 
isinstance(prep_req._cookies, cookielib.CookieJar)\n assert isinstance(resp.request._cookies, cookielib.CookieJar)\n assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar)\n\n cookies = {}\n for c in resp.request._cookies:\n cookies[c.name] = c.value\n assert cookies['foo'] == 'bar'\n assert cookies['cookie'] == 'tasty'\n\n def test_requests_in_history_are_not_overridden(self, httpbin):\n resp = requests.get(httpbin('redirect/3'))\n urls = [r.url for r in resp.history]\n req_urls = [r.request.url for r in resp.history]\n assert urls == req_urls\n\n def test_history_is_always_a_list(self, httpbin):\n \"\"\"Show that even with redirects, Response.history is always a list.\"\"\"\n resp = requests.get(httpbin('get'))\n assert isinstance(resp.history, list)\n resp = requests.get(httpbin('redirect/1'))\n assert isinstance(resp.history, list)\n assert not isinstance(resp.history, tuple)\n\n def test_headers_on_session_with_None_are_not_sent(self, httpbin):\n \"\"\"Do not send headers in Session.headers with None values.\"\"\"\n ses = requests.Session()\n ses.headers['Accept-Encoding'] = None\n req = requests.Request('GET', httpbin('get'))\n prep = ses.prepare_request(req)\n assert 'Accept-Encoding' not in prep.headers\n\n def test_headers_preserve_order(self, httpbin):\n \"\"\"Preserve order when headers provided as OrderedDict.\"\"\"\n ses = requests.Session()\n ses.headers = collections.OrderedDict()\n ses.headers['Accept-Encoding'] = 'identity'\n ses.headers['First'] = '1'\n ses.headers['Second'] = '2'\n headers = collections.OrderedDict([('Third', '3'), ('Fourth', '4')])\n headers['Fifth'] = '5'\n headers['Second'] = '222'\n req = requests.Request('GET', httpbin('get'), headers=headers)\n prep = ses.prepare_request(req)\n items = list(prep.headers.items())\n assert items[0] == ('Accept-Encoding', 'identity')\n assert items[1] == ('First', '1')\n assert items[2] == ('Second', '222')\n assert items[3] == ('Third', '3')\n assert items[4] == ('Fourth', '4')\n assert items[5] == ('Fifth', '5')\n\n @pytest.mark.parametrize('key', ('User-agent', 'user-agent'))\n def test_user_agent_transfers(self, httpbin, key):\n\n heads = {key: 'Mozilla/5.0 (github.com/psf/requests)'}\n\n r = requests.get(httpbin('user-agent'), headers=heads)\n assert heads[key] in r.text\n\n def test_HTTP_200_OK_HEAD(self, httpbin):\n r = requests.head(httpbin('get'))\n assert r.status_code == 200\n\n def test_HTTP_200_OK_PUT(self, httpbin):\n r = requests.put(httpbin('put'))\n assert r.status_code == 200\n\n def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):\n auth = ('user', 'pass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'username, password', (\n ('user', 'pass'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8')),\n (42, 42),\n (None, None),\n ))\n def test_set_basicauth(self, httpbin, username, password):\n auth = (username, password)\n url = httpbin('get')\n\n r = requests.Request('GET', url, auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == _basic_auth_str(username, password)\n\n def test_basicauth_encodes_byte_strings(self):\n \"\"\"Ensure b'test' formats as the byte string \"test\" rather\n than the unicode string \"b'test'\" in Python 3.\n \"\"\"\n auth = (b'\\xc5\\xafsername', b'test\\xc6\\xb6')\n r = 
requests.Request('GET', 'http://localhost', auth=auth)\n p = r.prepare()\n\n assert p.headers['Authorization'] == 'Basic xa9zZXJuYW1lOnRlc3TGtg=='\n\n @pytest.mark.parametrize(\n 'url, exception', (\n # Connecting to an unknown domain should raise a ConnectionError\n ('http://doesnotexist.google.com', ConnectionError),\n # Connecting to an invalid port should raise a ConnectionError\n ('http://localhost:1', ConnectionError),\n # Inputing a URL that cannot be parsed should raise an InvalidURL error\n ('http://fe80::5054:ff:fe5a:fc0', InvalidURL)\n ))\n def test_errors(self, url, exception):\n with pytest.raises(exception):\n requests.get(url, timeout=1)\n\n def test_proxy_error(self):\n # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError\n with pytest.raises(ProxyError):\n requests.get('http://localhost:1', proxies={'http': 'non-resolvable-address'})\n\n def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure):\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'http:/badproxyurl:3128'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http://:8080'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin_secure(), proxies={'https': 'https://'})\n\n with pytest.raises(InvalidProxyURL):\n requests.get(httpbin(), proxies={'http': 'http:///example.com:8080'})\n\n def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n request = requests.Request('GET', httpbin())\n prepared = session.prepare_request(request)\n session.send(prepared)\n\n def test_respect_proxy_env_on_send_with_redirects(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n url = httpbin('redirect/1')\n print(url)\n request = requests.Request('GET', url)\n session.send(request.prepare())\n\n def test_respect_proxy_env_on_get(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.get(httpbin())\n\n def test_respect_proxy_env_on_request(self, httpbin):\n with override_environ(http_proxy=INVALID_PROXY):\n with pytest.raises(ProxyError):\n session = requests.Session()\n session.request(method='GET', url=httpbin())\n\n def test_basicauth_with_netrc(self, httpbin):\n auth = ('user', 'pass')\n wrong_auth = ('wronguser', 'wrongpass')\n url = httpbin('basic-auth', 'user', 'pass')\n\n old_auth = requests.sessions.get_netrc_auth\n\n try:\n def get_netrc_auth_mock(url):\n return auth\n requests.sessions.get_netrc_auth = get_netrc_auth_mock\n\n # Should use netrc and work.\n r = requests.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n r = requests.get(url, auth=wrong_auth)\n assert r.status_code == 401\n\n s = requests.session()\n\n # Should use netrc and work.\n r = s.get(url)\n assert r.status_code == 200\n\n # Given auth should override and fail.\n s.auth = wrong_auth\n r = s.get(url)\n assert r.status_code == 401\n finally:\n requests.sessions.get_netrc_auth = old_auth\n\n def 
test_DIGEST_HTTP_200_OK_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype, 'never')\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n r = requests.get(url)\n assert r.status_code == 401\n print(r.headers['WWW-Authenticate'])\n\n s = requests.session()\n s.auth = HTTPDigestAuth('user', 'pass')\n r = s.get(url)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n r = requests.get(url)\n assert r.cookies['fake'] == 'fake_value'\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 200\n\n def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n auth = HTTPDigestAuth('user', 'pass')\n s = requests.Session()\n s.get(url, auth=auth)\n assert s.cookies['fake'] == 'fake_value'\n\n def test_DIGEST_STREAM(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth, stream=True)\n assert r.raw.read() != b''\n\n r = requests.get(url, auth=auth, stream=False)\n assert r.raw.read() == b''\n\n def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'wrongpass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert r.status_code == 401\n\n r = requests.get(url)\n assert r.status_code == 401\n\n s = requests.session()\n s.auth = auth\n r = s.get(url)\n assert r.status_code == 401\n\n def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):\n\n for authtype in self.digest_auth_algo:\n auth = HTTPDigestAuth('user', 'pass')\n url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype)\n\n r = requests.get(url, auth=auth)\n assert '\"auth\"' in r.request.headers['Authorization']\n\n def test_POSTBIN_GET_POST_FILES(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_invalid_files_input(self, httpbin):\n\n url = httpbin('post')\n post = requests.post(url,\n files={\"random-file-1\": None, \"random-file-2\": 1})\n assert b'name=\"random-file-1\"' not in post.request.body\n assert b'name=\"random-file-2\"' in post.request.body\n\n def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin):\n\n class TestStream(object):\n def __init__(self, data):\n self.data = data.encode()\n self.length = len(self.data)\n self.index = 0\n\n def __len__(self):\n return self.length\n\n def read(self, size=None):\n if size:\n ret = self.data[self.index:self.index + size]\n self.index += size\n else:\n ret = self.data[self.index:]\n self.index = self.length\n return ret\n\n def tell(self):\n return self.index\n\n def seek(self, offset, where=0):\n if where == 0:\n self.index = offset\n elif where == 1:\n 
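# where=1 is a relative seek from the current position, mirroring io.IOBase.seek\n                    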
self.index += offset\n elif where == 2:\n self.index = self.length + offset\n\n test = TestStream('test')\n post1 = requests.post(httpbin('post'), data=test)\n assert post1.status_code == 200\n assert post1.json()['data'] == 'test'\n\n test = TestStream('test')\n test.seek(2)\n post2 = requests.post(httpbin('post'), data=test)\n assert post2.status_code == 200\n assert post2.json()['data'] == 'st'\n\n def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):\n\n url = httpbin('post')\n requests.post(url).raise_for_status()\n\n post1 = requests.post(url, data={'some': 'data'})\n assert post1.status_code == 200\n\n with open('requirements-dev.txt') as f:\n post2 = requests.post(url, data={'some': 'data'}, files={'some': f})\n assert post2.status_code == 200\n\n post4 = requests.post(url, data='[{\"some\": \"json\"}]')\n assert post4.status_code == 200\n\n with pytest.raises(ValueError):\n requests.post(url, files=['bad file data'])\n\n def test_post_with_custom_mapping(self, httpbin):\n class CustomMapping(MutableMapping):\n def __init__(self, *args, **kwargs):\n self.data = dict(*args, **kwargs)\n\n def __delitem__(self, key):\n del self.data[key]\n\n def __getitem__(self, key):\n return self.data[key]\n\n def __setitem__(self, key, value):\n self.data[key] = value\n\n def __iter__(self):\n return iter(self.data)\n\n def __len__(self):\n return len(self.data)\n\n data = CustomMapping({'some': 'data'})\n url = httpbin('post')\n found_json = requests.post(url, data=data).json().get('form')\n assert found_json == {'some': 'data'}\n\n def test_conflicting_post_params(self, httpbin):\n url = httpbin('post')\n with open('requirements-dev.txt') as f:\n with pytest.raises(ValueError):\n requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})\n with pytest.raises(ValueError):\n requests.post(url, data=u('[{\"some\": \"data\"}]'), files={'some': f})\n\n def test_request_ok_set(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n assert not r.ok\n\n def test_status_raising(self, httpbin):\n r = requests.get(httpbin('status', '404'))\n with pytest.raises(requests.exceptions.HTTPError):\n r.raise_for_status()\n\n r = requests.get(httpbin('status', '500'))\n assert not r.ok\n\n def test_decompress_gzip(self, httpbin):\n r = requests.get(httpbin('gzip'))\n r.content.decode('ascii')\n\n @pytest.mark.parametrize(\n 'url, params', (\n ('/get', {'foo': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'føø': 'føø'}),\n ('/get', {'foo': 'foo'}),\n ('ø', {'foo': 'foo'}),\n ))\n def test_unicode_get(self, httpbin, url, params):\n requests.get(httpbin(url), params=params)\n\n def test_unicode_header_name(self, httpbin):\n requests.put(\n httpbin('put'),\n headers={str('Content-Type'): 'application/octet-stream'},\n data='\\xff') # compat.str is unicode.\n\n def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle):\n requests.get(httpbin_secure('status', '301'), verify=httpbin_ca_bundle)\n\n def test_invalid_ca_certificate_path(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), verify=INVALID_PATH)\n assert str(e.value) == 'Could not find a suitable TLS CA certificate bundle, invalid path: {}'.format(INVALID_PATH)\n\n def test_invalid_ssl_certificate_files(self, httpbin_secure):\n INVALID_PATH = '/garbage'\n with pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=INVALID_PATH)\n assert str(e.value) == 'Could not find the TLS certificate file, invalid path: {}'.format(INVALID_PATH)\n\n with 
pytest.raises(IOError) as e:\n requests.get(httpbin_secure(), cert=('.', INVALID_PATH))\n assert str(e.value) == 'Could not find the TLS key file, invalid path: {}'.format(INVALID_PATH)\n\n def test_http_with_certificate(self, httpbin):\n r = requests.get(httpbin(), cert='.')\n assert r.status_code == 200\n\n def test_https_warnings(self, nosan_server):\n \"\"\"warnings are emitted with requests.get\"\"\"\n host, port, ca_bundle = nosan_server\n if HAS_MODERN_SSL or HAS_PYOPENSSL:\n warnings_expected = ('SubjectAltNameWarning', )\n else:\n warnings_expected = ('SNIMissingWarning',\n 'InsecurePlatformWarning',\n 'SubjectAltNameWarning', )\n\n with pytest.warns(None) as warning_records:\n warnings.simplefilter('always')\n requests.get(\"https://localhost:{}/\".format(port), verify=ca_bundle)\n\n warning_records = [item for item in warning_records\n if item.category.__name__ != 'ResourceWarning']\n\n warnings_category = tuple(\n item.category.__name__ for item in warning_records)\n assert warnings_category == warnings_expected\n\n def test_certificate_failure(self, httpbin_secure):\n \"\"\"\n When underlying SSL problems occur, an SSLError is raised.\n \"\"\"\n with pytest.raises(SSLError):\n # Our local httpbin does not have a trusted CA, so this call will\n # fail if we use our default trust bundle.\n requests.get(httpbin_secure('status', '200'))\n\n def test_urlencoded_get_query_multivalued_param(self, httpbin):\n\n r = requests.get(httpbin('get'), params={'test': ['foo', 'baz']})\n assert r.status_code == 200\n assert r.url == httpbin('get?test=foo&test=baz')\n\n def test_form_encoded_post_query_multivalued_element(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data=dict(test=['foo', 'baz']))\n prep = r.prepare()\n assert prep.body == 'test=foo&test=baz'\n\n def test_different_encodings_dont_break_post(self, httpbin):\n r = requests.post(httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n params={'blah': 'asdf1234'},\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'data', (\n {'stuff': u('ëlïxr')},\n {'stuff': u('ëlïxr').encode('utf-8')},\n {'stuff': 'elixr'},\n {'stuff': 'elixr'.encode('utf-8')},\n ))\n def test_unicode_multipart_post(self, httpbin, data):\n r = requests.post(httpbin('post'),\n data=data,\n files={'file': ('test_requests.py', open(__file__, 'rb'))})\n assert r.status_code == 200\n\n def test_unicode_multipart_post_fieldnames(self, httpbin):\n filename = os.path.splitext(__file__)[0] + '.py'\n r = requests.Request(\n method='POST', url=httpbin('post'),\n data={'stuff'.encode('utf-8'): 'elixr'},\n files={'file': ('test_requests.py', open(filename, 'rb'))})\n prep = r.prepare()\n assert b'name=\"stuff\"' in prep.body\n assert b'name=\"b\\'stuff\\'\"' not in prep.body\n\n def test_unicode_method_name(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.request(\n method=u('POST'), url=httpbin('post'), files=files)\n assert r.status_code == 200\n\n def test_unicode_method_name_with_request_object(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n s = requests.Session()\n req = requests.Request(u('POST'), httpbin('post'), files=files)\n prep = s.prepare_request(req)\n assert isinstance(prep.method, builtin_str)\n assert prep.method == 'POST'\n\n resp = s.send(prep)\n assert resp.status_code == 200\n\n def test_non_prepared_request_error(self):\n s = requests.Session()\n req = requests.Request(u('POST'), '/')\n\n with 
pytest.raises(ValueError) as e:\n s.send(req)\n assert str(e.value) == 'You can only send PreparedRequests.'\n\n def test_custom_content_type(self, httpbin):\n r = requests.post(\n httpbin('post'),\n data={'stuff': json.dumps({'a': 123})},\n files={\n 'file1': ('test_requests.py', open(__file__, 'rb')),\n 'file2': ('test_requests', open(__file__, 'rb'),\n 'text/py-content-type')})\n assert r.status_code == 200\n assert b\"text/py-content-type\" in r.request.body\n\n def test_hook_receives_request_arguments(self, httpbin):\n def hook(resp, **kwargs):\n assert resp is not None\n assert kwargs != {}\n\n s = requests.Session()\n r = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = s.prepare_request(r)\n s.send(prep)\n\n def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):\n hook = lambda x, *args, **kwargs: x\n s = requests.Session()\n s.hooks['response'].append(hook)\n r = requests.Request('GET', httpbin())\n prep = s.prepare_request(r)\n assert prep.hooks['response'] != []\n assert prep.hooks['response'] == [hook]\n\n def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):\n hook1 = lambda x, *args, **kwargs: x\n hook2 = lambda x, *args, **kwargs: x\n assert hook1 is not hook2\n s = requests.Session()\n s.hooks['response'].append(hook2)\n r = requests.Request('GET', httpbin(), hooks={'response': [hook1]})\n prep = s.prepare_request(r)\n assert prep.hooks['response'] == [hook1]\n\n def test_prepared_request_hook(self, httpbin):\n def hook(resp, **kwargs):\n resp.hook_working = True\n return resp\n\n req = requests.Request('GET', httpbin(), hooks={'response': hook})\n prep = req.prepare()\n\n s = requests.Session()\n s.proxies = getproxies()\n resp = s.send(prep)\n\n assert hasattr(resp, 'hook_working')\n\n def test_prepared_from_session(self, httpbin):\n class DummyAuth(requests.auth.AuthBase):\n def __call__(self, r):\n r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'\n return r\n\n req = requests.Request('GET', httpbin('headers'))\n assert not req.auth\n\n s = requests.Session()\n s.auth = DummyAuth()\n\n prep = s.prepare_request(req)\n resp = s.send(prep)\n\n assert resp.json()['headers'][\n 'Dummy-Auth-Test'] == 'dummy-auth-test-ok'\n\n def test_prepare_request_with_bytestring_url(self):\n req = requests.Request('GET', b'https://httpbin.org/')\n s = requests.Session()\n prep = s.prepare_request(req)\n assert prep.url == \"https://httpbin.org/\"\n\n def test_request_with_bytestring_host(self, httpbin):\n s = requests.Session()\n resp = s.request(\n 'GET',\n httpbin('cookies/set?cookie=value'),\n allow_redirects=False,\n headers={'Host': b'httpbin.org'}\n )\n assert resp.cookies.get('cookie') == 'value'\n\n def test_links(self):\n r = requests.Response()\n r.headers = {\n 'cache-control': 'public, max-age=60, s-maxage=60',\n 'connection': 'keep-alive',\n 'content-encoding': 'gzip',\n 'content-type': 'application/json; charset=utf-8',\n 'date': 'Sat, 26 Jan 2013 16:47:56 GMT',\n 'etag': '\"6ff6a73c0e446c1f61614769e3ceb778\"',\n 'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT',\n 'link': ('<https://api.github.com/users/kennethreitz/repos?'\n 'page=2&per_page=10>; rel=\"next\", <https://api.github.'\n 'com/users/kennethreitz/repos?page=7&per_page=10>; '\n ' rel=\"last\"'),\n 'server': 'GitHub.com',\n 'status': '200 OK',\n 'vary': 'Accept',\n 'x-content-type-options': 'nosniff',\n 'x-github-media-type': 'github.beta',\n 'x-ratelimit-limit': '60',\n 'x-ratelimit-remaining': '57'\n }\n assert r.links['next']['rel'] == 'next'\n\n def 
test_cookie_parameters(self):\n key = 'some_cookie'\n value = 'some_value'\n secure = True\n domain = 'test.com'\n rest = {'HttpOnly': True}\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, secure=secure, domain=domain, rest=rest)\n\n assert len(jar) == 1\n assert 'some_cookie' in jar\n\n cookie = list(jar)[0]\n assert cookie.secure == secure\n assert cookie.domain == domain\n assert cookie._rest['HttpOnly'] == rest['HttpOnly']\n\n def test_cookie_as_dict_keeps_len(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert len(jar) == 2\n assert len(d1) == 2\n assert len(d2) == 2\n assert len(d3) == 2\n\n def test_cookie_as_dict_keeps_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n d1 = dict(jar)\n d2 = dict(jar.iteritems())\n d3 = dict(jar.items())\n\n assert d1['some_cookie'] == 'some_value'\n assert d2['some_cookie'] == 'some_value'\n assert d3['some_cookie1'] == 'some_value1'\n\n def test_cookie_as_dict_keys(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n keys = jar.keys()\n assert keys == list(keys)\n # make sure one can use keys multiple times\n assert list(keys) == list(keys)\n\n def test_cookie_as_dict_values(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n values = jar.values()\n assert values == list(values)\n # make sure one can use values multiple times\n assert list(values) == list(values)\n\n def test_cookie_as_dict_items(self):\n key = 'some_cookie'\n value = 'some_value'\n\n key1 = 'some_cookie1'\n value1 = 'some_value1'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value)\n jar.set(key1, value1)\n\n items = jar.items()\n assert items == list(items)\n # make sure one can use items multiple times\n assert list(items) == list(items)\n\n def test_cookie_duplicate_names_different_domains(self):\n key = 'some_cookie'\n value = 'some_value'\n domain1 = 'test1.com'\n domain2 = 'test2.com'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, domain=domain1)\n jar.set(key, value, domain=domain2)\n assert key in jar\n items = jar.items()\n assert len(items) == 2\n\n # Verify that CookieConflictError is raised if domain is not specified\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n # Verify that CookieConflictError is not raised if domain is specified\n cookie = jar.get(key, domain=domain1)\n assert cookie == value\n\n def test_cookie_duplicate_names_raises_cookie_conflict_error(self):\n key = 'some_cookie'\n value = 'some_value'\n path = 'some_path'\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set(key, value, path=path)\n jar.set(key, value)\n with pytest.raises(requests.cookies.CookieConflictError):\n jar.get(key)\n\n def test_cookie_policy_copy(self):\n class MyCookiePolicy(cookielib.DefaultCookiePolicy):\n pass\n\n jar = requests.cookies.RequestsCookieJar()\n jar.set_policy(MyCookiePolicy())\n assert 
isinstance(jar.copy().get_policy(), MyCookiePolicy)\n\n def test_time_elapsed_blank(self, httpbin):\n r = requests.get(httpbin('get'))\n td = r.elapsed\n total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6)\n assert total_seconds > 0.0\n\n def test_empty_response_has_content_none(self):\n r = requests.Response()\n assert r.content is None\n\n def test_response_is_iterable(self):\n r = requests.Response()\n io = StringIO.StringIO('abc')\n read_ = io.read\n\n def read_mock(amt, decode_content=None):\n return read_(amt)\n setattr(io, 'read', read_mock)\n r.raw = io\n assert next(iter(r))\n io.close()\n\n def test_response_decode_unicode(self):\n \"\"\"When called with decode_unicode, Response.iter_content should always\n return unicode.\n \"\"\"\n r = requests.Response()\n r._content_consumed = True\n r._content = b'the content'\n r.encoding = 'ascii'\n\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n # also for streaming\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n r.encoding = 'ascii'\n chunks = r.iter_content(decode_unicode=True)\n assert all(isinstance(chunk, str) for chunk in chunks)\n\n def test_response_reason_unicode(self):\n # check for unicode HTTP status\n r = requests.Response()\n r.url = u'unicode URL'\n r.reason = u'Komponenttia ei löydy'.encode('utf-8')\n r.status_code = 404\n r.encoding = None\n assert not r.ok # old behaviour - crashes here\n\n def test_response_reason_unicode_fallback(self):\n # check raise_status falls back to ISO-8859-1\n r = requests.Response()\n r.url = 'some url'\n reason = u'Komponenttia ei löydy'\n r.reason = reason.encode('latin-1')\n r.status_code = 500\n r.encoding = None\n with pytest.raises(requests.exceptions.HTTPError) as e:\n r.raise_for_status()\n assert reason in e.value.args[0]\n\n def test_response_chunk_size_type(self):\n \"\"\"Ensure that chunk_size is passed as None or an integer, otherwise\n raise a TypeError.\n \"\"\"\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(1)\n assert all(len(chunk) == 1 for chunk in chunks)\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n chunks = r.iter_content(None)\n assert list(chunks) == [b'the content']\n\n r = requests.Response()\n r.raw = io.BytesIO(b'the content')\n with pytest.raises(TypeError):\n chunks = r.iter_content(\"1024\")\n\n def test_request_and_response_are_pickleable(self, httpbin):\n r = requests.get(httpbin('get'))\n\n # verify we can pickle the original request\n assert pickle.loads(pickle.dumps(r.request))\n\n # verify we can pickle the response and that we have access to\n # the original request.\n pr = pickle.loads(pickle.dumps(r))\n assert r.request.url == pr.request.url\n assert r.request.headers == pr.request.headers\n\n def test_prepared_request_is_pickleable(self, httpbin):\n p = requests.Request('GET', httpbin('get')).prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_file_is_pickleable(self, httpbin):\n files = {'file': open(__file__, 'rb')}\n r = requests.Request('POST', httpbin('post'), files=files)\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled and unpickled\n r = 
pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_prepared_request_with_hook_is_pickleable(self, httpbin):\n r = requests.Request('GET', httpbin('get'), hooks=default_hooks())\n p = r.prepare()\n\n # Verify PreparedRequest can be pickled\n r = pickle.loads(pickle.dumps(p))\n assert r.url == p.url\n assert r.headers == p.headers\n assert r.body == p.body\n assert r.hooks == p.hooks\n\n # Verify unpickled PreparedRequest sends properly\n s = requests.Session()\n resp = s.send(r)\n assert resp.status_code == 200\n\n def test_cannot_send_unprepared_requests(self, httpbin):\n r = requests.Request(url=httpbin())\n with pytest.raises(ValueError):\n requests.Session().send(r)\n\n def test_http_error(self):\n error = requests.exceptions.HTTPError()\n assert not error.response\n response = requests.Response()\n error = requests.exceptions.HTTPError(response=response)\n assert error.response == response\n error = requests.exceptions.HTTPError('message', response=response)\n assert str(error) == 'message'\n assert error.response == response\n\n def test_session_pickling(self, httpbin):\n r = requests.Request('GET', httpbin('get'))\n s = requests.Session()\n\n s = pickle.loads(pickle.dumps(s))\n s.proxies = getproxies()\n\n r = s.send(r.prepare())\n assert r.status_code == 200\n\n def test_fixes_1329(self, httpbin):\n \"\"\"Ensure that header updates are done case-insensitively.\"\"\"\n s = requests.Session()\n s.headers.update({'ACCEPT': 'BOGUS'})\n s.headers.update({'accept': 'application/json'})\n r = s.get(httpbin('get'))\n headers = r.request.headers\n assert headers['accept'] == 'application/json'\n assert headers['Accept'] == 'application/json'\n assert headers['ACCEPT'] == 'application/json'\n\n def test_uppercase_scheme_redirect(self, httpbin):\n parts = urlparse(httpbin('html'))\n url = \"HTTP://\" + parts.netloc + parts.path\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n assert r.status_code == 200\n assert r.url.lower() == url.lower()\n\n def test_transport_adapter_ordering(self):\n s = requests.Session()\n order = ['https://', 'http://']\n assert order == list(s.adapters)\n s.mount('http://git', HTTPAdapter())\n s.mount('http://github', HTTPAdapter())\n s.mount('http://github.com', HTTPAdapter())\n s.mount('http://github.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://github.com',\n 'http://github',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s.mount('http://gittip', HTTPAdapter())\n s.mount('http://gittip.com', HTTPAdapter())\n s.mount('http://gittip.com/about/', HTTPAdapter())\n order = [\n 'http://github.com/about/',\n 'http://gittip.com/about/',\n 'http://github.com',\n 'http://gittip.com',\n 'http://github',\n 'http://gittip',\n 'http://git',\n 'https://',\n 'http://',\n ]\n assert order == list(s.adapters)\n s2 = requests.Session()\n s2.adapters = {'http://': HTTPAdapter()}\n s2.mount('https://', HTTPAdapter())\n assert 'http://' in s2.adapters\n assert 'https://' in s2.adapters\n\n def test_session_get_adapter_prefix_matching(self):\n prefix = 'https://example.com'\n more_specific_prefix = prefix + '/some/path'\n\n url_matching_only_prefix = prefix + '/another/path'\n url_matching_more_specific_prefix = more_specific_prefix + '/longer/path'\n url_not_matching_prefix = 
'https://another.example.com/'\n\n s = requests.Session()\n prefix_adapter = HTTPAdapter()\n more_specific_prefix_adapter = HTTPAdapter()\n s.mount(prefix, prefix_adapter)\n s.mount(more_specific_prefix, more_specific_prefix_adapter)\n\n assert s.get_adapter(url_matching_only_prefix) is prefix_adapter\n assert s.get_adapter(url_matching_more_specific_prefix) is more_specific_prefix_adapter\n assert s.get_adapter(url_not_matching_prefix) not in (prefix_adapter, more_specific_prefix_adapter)\n\n def test_session_get_adapter_prefix_matching_mixed_case(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix = mixed_case_prefix + '/full_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix) is my_adapter\n\n def test_session_get_adapter_prefix_matching_is_case_insensitive(self):\n mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix'\n url_matching_prefix_with_different_case = 'HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url'\n\n s = requests.Session()\n my_adapter = HTTPAdapter()\n s.mount(mixed_case_prefix, my_adapter)\n\n assert s.get_adapter(url_matching_prefix_with_different_case) is my_adapter\n\n def test_header_remove_is_case_insensitive(self, httpbin):\n # From issue #1321\n s = requests.Session()\n s.headers['foo'] = 'bar'\n r = s.get(httpbin('get'), headers={'FOO': None})\n assert 'foo' not in r.request.headers\n\n def test_params_are_merged_case_sensitive(self, httpbin):\n s = requests.Session()\n s.params['foo'] = 'bar'\n r = s.get(httpbin('get'), params={'FOO': 'bar'})\n assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'}\n\n def test_long_authinfo_in_url(self):\n url = 'http://{}:{}@{}:9000/path?query#frag'.format(\n 'E8A3BE87-9E3F-4620-8858-95478E385B5B',\n 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E',\n 'exactly-------------sixty-----------three------------characters',\n )\n r = requests.Request('GET', url).prepare()\n assert r.url == url\n\n def test_header_keys_are_native(self, httpbin):\n headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'}\n r = requests.Request('GET', httpbin('get'), headers=headers)\n p = r.prepare()\n\n # This is testing that they are builtin strings. 
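Both the unicode key and the byte key should come back as native str. 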
A bit weird, but there\n # we go.\n assert 'unicode' in p.headers.keys()\n assert 'byte' in p.headers.keys()\n\n def test_header_validation(self, httpbin):\n \"\"\"Ensure prepare_headers regex isn't flagging valid header contents.\"\"\"\n headers_ok = {'foo': 'bar baz qux',\n 'bar': u'fbbq'.encode('utf8'),\n 'baz': '',\n 'qux': '1'}\n r = requests.get(httpbin('get'), headers=headers_ok)\n assert r.request.headers['foo'] == headers_ok['foo']\n\n def test_header_value_not_str(self, httpbin):\n \"\"\"Ensure the header value is of type string or bytes as\n per discussion in GH issue #3386\n \"\"\"\n headers_int = {'foo': 3}\n headers_dict = {'bar': {'foo': 'bar'}}\n headers_list = {'baz': ['foo', 'bar']}\n\n # Test for int\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_int)\n assert 'foo' in str(excinfo.value)\n # Test for dict\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_dict)\n assert 'bar' in str(excinfo.value)\n # Test for list\n with pytest.raises(InvalidHeader) as excinfo:\n r = requests.get(httpbin('get'), headers=headers_list)\n assert 'baz' in str(excinfo.value)\n\n def test_header_no_return_chars(self, httpbin):\n \"\"\"Ensure that a header containing return character sequences raise an\n exception. Otherwise, multiple headers are created from single string.\n \"\"\"\n headers_ret = {'foo': 'bar\\r\\nbaz: qux'}\n headers_lf = {'foo': 'bar\\nbaz: qux'}\n headers_cr = {'foo': 'bar\\rbaz: qux'}\n\n # Test for newline\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_ret)\n # Test for line feed\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_lf)\n # Test for carriage return\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_cr)\n\n def test_header_no_leading_space(self, httpbin):\n \"\"\"Ensure headers containing leading whitespace raise\n InvalidHeader Error before sending.\n \"\"\"\n headers_space = {'foo': ' bar'}\n headers_tab = {'foo': ' bar'}\n\n # Test for whitespace\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_space)\n # Test for tab\n with pytest.raises(InvalidHeader):\n r = requests.get(httpbin('get'), headers=headers_tab)\n\n @pytest.mark.parametrize('files', ('foo', b'foo', bytearray(b'foo')))\n def test_can_send_objects_with_files(self, httpbin, files):\n data = {'a': 'this is a string'}\n files = {'b': files}\n r = requests.Request('POST', httpbin('post'), data=data, files=files)\n p = r.prepare()\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_can_send_file_object_with_non_string_filename(self, httpbin):\n f = io.BytesIO()\n f.name = 2\n r = requests.Request('POST', httpbin('post'), files={'f': f})\n p = r.prepare()\n\n assert 'multipart/form-data' in p.headers['Content-Type']\n\n def test_autoset_header_values_are_native(self, httpbin):\n data = 'this is a string'\n length = '16'\n req = requests.Request('POST', httpbin('post'), data=data)\n p = req.prepare()\n\n assert p.headers['Content-Length'] == length\n\n def test_nonhttp_schemes_dont_check_URLs(self):\n test_urls = (\n 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==',\n 'file:///etc/passwd',\n 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431',\n )\n for test_url in test_urls:\n req = requests.Request('GET', test_url)\n preq = req.prepare()\n assert test_url == preq.url\n\n def 
test_auth_is_stripped_on_http_downgrade(self, httpbin, httpbin_secure, httpbin_ca_bundle):\n r = requests.get(\n httpbin_secure('redirect-to'),\n params={'url': httpbin('get')},\n auth=('user', 'pass'),\n verify=httpbin_ca_bundle\n )\n assert r.history[0].request.headers['Authorization']\n assert 'Authorization' not in r.request.headers\n\n def test_auth_is_retained_for_redirect_on_host(self, httpbin):\n r = requests.get(httpbin('redirect/1'), auth=('user', 'pass'))\n h1 = r.history[0].request.headers['Authorization']\n h2 = r.request.headers['Authorization']\n\n assert h1 == h2\n\n def test_should_strip_auth_host_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com/foo', 'http://another.example.com/')\n\n def test_should_strip_auth_http_downgrade(self):\n s = requests.Session()\n assert s.should_strip_auth('https://example.com/foo', 'http://example.com/bar')\n\n def test_should_strip_auth_https_upgrade(self):\n s = requests.Session()\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com:80/foo', 'https://example.com/bar')\n assert not s.should_strip_auth('http://example.com/foo', 'https://example.com:443/bar')\n # Non-standard ports should trigger stripping\n assert s.should_strip_auth('http://example.com:8080/foo', 'https://example.com/bar')\n assert s.should_strip_auth('http://example.com/foo', 'https://example.com:8443/bar')\n\n def test_should_strip_auth_port_change(self):\n s = requests.Session()\n assert s.should_strip_auth('http://example.com:1234/foo', 'https://example.com:4321/bar')\n\n @pytest.mark.parametrize(\n 'old_uri, new_uri', (\n ('https://example.com:443/foo', 'https://example.com/bar'),\n ('http://example.com:80/foo', 'http://example.com/bar'),\n ('https://example.com/foo', 'https://example.com:443/bar'),\n ('http://example.com/foo', 'http://example.com:80/bar')\n ))\n def test_should_strip_auth_default_port(self, old_uri, new_uri):\n s = requests.Session()\n assert not s.should_strip_auth(old_uri, new_uri)\n\n def test_manual_redirect_with_partial_body_read(self, httpbin):\n s = requests.Session()\n r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True)\n assert r1.is_redirect\n rg = s.resolve_redirects(r1, r1.request, stream=True)\n\n # read only the first eight bytes of the response body,\n # then follow the redirect\n r1.iter_content(8)\n r2 = next(rg)\n assert r2.is_redirect\n\n # read all of the response via iter_content,\n # then follow the redirect\n for _ in r2.iter_content():\n pass\n r3 = next(rg)\n assert not r3.is_redirect\n\n def test_prepare_body_position_non_stream(self):\n data = b'the data'\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is None\n\n def test_rewind_body(self):\n data = io.BytesIO(b'the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n assert prep.body.read() == b'the data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n requests.utils.rewind_body(prep)\n assert prep.body.read() == b'the data'\n\n def test_rewind_partially_read_body(self):\n data = io.BytesIO(b'the data')\n data.read(4) # read some data\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 4\n assert prep.body.read() == b'data'\n\n # the data has all been read\n assert prep.body.read() == b''\n\n # rewind it back\n 
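# rewind_body() seeks back to the _body_position recorded at prepare time\n        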
requests.utils.rewind_body(prep)\n assert prep.body.read() == b'data'\n\n def test_rewind_body_no_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def test_rewind_body_failed_seek(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n return 0\n\n def seek(self, pos, whence=0):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position == 0\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'error occurred when rewinding request body' in str(e)\n\n def test_rewind_body_failed_tell(self):\n class BadFileObj:\n def __init__(self, data):\n self.data = data\n\n def tell(self):\n raise OSError()\n\n def __iter__(self):\n return\n\n data = BadFileObj('the data')\n prep = requests.Request('GET', 'http://example.com', data=data).prepare()\n assert prep._body_position is not None\n\n with pytest.raises(UnrewindableBodyError) as e:\n requests.utils.rewind_body(prep)\n\n assert 'Unable to rewind request body' in str(e)\n\n def _patch_adapter_gzipped_redirect(self, session, url):\n adapter = session.get_adapter(url=url)\n org_build_response = adapter.build_response\n self._patched_response = False\n\n def build_response(*args, **kwargs):\n resp = org_build_response(*args, **kwargs)\n if not self._patched_response:\n resp.raw.headers['content-encoding'] = 'gzip'\n self._patched_response = True\n return resp\n\n adapter.build_response = build_response\n\n def test_redirect_with_wrong_gzipped_header(self, httpbin):\n s = requests.Session()\n url = httpbin('redirect/1')\n self._patch_adapter_gzipped_redirect(s, url)\n s.get(url)\n\n @pytest.mark.parametrize(\n 'username, password, auth_str', (\n ('test', 'test', 'Basic dGVzdDp0ZXN0'),\n (u'имя'.encode('utf-8'), u'пароль'.encode('utf-8'), 'Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA=='),\n ))\n def test_basic_auth_str_is_always_native(self, username, password, auth_str):\n s = _basic_auth_str(username, password)\n assert isinstance(s, builtin_str)\n assert s == auth_str\n\n def test_requests_history_is_saved(self, httpbin):\n r = requests.get(httpbin('redirect/5'))\n total = r.history[-1].history\n i = 0\n for item in r.history:\n assert item.history == total[0:i]\n i += 1\n\n def test_json_param_post_content_type_works(self, httpbin):\n r = requests.post(\n httpbin('post'),\n json={'life': 42}\n )\n assert r.status_code == 200\n assert 'application/json' in r.request.headers['Content-Type']\n assert {'life': 42} == r.json()['json']\n\n def test_json_param_post_should_not_override_data_param(self, httpbin):\n r = requests.Request(method='POST', url=httpbin('post'),\n data={'stuff': 'elixr'},\n json={'music': 'flute'})\n prep = r.prepare()\n assert 'stuff=elixr' == prep.body\n\n def test_response_iter_lines(self, httpbin):\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n it = r.iter_lines()\n next(it)\n assert len(list(it)) == 3\n\n def test_response_context_manager(self, httpbin):\n with requests.get(httpbin('stream/4'), stream=True) as 
response:\n assert isinstance(response, requests.Response)\n\n assert response.raw.closed\n\n def test_unconsumed_session_response_closes_connection(self, httpbin):\n s = requests.session()\n\n with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response:\n pass\n\n assert response._content_consumed is False\n assert response.raw.closed\n\n @pytest.mark.xfail\n def test_response_iter_lines_reentrant(self, httpbin):\n \"\"\"Response.iter_lines() is not reentrant safe\"\"\"\n r = requests.get(httpbin('stream/4'), stream=True)\n assert r.status_code == 200\n\n next(r.iter_lines())\n assert len(list(r.iter_lines())) == 3\n\n def test_session_close_proxy_clear(self, mocker):\n proxies = {\n 'one': mocker.Mock(),\n 'two': mocker.Mock(),\n }\n session = requests.Session()\n mocker.patch.dict(session.adapters['http://'].proxy_manager, proxies)\n session.close()\n proxies['one'].clear.assert_called_once_with()\n proxies['two'].clear.assert_called_once_with()\n\n def test_proxy_auth(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:pass@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}\n\n def test_proxy_auth_empty_pass(self):\n adapter = HTTPAdapter()\n headers = adapter.proxy_headers(\"http://user:@httpbin.org\")\n assert headers == {'Proxy-Authorization': 'Basic dXNlcjo='}\n\n def test_response_json_when_content_is_None(self, httpbin):\n r = requests.get(httpbin('/status/204'))\n # Make sure r.content is None\n r.status_code = 0\n r._content = False\n r._content_consumed = False\n\n assert r.content is None\n with pytest.raises(ValueError):\n r.json()\n\n def test_response_without_release_conn(self):\n \"\"\"Test `close` call for non-urllib3-like raw objects.\n Should work when `release_conn` attr doesn't exist on `response.raw`.\n \"\"\"\n resp = requests.Response()\n resp.raw = StringIO.StringIO('test')\n assert not resp.raw.closed\n resp.close()\n assert resp.raw.closed\n\n def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin):\n \"\"\"Ensure that a byte stream with size > 0 will not set both a Content-Length\n and Transfer-Encoding header.\n \"\"\"\n auth = ('user', 'pass')\n url = httpbin('post')\n file_obj = io.BytesIO(b'test data')\n r = requests.Request('POST', url, auth=auth, data=file_obj)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' not in prepared_request.headers\n assert 'Content-Length' in prepared_request.headers\n\n def test_chunked_upload_does_not_set_content_length_header(self, httpbin):\n \"\"\"Ensure that requests with a generator body stream using\n Transfer-Encoding: chunked, not a Content-Length header.\n \"\"\"\n data = (i for i in [b'a', b'b', b'c'])\n url = httpbin('post')\n r = requests.Request('POST', url, data=data)\n prepared_request = r.prepare()\n assert 'Transfer-Encoding' in prepared_request.headers\n assert 'Content-Length' not in prepared_request.headers\n\n def test_custom_redirect_mixin(self, httpbin):\n \"\"\"Tests a custom mixin to overwrite 
``get_redirect_target``.\n\n Ensures a subclassed ``requests.Session`` can handle a certain type of\n malformed redirect responses.\n\n 1. original request receives a proper response: 302 redirect\n 2. following the redirect, a malformed response is given:\n status code = HTTP 200\n location = alternate url\n 3. the custom session catches the edge case and follows the redirect\n \"\"\"\n url_final = httpbin('html')\n querystring_malformed = urlencode({'location': url_final})\n url_redirect_malformed = httpbin('response-headers?%s' % querystring_malformed)\n querystring_redirect = urlencode({'url': url_redirect_malformed})\n url_redirect = httpbin('redirect-to?%s' % querystring_redirect)\n urls_test = [url_redirect,\n url_redirect_malformed,\n url_final,\n ]\n\n class CustomRedirectSession(requests.Session):\n def get_redirect_target(self, resp):\n # default behavior\n if resp.is_redirect:\n return resp.headers['location']\n # edge case - check to see if 'location' is in headers anyways\n location = resp.headers.get('location')\n if location and (location != resp.url):\n return location\n return None\n\n session = CustomRedirectSession()\n r = session.get(urls_test[0])\n assert len(r.history) == 2\n assert r.status_code == 200\n assert r.history[0].status_code == 302\n assert r.history[0].is_redirect\n assert r.history[1].status_code == 200\n assert not r.history[1].is_redirect\n assert r.url == urls_test[2]\n\n\nclass TestCaseInsensitiveDict:\n\n @pytest.mark.parametrize(\n 'cid', (\n CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}),\n CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')]),\n CaseInsensitiveDict(FOO='foo', BAr='bar'),\n ))\n def test_init(self, cid):\n assert len(cid) == 2\n assert 'foo' in cid\n assert 'bar' in cid\n\n def test_docstring_example(self):\n cid = CaseInsensitiveDict()\n cid['Accept'] = 'application/json'\n assert cid['aCCEPT'] == 'application/json'\n assert list(cid) == ['Accept']\n\n def test_len(self):\n cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'})\n cid['A'] = 'a'\n assert len(cid) == 2\n\n def test_getitem(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n\n def test_fixes_649(self):\n \"\"\"__setitem__ should behave case-insensitively.\"\"\"\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['Spam'] = 'twoval'\n cid['sPAM'] = 'redval'\n cid['SPAM'] = 'blueval'\n assert cid['spam'] == 'blueval'\n assert cid['SPAM'] == 'blueval'\n assert list(cid.keys()) == ['SPAM']\n\n def test_delitem(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n del cid['sPam']\n assert 'spam' not in cid\n assert len(cid) == 0\n\n def test_contains(self):\n cid = CaseInsensitiveDict()\n cid['Spam'] = 'someval'\n assert 'Spam' in cid\n assert 'spam' in cid\n assert 'SPAM' in cid\n assert 'sPam' in cid\n assert 'notspam' not in cid\n\n def test_get(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'oneval'\n cid['SPAM'] = 'blueval'\n assert cid.get('spam') == 'blueval'\n assert cid.get('SPAM') == 'blueval'\n assert cid.get('sPam') == 'blueval'\n assert cid.get('notspam', 'default') == 'default'\n\n def test_update(self):\n cid = CaseInsensitiveDict()\n cid['spam'] = 'blueval'\n cid.update({'sPam': 'notblueval'})\n assert cid['spam'] == 'notblueval'\n cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'})\n cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'})\n assert len(cid) == 2\n assert cid['foo'] == 'anotherfoo'\n assert cid['bar'] == 'anotherbar'\n\n def 
test_update_retains_unchanged(self):\n cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'})\n cid.update({'foo': 'newfoo'})\n assert cid['bar'] == 'bar'\n\n def test_iter(self):\n cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'})\n keys = frozenset(['Spam', 'Eggs'])\n assert frozenset(iter(cid)) == keys\n\n def test_equality(self):\n cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'})\n othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'})\n assert cid == othercid\n del othercid['spam']\n assert cid != othercid\n assert cid == {'spam': 'blueval', 'eggs': 'redval'}\n assert cid != object()\n\n def test_setdefault(self):\n cid = CaseInsensitiveDict({'Spam': 'blueval'})\n assert cid.setdefault('spam', 'notblueval') == 'blueval'\n assert cid.setdefault('notspam', 'notblueval') == 'notblueval'\n\n def test_lower_items(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items())\n lowerkeyset = frozenset(['accept', 'user-agent'])\n assert keyset == lowerkeyset\n\n def test_preserve_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n keyset = frozenset(['Accept', 'user-Agent'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_preserve_last_key_case(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid.update({'ACCEPT': 'application/json'})\n cid['USER-AGENT'] = 'requests'\n keyset = frozenset(['ACCEPT', 'USER-AGENT'])\n assert frozenset(i[0] for i in cid.items()) == keyset\n assert frozenset(cid.keys()) == keyset\n assert frozenset(cid) == keyset\n\n def test_copy(self):\n cid = CaseInsensitiveDict({\n 'Accept': 'application/json',\n 'user-Agent': 'requests',\n })\n cid_copy = cid.copy()\n assert cid == cid_copy\n cid['changed'] = True\n assert cid != cid_copy\n\n\nclass TestMorselToCookieExpires:\n \"\"\"Tests for morsel_to_cookie when morsel contains expires.\"\"\"\n\n def test_expires_valid_str(self):\n \"\"\"Test case where we convert expires from string time.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires == 1\n\n @pytest.mark.parametrize(\n 'value, exception', (\n (100, TypeError),\n ('woops', ValueError),\n ))\n def test_expires_invalid_int(self, value, exception):\n \"\"\"Test case where an invalid type is passed for expires.\"\"\"\n morsel = Morsel()\n morsel['expires'] = value\n with pytest.raises(exception):\n morsel_to_cookie(morsel)\n\n def test_expires_none(self):\n \"\"\"Test case where expires is None.\"\"\"\n\n morsel = Morsel()\n morsel['expires'] = None\n cookie = morsel_to_cookie(morsel)\n assert cookie.expires is None\n\n\nclass TestMorselToCookieMaxAge:\n\n \"\"\"Tests for morsel_to_cookie when morsel contains max-age.\"\"\"\n\n def test_max_age_valid_int(self):\n \"\"\"Test case where a valid max age in seconds is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 60\n cookie = morsel_to_cookie(morsel)\n assert isinstance(cookie.expires, int)\n\n def test_max_age_invalid_str(self):\n \"\"\"Test case where a invalid max age is passed.\"\"\"\n\n morsel = Morsel()\n morsel['max-age'] = 'woops'\n with pytest.raises(TypeError):\n morsel_to_cookie(morsel)\n\n\nclass TestTimeout:\n\n def test_stream_timeout(self, httpbin):\n 
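# a server-side delay longer than the timeout should surface as a read timeout\n        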
try:\n requests.get(httpbin('delay/10'), timeout=2.0)\n except requests.exceptions.Timeout as e:\n assert 'Read timed out' in e.args[0].args[0]\n\n @pytest.mark.parametrize(\n 'timeout, error_text', (\n ((3, 4, 5), '(connect, read)'),\n ('foo', 'must be an int, float or None'),\n ))\n def test_invalid_timeout(self, httpbin, timeout, error_text):\n with pytest.raises(ValueError) as e:\n requests.get(httpbin('get'), timeout=timeout)\n assert error_text in str(e)\n\n @pytest.mark.parametrize(\n 'timeout', (\n None,\n Urllib3Timeout(connect=None, read=None)\n ))\n def test_none_timeout(self, httpbin, timeout):\n \"\"\"Check that you can set None as a valid timeout value.\n\n To actually test this behavior, we'd want to check that setting the\n timeout to None actually lets the request block past the system default\n timeout. However, this would make the test suite unbearably slow.\n Instead we verify that setting the timeout to None does not prevent the\n request from succeeding.\n \"\"\"\n r = requests.get(httpbin('get'), timeout=timeout)\n assert r.status_code == 200\n\n @pytest.mark.parametrize(\n 'timeout', (\n (None, 0.1),\n Urllib3Timeout(connect=None, read=0.1)\n ))\n def test_read_timeout(self, httpbin, timeout):\n try:\n requests.get(httpbin('delay/10'), timeout=timeout)\n pytest.fail('The recv() request should time out.')\n except ReadTimeout:\n pass\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, None),\n Urllib3Timeout(connect=0.1, read=None)\n ))\n def test_connect_timeout(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout as e:\n assert isinstance(e, ConnectionError)\n assert isinstance(e, Timeout)\n\n @pytest.mark.parametrize(\n 'timeout', (\n (0.1, 0.1),\n Urllib3Timeout(connect=0.1, read=0.1)\n ))\n def test_total_timeout_connect(self, timeout):\n try:\n requests.get(TARPIT, timeout=timeout)\n pytest.fail('The connect() request should time out.')\n except ConnectTimeout:\n pass\n\n def test_encoded_methods(self, httpbin):\n \"\"\"See: https://github.com/psf/requests/issues/2316\"\"\"\n r = requests.request(b'GET', httpbin('get'))\n assert r.ok\n\n\nSendCall = collections.namedtuple('SendCall', ('args', 'kwargs'))\n\n\nclass RedirectSession(SessionRedirectMixin):\n def __init__(self, order_of_redirects):\n self.redirects = order_of_redirects\n self.calls = []\n self.max_redirects = 30\n self.cookies = {}\n self.trust_env = False\n\n def send(self, *args, **kwargs):\n self.calls.append(SendCall(args, kwargs))\n return self.build_response()\n\n def build_response(self):\n request = self.calls[-1].args[0]\n r = requests.Response()\n\n try:\n r.status_code = int(self.redirects.pop(0))\n except IndexError:\n r.status_code = 200\n\n r.headers = CaseInsensitiveDict({'Location': '/'})\n r.raw = self._build_raw()\n r.request = request\n return r\n\n def _build_raw(self):\n string = StringIO.StringIO('')\n setattr(string, 'release_conn', lambda *args: args)\n return string\n\n\ndef test_json_encodes_as_bytes():\n # urllib3 expects bodies as bytes-like objects\n body = {\"key\": \"value\"}\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='https://www.example.com/',\n json=body\n )\n assert isinstance(p.body, bytes)\n\n\ndef test_requests_are_updated_each_time(httpbin):\n session = RedirectSession([303, 307])\n prep = requests.Request('POST', httpbin('post')).prepare()\n r0 = session.send(prep)\n assert r0.request.method == 'POST'\n assert session.calls[-1] == SendCall((r0.request,), 
{})\n redirect_generator = session.resolve_redirects(r0, prep)\n default_keyword_args = {\n 'stream': False,\n 'verify': True,\n 'cert': None,\n 'timeout': None,\n 'allow_redirects': False,\n 'proxies': {},\n }\n for response in redirect_generator:\n assert response.request.method == 'GET'\n send_call = SendCall((response.request,), default_keyword_args)\n assert session.calls[-1] == send_call\n\n\n@pytest.mark.parametrize(\"var,url,proxy\", [\n ('http_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('https_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'http://example.com', 'socks5://proxy.com:9876'),\n ('all_proxy', 'https://example.com', 'socks5://proxy.com:9876'),\n])\ndef test_proxy_env_vars_override_default(var, url, proxy):\n session = requests.Session()\n prep = PreparedRequest()\n prep.prepare(method='GET', url=url)\n\n kwargs = {\n var: proxy\n }\n scheme = urlparse(url).scheme\n with override_environ(**kwargs):\n proxies = session.rebuild_proxies(prep, {})\n assert scheme in proxies\n assert proxies[scheme] == proxy\n\n\n@pytest.mark.parametrize(\n 'data', (\n (('a', 'b'), ('c', 'd')),\n (('c', 'd'), ('a', 'b')),\n (('a', 'b'), ('c', 'd'), ('e', 'f')),\n ))\ndef test_data_argument_accepts_tuples(data):\n \"\"\"Ensure that the data argument will accept tuples of strings\n and properly encode them.\n \"\"\"\n p = PreparedRequest()\n p.prepare(\n method='GET',\n url='http://www.example.com',\n data=data,\n hooks=default_hooks()\n )\n assert p.body == urlencode(data)\n\n\n@pytest.mark.parametrize(\n 'kwargs', (\n None,\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks()\n },\n {\n 'method': 'GET',\n 'url': 'http://www.example.com',\n 'data': 'foo=bar',\n 'hooks': default_hooks(),\n 'cookies': {'foo': 'bar'}\n },\n {\n 'method': 'GET',\n 'url': u('http://www.example.com/üniçø∂é')\n },\n ))\ndef test_prepared_copy(kwargs):\n p = PreparedRequest()\n if kwargs:\n p.prepare(**kwargs)\n copy = p.copy()\n for attr in ('method', 'url', 'headers', '_cookies', 'body', 'hooks'):\n assert getattr(p, attr) == getattr(copy, attr)\n\n\ndef test_urllib3_retries(httpbin):\n from urllib3.util import Retry\n s = requests.Session()\n s.mount('http://', HTTPAdapter(max_retries=Retry(\n total=2, status_forcelist=[500]\n )))\n\n with pytest.raises(RetryError):\n s.get(httpbin('status/500'))\n\n\ndef test_urllib3_pool_connection_closed(httpbin):\n s = requests.Session()\n s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0))\n\n try:\n s.get(httpbin('status/200'))\n except ConnectionError as e:\n assert u\"Pool is closed.\" in str(e)\n\n\nclass TestPreparingURLs(object):\n @pytest.mark.parametrize(\n 'url,expected',\n (\n ('http://google.com', 'http://google.com/'),\n (u'http://ジェーピーニック.jp', u'http://xn--hckqz9bzb1cyrb.jp/'),\n (u'http://xn--n3h.net/', u'http://xn--n3h.net/'),\n (\n u'http://ジェーピーニック.jp'.encode('utf-8'),\n u'http://xn--hckqz9bzb1cyrb.jp/'\n ),\n (\n u'http://straße.de/straße',\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://straße.de/straße'.encode('utf-8'),\n u'http://xn--strae-oqa.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße',\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n u'http://Königsgäßchen.de/straße'.encode('utf-8'),\n u'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe'\n ),\n (\n b'http://xn--n3h.net/',\n u'http://xn--n3h.net/'\n ),\n (\n b'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n 
u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n ),\n (\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/',\n u'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/'\n )\n )\n )\n def test_preparing_url(self, url, expected):\n\n def normalize_percent_encode(x):\n # Helper function that normalizes equivalent \n # percent-encoded bytes before comparisons\n for c in re.findall(r'%[a-fA-F0-9]{2}', x):\n x = x.replace(c, c.upper())\n return x\n \n r = requests.Request('GET', url=url)\n p = r.prepare()\n assert normalize_percent_encode(p.url) == expected\n\n @pytest.mark.parametrize(\n 'url',\n (\n b\"http://*.google.com\",\n b\"http://*\",\n u\"http://*.google.com\",\n u\"http://*\",\n u\"http://☃.net/\"\n )\n )\n def test_preparing_bad_url(self, url):\n r = requests.Request('GET', url=url)\n with pytest.raises(requests.exceptions.InvalidURL):\n r.prepare()\n\n @pytest.mark.parametrize(\n 'url, exception',\n (\n ('http://localhost:-1', InvalidURL),\n )\n )\n def test_redirecting_to_bad_url(self, httpbin, url, exception):\n with pytest.raises(exception):\n r = requests.get(httpbin('redirect-to'), params={'url': url})\n\n @pytest.mark.parametrize(\n 'input, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path%7E\",\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path~\",\n ),\n (\n b\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n u\"mailto:user@example.org\",\n ),\n (\n b\"data:SSDimaUgUHl0aG9uIQ==\",\n u\"data:SSDimaUgUHl0aG9uIQ==\",\n )\n )\n )\n def test_url_mutation(self, input, expected):\n \"\"\"\n This test validates that we correctly exclude some URLs from\n preparation, and that we handle others. Specifically, it tests that\n any URL whose scheme doesn't begin with \"http\" is left alone, and\n those whose scheme *does* begin with \"http\" are mutated.\n \"\"\"\n r = requests.Request('GET', url=input)\n p = r.prepare()\n assert p.url == expected\n\n @pytest.mark.parametrize(\n 'input, params, expected',\n (\n (\n b\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path\",\n {\"key\": \"value\"},\n u\"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value\",\n ),\n (\n b\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n (\n u\"mailto:user@example.org\",\n {\"key\": \"value\"},\n u\"mailto:user@example.org\",\n ),\n )\n )\n def test_parameters_for_nonstandard_schemes(self, input, params, expected):\n \"\"\"\n Setting parameters for nonstandard schemes is allowed if those schemes\n begin with \"http\", and is forbidden otherwise.\n \"\"\"\n r = requests.Request('GET', url=input, params=params)\n p = r.prepare()\n assert p.url == expected\n\n def test_post_json_nan(self, httpbin):\n data = {\"foo\": float(\"nan\")}\n with pytest.raises(requests.exceptions.InvalidJSONError):\n r = requests.post(httpbin('post'), json=data)\n\n def test_json_decode_compatibility(self, httpbin):\n r = requests.get(httpbin('bytes/20'))\n with pytest.raises(requests.exceptions.JSONDecodeError):\n r.json()",
"path": "tests/test_requests.py"
}
] | 13_5 | python | import sys
import pytest
# Requests to this URL should always fail with a connection timeout (nothing
# listening on that port)
TARPIT = "http://10.255.255.1"
# Use this instead of TARPIT when a fast failure is needed, to avoid waiting out the connection timeout
INVALID_PROXY = "http://localhost:1"
class TestRequests:
    from requests.exceptions import InvalidHeader
try:
from ssl import SSLContext
del SSLContext
HAS_MODERN_SSL = True
except ImportError:
HAS_MODERN_SSL = False
try:
import requests
requests.pyopenssl
HAS_PYOPENSSL = True
except AttributeError:
HAS_PYOPENSSL = False
try:
from http.server import HTTPServer, SimpleHTTPRequestHandler
except ImportError:
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
@staticmethod
def prepare_url(value):
from requests.compat import urljoin
# Issue #1483: Make sure the URL always has a trailing slash
httpbin_url = value.url.rstrip("/") + "/"
def inner(*suffix):
return urljoin(httpbin_url, "/".join(suffix))
return inner
@pytest.fixture
def httpbin(self, httpbin):
return self.prepare_url(httpbin)
@pytest.fixture
def httpbin_secure(self, httpbin_secure):
return self.prepare_url(httpbin_secure)
@pytest.fixture
def nosan_server(self, tmp_path_factory):
# delay importing until the fixture in order to make it possible
# to deselect the test via command-line when trustme is not available
import trustme
import ssl
import threading
tmpdir = tmp_path_factory.mktemp("certs")
ca = trustme.CA()
# only commonName, no subjectAltName
server_cert = ca.issue_cert(common_name="localhost")
ca_bundle = str(tmpdir / "ca.pem")
ca.cert_pem.write_to_path(ca_bundle)
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
server_cert.configure_cert(context)
server = self.HTTPServer(("localhost", 0), self.SimpleHTTPRequestHandler)
server.socket = context.wrap_socket(server.socket, server_side=True)
server_thread = threading.Thread(target=server.serve_forever)
server_thread.start()
yield "localhost", server.server_address[1], ca_bundle
server.shutdown()
server_thread.join()
def test_proxy_authorization_preserved_on_request(self, httpbin):
import requests
proxy_auth_value = "Bearer XXX"
session = requests.Session()
session.headers.update({"Proxy-Authorization": proxy_auth_value})
resp = session.request(method="GET", url=httpbin("get"))
sent_headers = resp.json().get("headers", {})
assert sent_headers.get("Proxy-Authorization") == proxy_auth_value
def main():
    # Run the pytest tests in this file programmatically, exiting with
    # status code 1 if any test fails and 0 otherwise.
    exit_code = pytest.main(["-v", __file__])
    sys.exit(1 if exit_code != 0 else 0)
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/requests | Correctly handle URLs that carry authentication information in the `prepend_scheme_if_needed` function in `requests/utils.py`. Update the function to check whether the parsed URL contains auth and, if it does, include it in the netloc, so that URLs with credentials are formatted with both the prepended scheme and the authentication information | 0192aac | -e .[socks]
pytest
pytest-cov
pytest-httpbin==1.0.0
pytest-mock
httpbin==0.7.0
trustme
wheel
chardet>=3.0.2,<3.1.0
idna>=2.5,<2.8
urllib3>=1.21.1,<1.24
certifi>=2017.4.17
# Flask Stack
Flask>1.0,<2.0
markupsafe<2.1
| python3.9 | 38f3f8ec | diff --git a/requests/utils.py b/requests/utils.py
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -974,6 +974,10 @@ def prepend_scheme_if_needed(url, new_scheme):
if not netloc:
netloc, path = path, netloc
+ if auth:
+ # parse_url doesn't provide the netloc with auth
+ # so we'll add it ourselves.
+ netloc = '@'.join([auth, netloc])
if scheme is None:
scheme = new_scheme
if path is None:
diff --git a/tests/test_utils.py b/tests/test_utils.py
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -602,6 +602,14 @@ def test_parse_header_links(value, expected):
('example.com/path', 'http://example.com/path'),
('//example.com/path', 'http://example.com/path'),
('example.com:80', 'http://example.com:80'),
+ (
+ 'http://user:pass@example.com/path?query',
+ 'http://user:pass@example.com/path?query'
+ ),
+ (
+ 'http://user@example.com/path?query',
+ 'http://user@example.com/path?query'
+ )
))
def test_prepend_scheme_if_needed(value, expected):
assert prepend_scheme_if_needed(value, 'http') == expected
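# For reference (not part of the recorded patch): a minimal sketch of the
# helper after this change. It assumes urllib3's parse_url, as already used
# in requests/utils.py, and imports urlunparse from the stdlib here so the
# sketch is self-contained (requests re-exports it via requests.compat).
from urllib3.util import parse_url
from urllib.parse import urlunparse

def prepend_scheme_if_needed(url, new_scheme):
    """Prepend new_scheme to url unless url already carries a scheme."""
    parsed = parse_url(url)
    scheme, auth, host, port, path, query, fragment = parsed

    # parse_url can report an empty netloc for scheme-less URLs, leaving
    # the host in `path`; swap them back (kept for backwards compatibility).
    netloc = parsed.netloc
    if not netloc:
        netloc, path = path, netloc

    if auth:
        # parse_url strips auth out of the netloc, so re-attach it:
        # 'user:pass' + '@' + 'host:port'.
        netloc = '@'.join([auth, netloc])
    if scheme is None:
        scheme = new_scheme
    if path is None:
        path = ''

    return urlunparse((scheme, netloc, path, '', query, fragment))

# Example, taken from the test case added above:
# prepend_scheme_if_needed('http://user:pass@example.com/path?query', 'http')
# returns 'http://user:pass@example.com/path?query' unchanged.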
| [
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.utils\n~~~~~~~~~~~~~~\n\nThis module provides utility functions that are used within Requests\nthat are also useful for external consumption.\n\"\"\"\n\nimport codecs\nimport contextlib\nimport io\nimport os\nimport re\nimport socket\nimport struct\nimport sys\nimport tempfile\nimport warnings\nimport zipfile\nfrom collections import OrderedDict\nfrom urllib3.util import make_headers\nfrom urllib3.util import parse_url\n\nfrom .__version__ import __version__\nfrom . import certs\n# to_native_string is unused here, but imported here for backwards compatibility\nfrom ._internal_utils import to_native_string\nfrom .compat import parse_http_list as _parse_list_header\nfrom .compat import (\n quote, urlparse, bytes, str, unquote, getproxies,\n proxy_bypass, urlunparse, basestring, integer_types, is_py3,\n proxy_bypass_environment, getproxies_environment, Mapping)\nfrom .cookies import cookiejar_from_dict\nfrom .structures import CaseInsensitiveDict\nfrom .exceptions import (\n InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)\n\nNETRC_FILES = ('.netrc', '_netrc')\n\nDEFAULT_CA_BUNDLE_PATH = certs.where()\n\nDEFAULT_PORTS = {'http': 80, 'https': 443}\n\n# Ensure that ', ' is used to preserve previous delimiter behavior.\nDEFAULT_ACCEPT_ENCODING = \", \".join(\n re.split(r\",\\s*\", make_headers(accept_encoding=True)[\"accept-encoding\"])\n)\n\n\nif sys.platform == 'win32':\n # provide a proxy_bypass version on Windows without DNS lookups\n\n def proxy_bypass_registry(host):\n try:\n if is_py3:\n import winreg\n else:\n import _winreg as winreg\n except ImportError:\n return False\n\n try:\n internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n r'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings')\n # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it\n proxyEnable = int(winreg.QueryValueEx(internetSettings,\n 'ProxyEnable')[0])\n # ProxyOverride is almost always a string\n proxyOverride = winreg.QueryValueEx(internetSettings,\n 'ProxyOverride')[0]\n except OSError:\n return False\n if not proxyEnable or not proxyOverride:\n return False\n\n # make a check value list from the registry entry: replace the\n # '<local>' string by the localhost entry and the corresponding\n # canonical entry.\n proxyOverride = proxyOverride.split(';')\n # now check if we match one of the registry values.\n for test in proxyOverride:\n if test == '<local>':\n if '.' 
not in host:\n return True\n test = test.replace(\".\", r\"\\.\") # mask dots\n test = test.replace(\"*\", r\".*\") # change glob sequence\n test = test.replace(\"?\", r\".\") # change glob char\n if re.match(test, host, re.I):\n return True\n return False\n\n def proxy_bypass(host): # noqa\n \"\"\"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n \"\"\"\n if getproxies_environment():\n return proxy_bypass_environment(host)\n else:\n return proxy_bypass_registry(host)\n\n\ndef dict_to_sequence(d):\n \"\"\"Returns an internal sequence dictionary update.\"\"\"\n\n if hasattr(d, 'items'):\n d = d.items()\n\n return d\n\n\ndef super_len(o):\n total_length = None\n current_position = 0\n\n if hasattr(o, '__len__'):\n total_length = len(o)\n\n elif hasattr(o, 'len'):\n total_length = o.len\n\n elif hasattr(o, 'fileno'):\n try:\n fileno = o.fileno()\n except (io.UnsupportedOperation, AttributeError):\n # AttributeError is a surprising exception, seeing as how we've just checked\n # that `hasattr(o, 'fileno')`. It happens for objects obtained via\n # `Tarfile.extractfile()`, per issue 5229.\n pass\n else:\n total_length = os.fstat(fileno).st_size\n\n # Having used fstat to determine the file length, we need to\n # confirm that this file was opened up in binary mode.\n if 'b' not in o.mode:\n warnings.warn((\n \"Requests has determined the content-length for this \"\n \"request using the binary size of the file: however, the \"\n \"file has been opened in text mode (i.e. without the 'b' \"\n \"flag in the mode). This may lead to an incorrect \"\n \"content-length. In Requests 3.0, support will be removed \"\n \"for files in text mode.\"),\n FileModeWarning\n )\n\n if hasattr(o, 'tell'):\n try:\n current_position = o.tell()\n except (OSError, IOError):\n # This can happen in some weird situations, such as when the file\n # is actually a special file descriptor like stdin. In this\n # instance, we don't know what the length is, so set it to zero and\n # let requests chunk it instead.\n if total_length is not None:\n current_position = total_length\n else:\n if hasattr(o, 'seek') and total_length is None:\n # StringIO and BytesIO have seek but no usable fileno\n try:\n # seek to end of file\n o.seek(0, 2)\n total_length = o.tell()\n\n # seek back to current position to support\n # partially read file-like objects\n o.seek(current_position or 0)\n except (OSError, IOError):\n total_length = 0\n\n if total_length is None:\n total_length = 0\n\n return max(0, total_length - current_position)\n\n\ndef get_netrc_auth(url, raise_errors=False):\n \"\"\"Returns the Requests tuple auth for a given url from netrc.\"\"\"\n\n netrc_file = os.environ.get('NETRC')\n if netrc_file is not None:\n netrc_locations = (netrc_file,)\n else:\n netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)\n\n try:\n from netrc import netrc, NetrcParseError\n\n netrc_path = None\n\n for f in netrc_locations:\n try:\n loc = os.path.expanduser(f)\n except KeyError:\n # os.path.expanduser can fail when $HOME is undefined and\n # getpwuid fails. See https://bugs.python.org/issue20164 &\n # https://github.com/psf/requests/issues/1846\n return\n\n if os.path.exists(loc):\n netrc_path = loc\n break\n\n # Abort early if there isn't one.\n if netrc_path is None:\n return\n\n ri = urlparse(url)\n\n # Strip port numbers from netloc. 
This weird `if...encode`` dance is\n # used for Python 3.2, which doesn't support unicode literals.\n splitstr = b':'\n if isinstance(url, str):\n splitstr = splitstr.decode('ascii')\n host = ri.netloc.split(splitstr)[0]\n\n try:\n _netrc = netrc(netrc_path).authenticators(host)\n if _netrc:\n # Return with login / password\n login_i = (0 if _netrc[0] else 1)\n return (_netrc[login_i], _netrc[2])\n except (NetrcParseError, IOError):\n # If there was a parsing error or a permissions issue reading the file,\n # we'll just skip netrc auth unless explicitly asked to raise errors.\n if raise_errors:\n raise\n\n # App Engine hackiness.\n except (ImportError, AttributeError):\n pass\n\n\ndef guess_filename(obj):\n \"\"\"Tries to guess the filename of the given object.\"\"\"\n name = getattr(obj, 'name', None)\n if (name and isinstance(name, basestring) and name[0] != '<' and\n name[-1] != '>'):\n return os.path.basename(name)\n\n\ndef extract_zipped_paths(path):\n \"\"\"Replace nonexistent paths that look like they refer to a member of a zip\n archive with the location of an extracted copy of the target, or else\n just return the provided path unchanged.\n \"\"\"\n if os.path.exists(path):\n # this is already a valid path, no need to do anything further\n return path\n\n # find the first valid part of the provided path and treat that as a zip archive\n # assume the rest of the path is the name of a member in the archive\n archive, member = os.path.split(path)\n while archive and not os.path.exists(archive):\n archive, prefix = os.path.split(archive)\n if not prefix:\n # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),\n # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users\n break\n member = '/'.join([prefix, member])\n\n if not zipfile.is_zipfile(archive):\n return path\n\n zip_file = zipfile.ZipFile(archive)\n if member not in zip_file.namelist():\n return path\n\n # we have a valid zip archive and a valid member of that archive\n tmp = tempfile.gettempdir()\n extracted_path = os.path.join(tmp, member.split('/')[-1])\n if not os.path.exists(extracted_path):\n # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition\n with atomic_open(extracted_path) as file_handler:\n file_handler.write(zip_file.read(member))\n return extracted_path\n\n\n@contextlib.contextmanager\ndef atomic_open(filename):\n \"\"\"Write a file to the disk in an atomic fashion\"\"\"\n replacer = os.rename if sys.version_info[0] == 2 else os.replace\n tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))\n try:\n with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:\n yield tmp_handler\n replacer(tmp_name, filename)\n except BaseException:\n os.remove(tmp_name)\n raise\n\n\ndef from_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. 
Unless it can not be represented as such, return an\n OrderedDict, e.g.,\n\n ::\n\n >>> from_key_val_list([('key', 'val')])\n OrderedDict([('key', 'val')])\n >>> from_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n >>> from_key_val_list({'key': 'val'})\n OrderedDict([('key', 'val')])\n\n :rtype: OrderedDict\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n return OrderedDict(value)\n\n\ndef to_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. If it can be, return a list of tuples, e.g.,\n\n ::\n\n >>> to_key_val_list([('key', 'val')])\n [('key', 'val')]\n >>> to_key_val_list({'key': 'val'})\n [('key', 'val')]\n >>> to_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n\n :rtype: list\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n if isinstance(value, Mapping):\n value = value.items()\n\n return list(value)\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_list_header(value):\n \"\"\"Parse lists as described by RFC 2068 Section 2.\n\n In particular, parse comma-separated lists where the elements of\n the list may include quoted-strings. A quoted-string could\n contain a comma. A non-quoted string could have quotes in the\n middle. Quotes are removed automatically after parsing.\n\n It basically works like :func:`parse_set_header` just that items\n may appear multiple times and case sensitivity is preserved.\n\n The return value is a standard :class:`list`:\n\n >>> parse_list_header('token, \"quoted value\"')\n ['token', 'quoted value']\n\n To create a header from the :class:`list` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a list header.\n :return: :class:`list`\n :rtype: list\n \"\"\"\n result = []\n for item in _parse_list_header(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_dict_header(value):\n \"\"\"Parse lists of key, value pairs as described by RFC 2068 Section 2 and\n convert them into a python dict:\n\n >>> d = parse_dict_header('foo=\"is a fish\", bar=\"as well\"')\n >>> type(d) is dict\n True\n >>> sorted(d.items())\n [('bar', 'as well'), ('foo', 'is a fish')]\n\n If there is no value for a key it will be `None`:\n\n >>> parse_dict_header('key_without_value')\n {'key_without_value': None}\n\n To create a header from the :class:`dict` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a dict header.\n :return: :class:`dict`\n :rtype: dict\n \"\"\"\n result = {}\n for item in _parse_list_header(value):\n if '=' not in item:\n result[item] = None\n continue\n name, value = item.split('=', 1)\n if value[:1] == value[-1:] == '\"':\n value = unquote_header_value(value[1:-1])\n result[name] = value\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef unquote_header_value(value, is_filename=False):\n r\"\"\"Unquotes a header value. 
(Reversal of :func:`quote_header_value`).\n This does not use the real unquoting but what browsers are actually\n using for quoting.\n\n :param value: the header value to unquote.\n :rtype: str\n \"\"\"\n if value and value[0] == value[-1] == '\"':\n # this is not the real unquoting, but fixing this so that the\n # RFC is met will result in bugs with internet explorer and\n # probably some other browsers as well. IE for example is\n # uploading files with \"C:\\foo\\bar.txt\" as filename\n value = value[1:-1]\n\n # if this is a filename and the starting characters look like\n # a UNC path, then just return the value without quotes. Using the\n # replace sequence below on a UNC path has the effect of turning\n # the leading double slash into a single slash and then\n # _fix_ie_filename() doesn't work correctly. See #458.\n if not is_filename or value[:2] != '\\\\\\\\':\n return value.replace('\\\\\\\\', '\\\\').replace('\\\\\"', '\"')\n return value\n\n\ndef dict_from_cookiejar(cj):\n \"\"\"Returns a key/value dictionary from a CookieJar.\n\n :param cj: CookieJar object to extract cookies from.\n :rtype: dict\n \"\"\"\n\n cookie_dict = {}\n\n for cookie in cj:\n cookie_dict[cookie.name] = cookie.value\n\n return cookie_dict\n\n\ndef add_dict_to_cookiejar(cj, cookie_dict):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cj: CookieJar to insert cookies into.\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :rtype: CookieJar\n \"\"\"\n\n return cookiejar_from_dict(cookie_dict, cj)\n\n\ndef get_encodings_from_content(content):\n \"\"\"Returns encodings from given content string.\n\n :param content: bytestring to extract encodings from.\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_encodings_from_content will be removed. For '\n 'more information, please see the discussion on issue #2266. 
(This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n charset_re = re.compile(r'<meta.*?charset=[\"\\']*(.+?)[\"\\'>]', flags=re.I)\n pragma_re = re.compile(r'<meta.*?content=[\"\\']*;?charset=(.+?)[\"\\'>]', flags=re.I)\n xml_re = re.compile(r'^<\\?xml.*?encoding=[\"\\']*(.+?)[\"\\'>]')\n\n return (charset_re.findall(content) +\n pragma_re.findall(content) +\n xml_re.findall(content))\n\n\ndef _parse_content_type_header(header):\n \"\"\"Returns content type and parameters from given header\n\n :param header: string\n :return: tuple containing content type and dictionary of\n parameters\n \"\"\"\n\n tokens = header.split(';')\n content_type, params = tokens[0].strip(), tokens[1:]\n params_dict = {}\n items_to_strip = \"\\\"' \"\n\n for param in params:\n param = param.strip()\n if param:\n key, value = param, True\n index_of_equals = param.find(\"=\")\n if index_of_equals != -1:\n key = param[:index_of_equals].strip(items_to_strip)\n value = param[index_of_equals + 1:].strip(items_to_strip)\n params_dict[key.lower()] = value\n return content_type, params_dict\n\n\ndef get_encoding_from_headers(headers):\n \"\"\"Returns encodings from given HTTP Header Dict.\n\n :param headers: dictionary to extract encoding from.\n :rtype: str\n \"\"\"\n\n content_type = headers.get('content-type')\n\n if not content_type:\n return None\n\n content_type, params = _parse_content_type_header(content_type)\n\n if 'charset' in params:\n return params['charset'].strip(\"'\\\"\")\n\n if 'text' in content_type:\n return 'ISO-8859-1'\n\n if 'application/json' in content_type:\n # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset\n return 'utf-8'\n\n\ndef stream_decode_response_unicode(iterator, r):\n \"\"\"Stream decodes a iterator.\"\"\"\n\n if r.encoding is None:\n for item in iterator:\n yield item\n return\n\n decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')\n for chunk in iterator:\n rv = decoder.decode(chunk)\n if rv:\n yield rv\n rv = decoder.decode(b'', final=True)\n if rv:\n yield rv\n\n\ndef iter_slices(string, slice_length):\n \"\"\"Iterate over slices of a string.\"\"\"\n pos = 0\n if slice_length is None or slice_length <= 0:\n slice_length = len(string)\n while pos < len(string):\n yield string[pos:pos + slice_length]\n pos += slice_length\n\n\ndef get_unicode_from_response(r):\n \"\"\"Returns the requested content back in unicode.\n\n :param r: Response object to get unicode content from.\n\n Tried:\n\n 1. charset from content-type\n 2. fall back and replace all unicode characters\n\n :rtype: str\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_unicode_from_response will be removed. For '\n 'more information, please see the discussion on issue #2266. (This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n tried_encodings = []\n\n # Try charset from content-type\n encoding = get_encoding_from_headers(r.headers)\n\n if encoding:\n try:\n return str(r.content, encoding)\n except UnicodeError:\n tried_encodings.append(encoding)\n\n # Fall back:\n try:\n return str(r.content, encoding, errors='replace')\n except TypeError:\n return r.content\n\n\n# The unreserved URI characters (RFC 3986)\nUNRESERVED_SET = frozenset(\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\" + \"0123456789-._~\")\n\n\ndef unquote_unreserved(uri):\n \"\"\"Un-escape any percent-escape sequences in a URI that are unreserved\n characters. 
This leaves all reserved, illegal and non-ASCII bytes encoded.\n\n :rtype: str\n \"\"\"\n parts = uri.split('%')\n for i in range(1, len(parts)):\n h = parts[i][0:2]\n if len(h) == 2 and h.isalnum():\n try:\n c = chr(int(h, 16))\n except ValueError:\n raise InvalidURL(\"Invalid percent-escape sequence: '%s'\" % h)\n\n if c in UNRESERVED_SET:\n parts[i] = c + parts[i][2:]\n else:\n parts[i] = '%' + parts[i]\n else:\n parts[i] = '%' + parts[i]\n return ''.join(parts)\n\n\ndef requote_uri(uri):\n \"\"\"Re-quote the given URI.\n\n This function passes the given URI through an unquote/quote cycle to\n ensure that it is fully and consistently quoted.\n\n :rtype: str\n \"\"\"\n safe_with_percent = \"!#$%&'()*+,/:;=?@[]~\"\n safe_without_percent = \"!#$&'()*+,/:;=?@[]~\"\n try:\n # Unquote only the unreserved characters\n # Then quote only illegal characters (do not quote reserved,\n # unreserved, or '%')\n return quote(unquote_unreserved(uri), safe=safe_with_percent)\n except InvalidURL:\n # We couldn't unquote the given URI, so let's try quoting it, but\n # there may be unquoted '%'s in the URI. We need to make sure they're\n # properly quoted so they do not cause issues elsewhere.\n return quote(uri, safe=safe_without_percent)\n\n\ndef address_in_network(ip, net):\n \"\"\"This function allows you to check if an IP belongs to a network subnet\n\n Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24\n returns False if ip = 192.168.1.1 and net = 192.168.100.0/24\n\n :rtype: bool\n \"\"\"\n ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]\n netaddr, bits = net.split('/')\n netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]\n network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask\n return (ipaddr & netmask) == (network & netmask)\n\n\ndef dotted_netmask(mask):\n \"\"\"Converts mask from /xx format to xxx.xxx.xxx.xxx\n\n Example: if mask is 24 function returns 255.255.255.0\n\n :rtype: str\n \"\"\"\n bits = 0xffffffff ^ (1 << 32 - mask) - 1\n return socket.inet_ntoa(struct.pack('>I', bits))\n\n\ndef is_ipv4_address(string_ip):\n \"\"\"\n :rtype: bool\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except socket.error:\n return False\n return True\n\n\ndef is_valid_cidr(string_network):\n \"\"\"\n Very simple check of the cidr format in no_proxy variable.\n\n :rtype: bool\n \"\"\"\n if string_network.count('/') == 1:\n try:\n mask = int(string_network.split('/')[1])\n except ValueError:\n return False\n\n if mask < 1 or mask > 32:\n return False\n\n try:\n socket.inet_aton(string_network.split('/')[0])\n except socket.error:\n return False\n else:\n return False\n return True\n\n\n@contextlib.contextmanager\ndef set_environ(env_name, value):\n \"\"\"Set the environment variable 'env_name' to 'value'\n\n Save previous value, yield, and then restore the previous value stored in\n the environment variable 'env_name'.\n\n If 'value' is None, do nothing\"\"\"\n value_changed = value is not None\n if value_changed:\n old_value = os.environ.get(env_name)\n os.environ[env_name] = value\n try:\n yield\n finally:\n if value_changed:\n if old_value is None:\n del os.environ[env_name]\n else:\n os.environ[env_name] = old_value\n\n\ndef should_bypass_proxies(url, no_proxy):\n \"\"\"\n Returns whether we should bypass proxies or not.\n\n :rtype: bool\n \"\"\"\n # Prioritize lowercase environment variables over uppercase\n # to keep a consistent behaviour with other http projects (curl, wget).\n get_proxy = lambda k: os.environ.get(k) or 
os.environ.get(k.upper())\n\n # First check whether no_proxy is defined. If it is, check that the URL\n # we're getting isn't in the no_proxy list.\n no_proxy_arg = no_proxy\n if no_proxy is None:\n no_proxy = get_proxy('no_proxy')\n parsed = urlparse(url)\n\n if parsed.hostname is None:\n # URLs don't always have hostnames, e.g. file:/// urls.\n return True\n\n if no_proxy:\n # We need to check whether we match here. We need to see if we match\n # the end of the hostname, both with and without the port.\n no_proxy = (\n host for host in no_proxy.replace(' ', '').split(',') if host\n )\n\n if is_ipv4_address(parsed.hostname):\n for proxy_ip in no_proxy:\n if is_valid_cidr(proxy_ip):\n if address_in_network(parsed.hostname, proxy_ip):\n return True\n elif parsed.hostname == proxy_ip:\n # If no_proxy ip was defined in plain IP notation instead of cidr notation &\n # matches the IP of the index\n return True\n else:\n host_with_port = parsed.hostname\n if parsed.port:\n host_with_port += ':{}'.format(parsed.port)\n\n for host in no_proxy:\n if parsed.hostname.endswith(host) or host_with_port.endswith(host):\n # The URL does match something in no_proxy, so we don't want\n # to apply the proxies on this URL.\n return True\n\n with set_environ('no_proxy', no_proxy_arg):\n # parsed.hostname can be `None` in cases such as a file URI.\n try:\n bypass = proxy_bypass(parsed.hostname)\n except (TypeError, socket.gaierror):\n bypass = False\n\n if bypass:\n return True\n\n return False\n\n\ndef get_environ_proxies(url, no_proxy=None):\n \"\"\"\n Return a dict of environment proxies.\n\n :rtype: dict\n \"\"\"\n if should_bypass_proxies(url, no_proxy=no_proxy):\n return {}\n else:\n return getproxies()\n\n\ndef select_proxy(url, proxies):\n \"\"\"Select a proxy for the url, if applicable.\n\n :param url: The url being for the request\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n \"\"\"\n proxies = proxies or {}\n urlparts = urlparse(url)\n if urlparts.hostname is None:\n return proxies.get(urlparts.scheme, proxies.get('all'))\n\n proxy_keys = [\n urlparts.scheme + '://' + urlparts.hostname,\n urlparts.scheme,\n 'all://' + urlparts.hostname,\n 'all',\n ]\n proxy = None\n for proxy_key in proxy_keys:\n if proxy_key in proxies:\n proxy = proxies[proxy_key]\n break\n\n return proxy\n\n\ndef resolve_proxies(request, proxies, trust_env=True):\n \"\"\"This method takes proxy information from a request and configuration\n input to resolve a mapping of target proxies. 
This will consider settings\n such a NO_PROXY to strip proxy configurations.\n\n :param request: Request or PreparedRequest\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n :param trust_env: Boolean declaring whether to trust environment configs\n\n :rtype: dict\n \"\"\"\n proxies = proxies if proxies is not None else {}\n url = request.url\n scheme = urlparse(url).scheme\n no_proxy = proxies.get('no_proxy')\n new_proxies = proxies.copy()\n\n if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):\n environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n\n proxy = environ_proxies.get(scheme, environ_proxies.get('all'))\n\n if proxy:\n new_proxies.setdefault(scheme, proxy)\n return new_proxies\n\n\ndef default_user_agent(name=\"python-requests\"):\n \"\"\"\n Return a string representing the default user agent.\n\n :rtype: str\n \"\"\"\n return '%s/%s' % (name, __version__)\n\n\ndef default_headers():\n \"\"\"\n :rtype: requests.structures.CaseInsensitiveDict\n \"\"\"\n return CaseInsensitiveDict({\n 'User-Agent': default_user_agent(),\n 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,\n 'Accept': '*/*',\n 'Connection': 'keep-alive',\n })\n\n\ndef parse_header_links(value):\n \"\"\"Return a list of parsed link headers proxies.\n\n i.e. Link: <http:/.../front.jpeg>; rel=front; type=\"image/jpeg\",<http://.../back.jpeg>; rel=back;type=\"image/jpeg\"\n\n :rtype: list\n \"\"\"\n\n links = []\n\n replace_chars = ' \\'\"'\n\n value = value.strip(replace_chars)\n if not value:\n return links\n\n for val in re.split(', *<', value):\n try:\n url, params = val.split(';', 1)\n except ValueError:\n url, params = val, ''\n\n link = {'url': url.strip('<> \\'\"')}\n\n for param in params.split(';'):\n try:\n key, value = param.split('=')\n except ValueError:\n break\n\n link[key.strip(replace_chars)] = value.strip(replace_chars)\n\n links.append(link)\n\n return links\n\n\n# Null bytes; no need to recreate these on each call to guess_json_utf\n_null = '\\x00'.encode('ascii') # encoding to ASCII for Python 3\n_null2 = _null * 2\n_null3 = _null * 3\n\n\ndef guess_json_utf(data):\n \"\"\"\n :rtype: str\n \"\"\"\n # JSON always starts with two ASCII characters, so detection is as\n # easy as counting the nulls and from their location and count\n # determine the encoding. 
Also detect a BOM, if present.\n sample = data[:4]\n if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):\n return 'utf-32' # BOM included\n if sample[:3] == codecs.BOM_UTF8:\n return 'utf-8-sig' # BOM included, MS style (discouraged)\n if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):\n return 'utf-16' # BOM included\n nullcount = sample.count(_null)\n if nullcount == 0:\n return 'utf-8'\n if nullcount == 2:\n if sample[::2] == _null2: # 1st and 3rd are null\n return 'utf-16-be'\n if sample[1::2] == _null2: # 2nd and 4th are null\n return 'utf-16-le'\n # Did not detect 2 valid UTF-16 ascii-range characters\n if nullcount == 3:\n if sample[:3] == _null3:\n return 'utf-32-be'\n if sample[1:] == _null3:\n return 'utf-32-le'\n # Did not detect a valid UTF-32 ascii-range character\n return None\n\n\ndef prepend_scheme_if_needed(url, new_scheme):\n \"\"\"Given a URL that may or may not have a scheme, prepend the given scheme.\n Does not replace a present scheme with the one provided as an argument.\n\n :rtype: str\n \"\"\"\n parsed = parse_url(url)\n scheme, auth, host, port, path, query, fragment = parsed\n\n # A defect in urlparse determines that there isn't a netloc present in some\n # urls. We previously assumed parsing was overly cautious, and swapped the\n # netloc and path. Due to a lack of tests on the original defect, this is\n # maintained with parse_url for backwards compatibility.\n netloc = parsed.netloc\n if not netloc:\n netloc, path = path, netloc\n\n if scheme is None:\n scheme = new_scheme\n if path is None:\n path = ''\n\n return urlunparse((scheme, netloc, path, '', query, fragment))\n\n\ndef get_auth_from_url(url):\n \"\"\"Given a url with authentication components, extract them into a tuple of\n username,password.\n\n :rtype: (str,str)\n \"\"\"\n parsed = urlparse(url)\n\n try:\n auth = (unquote(parsed.username), unquote(parsed.password))\n except (AttributeError, TypeError):\n auth = ('', '')\n\n return auth\n\n\n# Moved outside of function to avoid recompile every call\n_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\\\S[^\\\\r\\\\n]*$|^$')\n_CLEAN_HEADER_REGEX_STR = re.compile(r'^\\S[^\\r\\n]*$|^$')\n\n\ndef check_header_validity(header):\n \"\"\"Verifies that header value is a string which doesn't contain\n leading whitespace or return characters. 
This prevents unintended\n header injection.\n\n :param header: tuple, in the format (name, value).\n \"\"\"\n name, value = header\n\n if isinstance(value, bytes):\n pat = _CLEAN_HEADER_REGEX_BYTE\n else:\n pat = _CLEAN_HEADER_REGEX_STR\n try:\n if not pat.match(value):\n raise InvalidHeader(\"Invalid return character or leading space in header: %s\" % name)\n except TypeError:\n raise InvalidHeader(\"Value for header {%s: %s} must be of type str or \"\n \"bytes, not %s\" % (name, value, type(value)))\n\n\ndef urldefragauth(url):\n \"\"\"\n Given a url remove the fragment and the authentication part.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url)\n\n # see func:`prepend_scheme_if_needed`\n if not netloc:\n netloc, path = path, netloc\n\n netloc = netloc.rsplit('@', 1)[-1]\n\n return urlunparse((scheme, netloc, path, params, query, ''))\n\n\ndef rewind_body(prepared_request):\n \"\"\"Move file pointer back to its recorded starting position\n so it can be read again on redirect.\n \"\"\"\n body_seek = getattr(prepared_request.body, 'seek', None)\n if body_seek is not None and isinstance(prepared_request._body_position, integer_types):\n try:\n body_seek(prepared_request._body_position)\n except (IOError, OSError):\n raise UnrewindableBodyError(\"An error occurred when rewinding request \"\n \"body for redirect.\")\n else:\n raise UnrewindableBodyError(\"Unable to rewind request body for redirect.\")\n",
"path": "requests/utils.py"
},
{
"content": "# -*- coding: utf-8 -*-\n\nimport os\nimport copy\nimport filecmp\nfrom io import BytesIO\nimport tarfile\nimport zipfile\nfrom collections import deque\n\nimport pytest\nfrom requests import compat\nfrom requests.cookies import RequestsCookieJar\nfrom requests.structures import CaseInsensitiveDict\nfrom requests.utils import (\n address_in_network, dotted_netmask, extract_zipped_paths,\n get_auth_from_url, _parse_content_type_header, get_encoding_from_headers,\n get_encodings_from_content, get_environ_proxies,\n guess_filename, guess_json_utf, is_ipv4_address,\n is_valid_cidr, iter_slices, parse_dict_header,\n parse_header_links, prepend_scheme_if_needed,\n requote_uri, select_proxy, should_bypass_proxies, super_len,\n to_key_val_list, to_native_string,\n unquote_header_value, unquote_unreserved,\n urldefragauth, add_dict_to_cookiejar, set_environ)\nfrom requests._internal_utils import unicode_is_ascii\n\nfrom .compat import StringIO, cStringIO\n\n\nclass TestSuperLen:\n\n @pytest.mark.parametrize(\n 'stream, value', (\n (StringIO.StringIO, 'Test'),\n (BytesIO, b'Test'),\n pytest.param(cStringIO, 'Test',\n marks=pytest.mark.skipif('cStringIO is None')),\n ))\n def test_io_streams(self, stream, value):\n \"\"\"Ensures that we properly deal with different kinds of IO streams.\"\"\"\n assert super_len(stream()) == 0\n assert super_len(stream(value)) == 4\n\n def test_super_len_correctly_calculates_len_of_partially_read_file(self):\n \"\"\"Ensure that we handle partially consumed file like objects.\"\"\"\n s = StringIO.StringIO()\n s.write('foobarbogus')\n assert super_len(s) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_handles_files_raising_weird_errors_in_tell(self, error):\n \"\"\"If tell() raises errors, assume the cursor is at position zero.\"\"\"\n class BoomFile(object):\n def __len__(self):\n return 5\n\n def tell(self):\n raise error()\n\n assert super_len(BoomFile()) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_tell_ioerror(self, error):\n \"\"\"Ensure that if tell gives an IOError super_len doesn't fail\"\"\"\n class NoLenBoomFile(object):\n def tell(self):\n raise error()\n\n def seek(self, offset, whence):\n pass\n\n assert super_len(NoLenBoomFile()) == 0\n\n def test_string(self):\n assert super_len('Test') == 4\n\n @pytest.mark.parametrize(\n 'mode, warnings_num', (\n ('r', 1),\n ('rb', 0),\n ))\n def test_file(self, tmpdir, mode, warnings_num, recwarn):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n with file_obj.open(mode) as fd:\n assert super_len(fd) == 4\n assert len(recwarn) == warnings_num\n\n def test_tarfile_member(self, tmpdir):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n\n tar_obj = str(tmpdir.join('test.tar'))\n with tarfile.open(tar_obj, 'w') as tar:\n tar.add(str(file_obj), arcname='test.txt')\n\n with tarfile.open(tar_obj) as tar:\n member = tar.extractfile('test.txt')\n assert super_len(member) == 4\n\n def test_super_len_with__len__(self):\n foo = [1,2,3,4]\n len_foo = super_len(foo)\n assert len_foo == 4\n\n def test_super_len_with_no__len__(self):\n class LenFile(object):\n def __init__(self):\n self.len = 5\n\n assert super_len(LenFile()) == 5\n\n def test_super_len_with_tell(self):\n foo = StringIO.StringIO('12345')\n assert super_len(foo) == 5\n foo.read(2)\n assert super_len(foo) == 3\n\n def test_super_len_with_fileno(self):\n with open(__file__, 'rb') as f:\n length = super_len(f)\n file_data = f.read()\n assert length == 
len(file_data)\n\n def test_super_len_with_no_matches(self):\n \"\"\"Ensure that objects without any length methods default to 0\"\"\"\n assert super_len(object()) == 0\n\n\nclass TestToKeyValList:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n ([('key', 'val')], [('key', 'val')]),\n ((('key', 'val'), ), [('key', 'val')]),\n ({'key': 'val'}, [('key', 'val')]),\n (None, None)\n ))\n def test_valid(self, value, expected):\n assert to_key_val_list(value) == expected\n\n def test_invalid(self):\n with pytest.raises(ValueError):\n to_key_val_list('string')\n\n\nclass TestUnquoteHeaderValue:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n (None, None),\n ('Test', 'Test'),\n ('\"Test\"', 'Test'),\n ('\"Test\\\\\\\\\"', 'Test\\\\'),\n ('\"\\\\\\\\Comp\\\\Res\"', '\\\\Comp\\\\Res'),\n ))\n def test_valid(self, value, expected):\n assert unquote_header_value(value) == expected\n\n def test_is_filename(self):\n assert unquote_header_value('\"\\\\\\\\Comp\\\\Res\"', True) == '\\\\\\\\Comp\\\\Res'\n\n\nclass TestGetEnvironProxies:\n \"\"\"Ensures that IP addresses are correctly matches with ranges\n in no_proxy variable.\n \"\"\"\n\n @pytest.fixture(autouse=True, params=['no_proxy', 'NO_PROXY'])\n def no_proxy(self, request, monkeypatch):\n monkeypatch.setenv(request.param, '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1')\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_not_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) != {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_bypass_no_proxy_keyword(self, url):\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_not_bypass_no_proxy_keyword(self, url, monkeypatch):\n # This is testing that the 'no_proxy' argument overrides the\n # environment variable 'no_proxy'\n monkeypatch.setenv('http_proxy', 'http://proxy.example.com:3128/')\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) != {}\n\n\nclass TestIsIPv4Address:\n\n def test_valid(self):\n assert is_ipv4_address('8.8.8.8')\n\n @pytest.mark.parametrize('value', ('8.8.8.8.8', 'localhost.localdomain'))\n def test_invalid(self, value):\n assert not is_ipv4_address(value)\n\n\nclass TestIsValidCIDR:\n\n def test_valid(self):\n assert is_valid_cidr('192.168.1.0/24')\n\n @pytest.mark.parametrize(\n 'value', (\n '8.8.8.8',\n '192.168.1.0/a',\n '192.168.1.0/128',\n '192.168.1.0/-1',\n '192.168.1.999/24',\n ))\n def test_invalid(self, value):\n assert not is_valid_cidr(value)\n\n\nclass TestAddressInNetwork:\n\n def test_valid(self):\n assert address_in_network('192.168.1.1', '192.168.1.0/24')\n\n def test_invalid(self):\n assert not address_in_network('172.16.0.1', '192.168.1.0/24')\n\n\nclass TestGuessFilename:\n\n @pytest.mark.parametrize(\n 'value', (1, type('Fake', (object,), {'name': 1})()),\n )\n def 
test_guess_filename_invalid(self, value):\n assert guess_filename(value) is None\n\n @pytest.mark.parametrize(\n 'value, expected_type', (\n (b'value', compat.bytes),\n (b'value'.decode('utf-8'), compat.str)\n ))\n def test_guess_filename_valid(self, value, expected_type):\n obj = type('Fake', (object,), {'name': value})()\n result = guess_filename(obj)\n assert result == value\n assert isinstance(result, expected_type)\n\n\nclass TestExtractZippedPaths:\n\n @pytest.mark.parametrize(\n 'path', (\n '/',\n __file__,\n pytest.__file__,\n '/etc/invalid/location',\n ))\n def test_unzipped_paths_unchanged(self, path):\n assert path == extract_zipped_paths(path)\n\n def test_zipped_paths_extracted(self, tmpdir):\n zipped_py = tmpdir.join('test.zip')\n with zipfile.ZipFile(zipped_py.strpath, 'w') as f:\n f.write(__file__)\n\n _, name = os.path.splitdrive(__file__)\n zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r'\\/'))\n extracted_path = extract_zipped_paths(zipped_path)\n\n assert extracted_path != zipped_path\n assert os.path.exists(extracted_path)\n assert filecmp.cmp(extracted_path, __file__)\n\n def test_invalid_unc_path(self):\n path = r\"\\\\localhost\\invalid\\location\"\n assert extract_zipped_paths(path) == path\n\n\nclass TestContentEncodingDetection:\n\n def test_none(self):\n encodings = get_encodings_from_content('')\n assert not len(encodings)\n\n @pytest.mark.parametrize(\n 'content', (\n # HTML5 meta charset attribute\n '<meta charset=\"UTF-8\">',\n # HTML4 pragma directive\n '<meta http-equiv=\"Content-type\" content=\"text/html;charset=UTF-8\">',\n # XHTML 1.x served with text/html MIME type\n '<meta http-equiv=\"Content-type\" content=\"text/html;charset=UTF-8\" />',\n # XHTML 1.x served as XML\n '<?xml version=\"1.0\" encoding=\"UTF-8\"?>',\n ))\n def test_pragmas(self, content):\n encodings = get_encodings_from_content(content)\n assert len(encodings) == 1\n assert encodings[0] == 'UTF-8'\n\n def test_precedence(self):\n content = '''\n <?xml version=\"1.0\" encoding=\"XML\"?>\n <meta charset=\"HTML5\">\n <meta http-equiv=\"Content-type\" content=\"text/html;charset=HTML4\" />\n '''.strip()\n assert get_encodings_from_content(content) == ['HTML5', 'HTML4', 'XML']\n\n\nclass TestGuessJSONUTF:\n\n @pytest.mark.parametrize(\n 'encoding', (\n 'utf-32', 'utf-8-sig', 'utf-16', 'utf-8', 'utf-16-be', 'utf-16-le',\n 'utf-32-be', 'utf-32-le'\n ))\n def test_encoded(self, encoding):\n data = '{}'.encode(encoding)\n assert guess_json_utf(data) == encoding\n\n def test_bad_utf_like_encoding(self):\n assert guess_json_utf(b'\\x00\\x00\\x00\\x00') is None\n\n @pytest.mark.parametrize(\n ('encoding', 'expected'), (\n ('utf-16-be', 'utf-16'),\n ('utf-16-le', 'utf-16'),\n ('utf-32-be', 'utf-32'),\n ('utf-32-le', 'utf-32')\n ))\n def test_guess_by_bom(self, encoding, expected):\n data = u'\\ufeff{}'.encode(encoding)\n assert guess_json_utf(data) == expected\n\n\nUSER = PASSWORD = \"%!*'();:@&=+$,/?#[] \"\nENCODED_USER = compat.quote(USER, '')\nENCODED_PASSWORD = compat.quote(PASSWORD, '')\n\n\n@pytest.mark.parametrize(\n 'url, auth', (\n (\n 'http://' + ENCODED_USER + ':' + ENCODED_PASSWORD + '@' +\n 'request.com/url.html#test',\n (USER, PASSWORD)\n ),\n (\n 'http://user:pass@complex.url.com/path?query=yes',\n ('user', 'pass')\n ),\n (\n 'http://user:pass%20pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user:pass pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user%25user:pass@complex.url.com/path?query=yes',\n 
('user%user', 'pass')\n ),\n (\n 'http://user:pass%23pass@complex.url.com/path?query=yes',\n ('user', 'pass#pass')\n ),\n (\n 'http://complex.url.com/path?query=yes',\n ('', '')\n ),\n ))\ndef test_get_auth_from_url(url, auth):\n assert get_auth_from_url(url) == auth\n\n\n@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Ensure requoting doesn't break expectations\n 'http://example.com/fiz?buz=%25ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n (\n # Ensure we handle unquoted percent signs in redirects\n 'http://example.com/fiz?buz=%ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n ))\ndef test_requote_uri_with_unquoted_percents(uri, expected):\n \"\"\"See: https://github.com/psf/requests/issues/2356\"\"\"\n assert requote_uri(uri) == expected\n\n\n@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Illegal bytes\n 'http://example.com/?a=%--',\n 'http://example.com/?a=%--',\n ),\n (\n # Reserved characters\n 'http://example.com/?a=%300',\n 'http://example.com/?a=00',\n )\n ))\ndef test_unquote_unreserved(uri, expected):\n assert unquote_unreserved(uri) == expected\n\n\n@pytest.mark.parametrize(\n 'mask, expected', (\n (8, '255.0.0.0'),\n (24, '255.255.255.0'),\n (25, '255.255.255.128'),\n ))\ndef test_dotted_netmask(mask, expected):\n assert dotted_netmask(mask) == expected\n\n\nhttp_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy'}\nall_proxies = {'all': 'socks5://http.proxy',\n 'all://some.host': 'socks5://some.host.proxy'}\nmixed_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy',\n 'all': 'socks5://http.proxy'}\n@pytest.mark.parametrize(\n 'url, expected, proxies', (\n ('hTTp://u:p@Some.Host/path', 'http://some.host.proxy', http_proxies),\n ('hTTp://u:p@Other.Host/path', 'http://http.proxy', http_proxies),\n ('hTTp:///path', 'http://http.proxy', http_proxies),\n ('hTTps://Other.Host', None, http_proxies),\n ('file:///etc/motd', None, http_proxies),\n\n ('hTTp://u:p@Some.Host/path', 'socks5://some.host.proxy', all_proxies),\n ('hTTp://u:p@Other.Host/path', 'socks5://http.proxy', all_proxies),\n ('hTTp:///path', 'socks5://http.proxy', all_proxies),\n ('hTTps://Other.Host', 'socks5://http.proxy', all_proxies),\n\n ('http://u:p@other.host/path', 'http://http.proxy', mixed_proxies),\n ('http://u:p@some.host/path', 'http://some.host.proxy', mixed_proxies),\n ('https://u:p@other.host/path', 'socks5://http.proxy', mixed_proxies),\n ('https://u:p@some.host/path', 'socks5://http.proxy', mixed_proxies),\n ('https://', 'socks5://http.proxy', mixed_proxies),\n # XXX: unsure whether this is reasonable behavior\n ('file:///etc/motd', 'socks5://http.proxy', all_proxies),\n ))\ndef test_select_proxies(url, expected, proxies):\n \"\"\"Make sure we can select per-host proxies correctly.\"\"\"\n assert select_proxy(url, proxies) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('foo=\"is a fish\", bar=\"as well\"', {'foo': 'is a fish', 'bar': 'as well'}),\n ('key_without_value', {'key_without_value': None})\n ))\ndef test_parse_dict_header(value, expected):\n assert parse_dict_header(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n 'application/xml',\n ('application/xml', {})\n ),\n (\n 'application/json ; charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'application/json ; Charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'text/plain',\n ('text/plain', {})\n ),\n (\n 'multipart/form-data; boundary = 
something ; boundary2=\\'something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; boundary2=\"something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \\'boundary2=something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \"boundary2=something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'application/json ; ; ',\n ('application/json', {})\n )\n ))\ndef test__parse_content_type_header(value, expected):\n assert _parse_content_type_header(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n CaseInsensitiveDict(),\n None\n ),\n (\n CaseInsensitiveDict({'content-type': 'application/json; charset=utf-8'}),\n 'utf-8'\n ),\n (\n CaseInsensitiveDict({'content-type': 'text/plain'}),\n 'ISO-8859-1'\n ),\n ))\ndef test_get_encoding_from_headers(value, expected):\n assert get_encoding_from_headers(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, length', (\n ('', 0),\n ('T', 1),\n ('Test', 4),\n ('Cont', 0),\n ('Other', -5),\n ('Content', None),\n ))\ndef test_iter_slices(value, length):\n if length is None or (length <= 0 and len(value) > 0):\n # Reads all content at once\n assert len(list(iter_slices(value, length))) == 1\n else:\n assert len(list(iter_slices(value, 1))) == length\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n '<http:/.../front.jpeg>; rel=front; type=\"image/jpeg\"',\n [{'url': 'http:/.../front.jpeg', 'rel': 'front', 'type': 'image/jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>;',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>; type=\"image/jpeg\",<http://.../back.jpeg>;',\n [\n {'url': 'http:/.../front.jpeg', 'type': 'image/jpeg'},\n {'url': 'http://.../back.jpeg'}\n ]\n ),\n (\n '',\n []\n ),\n ))\ndef test_parse_header_links(value, expected):\n assert parse_header_links(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('example.com/path', 'http://example.com/path'),\n ('//example.com/path', 'http://example.com/path'),\n ('example.com:80', 'http://example.com:80'),\n ))\ndef test_prepend_scheme_if_needed(value, expected):\n assert prepend_scheme_if_needed(value, 'http') == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('T', 'T'),\n (b'T', 'T'),\n (u'T', 'T'),\n ))\ndef test_to_native_string(value, expected):\n assert to_native_string(value) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://u:p@example.com/path?a=1#test', 'http://example.com/path?a=1'),\n ('http://example.com/path', 'http://example.com/path'),\n ('//u:p@example.com/path', '//example.com/path'),\n ('//example.com/path', '//example.com/path'),\n ('example.com/path', '//example.com/path'),\n ('scheme:u:p@example.com/path', 'scheme://example.com/path'),\n ))\ndef test_urldefragauth(url, expected):\n assert urldefragauth(url) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', 
True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://google.com:6000/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ('file:///some/path/on/disk', True),\n ))\ndef test_should_bypass_proxies(url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not\n \"\"\"\n monkeypatch.setenv('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n monkeypatch.setenv('NO_PROXY', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n assert should_bypass_proxies(url, no_proxy=None) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://172.16.1.1/', '172.16.1.1'),\n ('http://172.16.1.1:5000/', '172.16.1.1'),\n ('http://user:pass@172.16.1.1', '172.16.1.1'),\n ('http://user:pass@172.16.1.1:5000', '172.16.1.1'),\n ('http://hostname/', 'hostname'),\n ('http://hostname:5000/', 'hostname'),\n ('http://user:pass@hostname', 'hostname'),\n ('http://user:pass@hostname:5000', 'hostname'),\n ))\ndef test_should_bypass_proxies_pass_only_hostname(url, expected, mocker):\n \"\"\"The proxy_bypass function should be called with a hostname or IP without\n a port number or auth credentials.\n \"\"\"\n proxy_bypass = mocker.patch('requests.utils.proxy_bypass')\n should_bypass_proxies(url, no_proxy=None)\n proxy_bypass.assert_called_once_with(expected)\n\n\n@pytest.mark.parametrize(\n 'cookiejar', (\n compat.cookielib.CookieJar(),\n RequestsCookieJar()\n ))\ndef test_add_dict_to_cookiejar(cookiejar):\n \"\"\"Ensure add_dict_to_cookiejar works for\n non-RequestsCookieJar CookieJars\n \"\"\"\n cookiedict = {'test': 'cookies',\n 'good': 'cookies'}\n cj = add_dict_to_cookiejar(cookiejar, cookiedict)\n cookies = {cookie.name: cookie.value for cookie in cj}\n assert cookiedict == cookies\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (u'test', True),\n (u'æíöû', False),\n (u'ジェーピーニック', False),\n )\n)\ndef test_unicode_is_ascii(value, expected):\n assert unicode_is_ascii(value) is expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ))\ndef test_should_bypass_proxies_no_proxy(\n url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not using the 'no_proxy' argument\n \"\"\"\n no_proxy = '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1'\n # Test 'no_proxy' argument\n assert should_bypass_proxies(url, no_proxy=no_proxy) == expected\n\n\n@pytest.mark.skipif(os.name != 'nt', reason='Test only on Windows')\n@pytest.mark.parametrize(\n 'url, expected, override', (\n ('http://192.168.0.1:5000/', True, None),\n ('http://192.168.0.1/', True, None),\n ('http://172.16.1.1/', True, None),\n ('http://172.16.1.1:5000/', True, None),\n ('http://localhost.localdomain:5000/v1.0/', True, None),\n ('http://172.16.1.22/', False, None),\n ('http://172.16.1.22:5000/', False, None),\n ('http://google.com:5000/v1.0/', False, None),\n ('http://mylocalhostname:5000/v1.0/', True, '<local>'),\n ('http://192.168.0.1/', False, ''),\n ))\ndef test_should_bypass_proxies_win_registry(url, expected, override,\n monkeypatch):\n 
\"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not with Windows registry settings\n \"\"\"\n if override is None:\n override = '192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1'\n if compat.is_py3:\n import winreg\n else:\n import _winreg as winreg\n\n class RegHandle:\n def Close(self):\n pass\n\n ie_settings = RegHandle()\n proxyEnableValues = deque([1, \"1\"])\n\n def OpenKey(key, subkey):\n return ie_settings\n\n def QueryValueEx(key, value_name):\n if key is ie_settings:\n if value_name == 'ProxyEnable':\n # this could be a string (REG_SZ) or a 32-bit number (REG_DWORD)\n proxyEnableValues.rotate()\n return [proxyEnableValues[0]]\n elif value_name == 'ProxyOverride':\n return [override]\n\n monkeypatch.setenv('http_proxy', '')\n monkeypatch.setenv('https_proxy', '')\n monkeypatch.setenv('ftp_proxy', '')\n monkeypatch.setenv('no_proxy', '')\n monkeypatch.setenv('NO_PROXY', '')\n monkeypatch.setattr(winreg, 'OpenKey', OpenKey)\n monkeypatch.setattr(winreg, 'QueryValueEx', QueryValueEx)\n assert should_bypass_proxies(url, None) == expected\n\n\n@pytest.mark.parametrize(\n 'env_name, value', (\n ('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('no_proxy', None),\n ('a_new_key', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('a_new_key', None),\n ))\ndef test_set_environ(env_name, value):\n \"\"\"Tests set_environ will set environ values and will restore the environ.\"\"\"\n environ_copy = copy.deepcopy(os.environ)\n with set_environ(env_name, value):\n assert os.environ.get(env_name) == value\n\n assert os.environ == environ_copy\n\n\ndef test_set_environ_raises_exception():\n \"\"\"Tests set_environ will raise exceptions in context when the\n value parameter is None.\"\"\"\n with pytest.raises(Exception) as exception:\n with set_environ('test1', None):\n raise Exception('Expected exception')\n\n assert 'Expected exception' in str(exception.value)\n",
"path": "tests/test_utils.py"
}
] | 13_6 | python | import sys
import pytest
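# URLs that already carry an explicit http scheme -- with or without embedded
# user:pass credentials in the authority -- must pass through
# prepend_scheme_if_needed completely unchanged.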
@pytest.mark.parametrize(
"value, expected",
(
(
'http://user:pass@example.com/path?query',
'http://user:pass@example.com/path?query'
),
(
'http://user@example.com/path?query',
'http://user@example.com/path?query'
)
),
)
def test_prepend_scheme_if_needed(value, expected):
from requests.utils import prepend_scheme_if_needed
assert prepend_scheme_if_needed(value, "http") == expected
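# Entry point so the harness can execute this file directly and read the
# outcome from the process exit code.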
def main():
# Run the pytest tests programmatically
exit_code = pytest.main(["-v", __file__])
# Exit with status code 1 if any test fails, otherwise 0
if exit_code != 0:
sys.exit(1)
else:
sys.exit(0)
if __name__ == '__main__':
main()
|
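For context, a minimal sketch of the behavior the test script above pins down; demo_prepend_scheme and its urlparse-based check are illustrative assumptions, not the library's implementation:

from urllib.parse import urlparse

def demo_prepend_scheme(url: str, new_scheme: str) -> str:
    # Only attach a scheme when the URL does not already declare one, so
    # credentials and query strings in scheme-qualified URLs survive intact.
    if urlparse(url).scheme:
        return url
    return f"{new_scheme}://{url}"

assert demo_prepend_scheme('http://user:pass@example.com/path?query', 'http') == 'http://user:pass@example.com/path?query'
assert demo_prepend_scheme('example.com/path', 'http') == 'http://example.com/path'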
https://github.com/teamqurrent/requests | Improve the error handling in the `super_len` function in `requests/utils.py`: update it to also catch the `AttributeError` that some file-like objects, such as members returned by `Tarfile.extractfile()`, raise when their `fileno` method is called. | e77dd8d | -e .[socks]
pytest
pytest-cov
pytest-httpbin==1.0.0
pytest-mock
httpbin==0.7.0
trustme
wheel
chardet>=3.0.2,<3.1.0
idna>=2.5,<2.8
urllib3>=1.21.1,<1.24
certifi>=2017.4.17
# Flask Stack
Flask>1.0,<2.0
markupsafe<2.1
| python3.9 | 2d2447e2 | diff --git a/HISTORY.md b/HISTORY.md
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -11,6 +11,9 @@ dev
backwards compatible as it inherits from previously thrown exceptions.
Can be caught from `requests.exceptions.RequestException` as well.
+- Catch `AttributeError` when calculating length of files obtained by
+ `Tarfile.extractfile()`
+
2.26.0 (2021-07-13)
-------------------
@@ -1702,7 +1705,7 @@ This is not a backwards compatible change.
- Automatic Authentication API Change
- Smarter Query URL Parameterization
- Allow file uploads and POST data together
--
+-
New Authentication Manager System
@@ -1721,7 +1724,7 @@ This is not a backwards compatible change.
0.2.3 (2011-02-15)
------------------
--
+-
New HTTPHandling Methods
diff --git a/requests/utils.py b/requests/utils.py
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -124,7 +124,10 @@ def super_len(o):
elif hasattr(o, 'fileno'):
try:
fileno = o.fileno()
- except io.UnsupportedOperation:
+ except (io.UnsupportedOperation, AttributeError):
+ # AttributeError is a surprising exception, seeing as how we've just checked
+ # that `hasattr(o, 'fileno')`. It happens for objects obtained via
+ # `Tarfile.extractfile()`, per issue 5229.
pass
else:
total_length = os.fstat(fileno).st_size
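Read as plain Python, the patched branch reduces to the guard below (an illustrative sketch; length_via_fileno is a hypothetical name, not part of requests):

import io
import os

def length_via_fileno(o):
    # hasattr(o, 'fileno') alone is not sufficient: objects returned by
    # Tarfile.extractfile() expose a fileno attribute whose call can still
    # raise AttributeError (issue 5229), so both exceptions are swallowed
    # and the caller falls back to its other length heuristics.
    if hasattr(o, 'fileno'):
        try:
            fileno = o.fileno()
        except (io.UnsupportedOperation, AttributeError):
            return None
        return os.fstat(fileno).st_size
    return None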
diff --git a/tests/test_utils.py b/tests/test_utils.py
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -4,6 +4,7 @@ import os
import copy
import filecmp
from io import BytesIO
+import tarfile
import zipfile
from collections import deque
@@ -86,6 +87,18 @@ class TestSuperLen:
assert super_len(fd) == 4
assert len(recwarn) == warnings_num
+ def test_tarfile_member(self, tmpdir):
+ file_obj = tmpdir.join('test.txt')
+ file_obj.write('Test')
+
+ tar_obj = str(tmpdir.join('test.tar'))
+ with tarfile.open(tar_obj, 'w') as tar:
+ tar.add(str(file_obj), arcname='test.txt')
+
+ with tarfile.open(tar_obj) as tar:
+ member = tar.extractfile('test.txt')
+ assert super_len(member) == 4
+
def test_super_len_with__len__(self):
foo = [1,2,3,4]
len_foo = super_len(foo)
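The new regression test can also be exercised standalone; a rough equivalent without the pytest tmpdir fixture (tempfile.TemporaryDirectory is assumed in its place):

import os
import tarfile
import tempfile
from requests.utils import super_len

with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, 'test.txt')
    with open(path, 'w') as f:
        f.write('Test')
    tar_path = os.path.join(tmp, 'test.tar')
    with tarfile.open(tar_path, 'w') as tar:
        tar.add(path, arcname='test.txt')
    with tarfile.open(tar_path) as tar:
        member = tar.extractfile('test.txt')
        # Before the fix this call raised AttributeError; now it returns 4.
        assert super_len(member) == 4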
| [
{
"content": "Release History\n===============\n\ndev\n---\n\n- \\[Short description of non-trivial change.\\]\n\n- Added a `requests.exceptions.JSONDecodeError` to decrease inconsistencies\n in the library. This gets raised in the `response.json()` method, and is\n backwards compatible as it inherits from previously thrown exceptions.\n Can be caught from `requests.exceptions.RequestException` as well.\n\n2.26.0 (2021-07-13)\n-------------------\n\n**Improvements**\n\n- Requests now supports Brotli compression, if either the `brotli` or\n `brotlicffi` package is installed. (#5783)\n\n- `Session.send` now correctly resolves proxy configurations from both\n the Session and Request. Behavior now matches `Session.request`. (#5681)\n\n**Bugfixes**\n\n- Fixed a race condition in zip extraction when using Requests in parallel\n from zip archive. (#5707)\n\n**Dependencies**\n\n- Instead of `chardet`, use the MIT-licensed `charset_normalizer` for Python3\n to remove license ambiguity for projects bundling requests. If `chardet`\n is already installed on your machine it will be used instead of `charset_normalizer`\n to keep backwards compatibility. (#5797)\n\n You can also install `chardet` while installing requests by\n specifying `[use_chardet_on_py3]` extra as follows:\n\n ```shell\n pip install \"requests[use_chardet_on_py3]\"\n ```\n\n Python2 still depends upon the `chardet` module.\n\n- Requests now supports `idna` 3.x on Python 3. `idna` 2.x will continue to\n be used on Python 2 installations. (#5711)\n\n**Deprecations**\n\n- The `requests[security]` extra has been converted to a no-op install.\n PyOpenSSL is no longer the recommended secure option for Requests. (#5867)\n\n- Requests has officially dropped support for Python 3.5. (#5867)\n\n2.25.1 (2020-12-16)\n-------------------\n\n**Bugfixes**\n\n- Requests now treats `application/json` as `utf8` by default. Resolving\n inconsistencies between `r.text` and `r.json` output. (#5673)\n\n**Dependencies**\n\n- Requests now supports chardet v4.x.\n\n2.25.0 (2020-11-11)\n-------------------\n\n**Improvements**\n\n- Added support for NETRC environment variable. (#5643)\n\n**Dependencies**\n\n- Requests now supports urllib3 v1.26.\n\n**Deprecations**\n\n- Requests v2.25.x will be the last release series with support for Python 3.5.\n- The `requests[security]` extra is officially deprecated and will be removed\n in Requests v2.26.0.\n\n2.24.0 (2020-06-17)\n-------------------\n\n**Improvements**\n\n- pyOpenSSL TLS implementation is now only used if Python\n either doesn't have an `ssl` module or doesn't support\n SNI. Previously pyOpenSSL was unconditionally used if available.\n This applies even if pyOpenSSL is installed via the\n `requests[security]` extra (#5443)\n\n- Redirect resolution should now only occur when\n `allow_redirects` is True. (#5492)\n\n- No longer perform unnecessary Content-Length calculation for\n requests that won't use it. (#5496)\n\n2.23.0 (2020-02-19)\n-------------------\n\n**Improvements**\n\n- Remove defunct reference to `prefetch` in Session `__attrs__` (#5110)\n\n**Bugfixes**\n\n- Requests no longer outputs password in basic auth usage warning. 
(#5099)\n\n**Dependencies**\n\n- Pinning for `chardet` and `idna` now uses major version instead of minor.\n This hopefully reduces the need for releases every time a dependency is updated.\n\n2.22.0 (2019-05-15)\n-------------------\n\n**Dependencies**\n\n- Requests now supports urllib3 v1.25.2.\n (note: 1.25.0 and 1.25.1 are incompatible)\n\n**Deprecations**\n\n- Requests has officially stopped support for Python 3.4.\n\n2.21.0 (2018-12-10)\n-------------------\n\n**Dependencies**\n\n- Requests now supports idna v2.8.\n\n2.20.1 (2018-11-08)\n-------------------\n\n**Bugfixes**\n\n- Fixed bug with unintended Authorization header stripping for\n redirects using default ports (http/80, https/443).\n\n2.20.0 (2018-10-18)\n-------------------\n\n**Bugfixes**\n\n- Content-Type header parsing is now case-insensitive (e.g.\n charset=utf8 v Charset=utf8).\n- Fixed exception leak where certain redirect urls would raise\n uncaught urllib3 exceptions.\n- Requests removes Authorization header from requests redirected\n from https to http on the same hostname. (CVE-2018-18074)\n- `should_bypass_proxies` now handles URIs without hostnames (e.g.\n files).\n\n**Dependencies**\n\n- Requests now supports urllib3 v1.24.\n\n**Deprecations**\n\n- Requests has officially stopped support for Python 2.6.\n\n2.19.1 (2018-06-14)\n-------------------\n\n**Bugfixes**\n\n- Fixed issue where status\\_codes.py's `init` function failed trying\n to append to a `__doc__` value of `None`.\n\n2.19.0 (2018-06-12)\n-------------------\n\n**Improvements**\n\n- Warn user about possible slowdown when using cryptography version\n < 1.3.4\n- Check for invalid host in proxy URL, before forwarding request to\n adapter.\n- Fragments are now properly maintained across redirects. (RFC7231\n 7.1.2)\n- Removed use of cgi module to expedite library load time.\n- Added support for SHA-256 and SHA-512 digest auth algorithms.\n- Minor performance improvement to `Request.content`.\n- Migrate to using collections.abc for 3.7 compatibility.\n\n**Bugfixes**\n\n- Parsing empty `Link` headers with `parse_header_links()` no longer\n return one bogus entry.\n- Fixed issue where loading the default certificate bundle from a zip\n archive would raise an `IOError`.\n- Fixed issue with unexpected `ImportError` on windows system which do\n not support `winreg` module.\n- DNS resolution in proxy bypass no longer includes the username and\n password in the request. 
This also fixes the issue of DNS queries\n failing on macOS.\n- Properly normalize adapter prefixes for url comparison.\n- Passing `None` as a file pointer to the `files` param no longer\n raises an exception.\n- Calling `copy` on a `RequestsCookieJar` will now preserve the cookie\n policy correctly.\n\n**Dependencies**\n\n- We now support idna v2.7.\n- We now support urllib3 v1.23.\n\n2.18.4 (2017-08-15)\n-------------------\n\n**Improvements**\n\n- Error messages for invalid headers now include the header name for\n easier debugging\n\n**Dependencies**\n\n- We now support idna v2.6.\n\n2.18.3 (2017-08-02)\n-------------------\n\n**Improvements**\n\n- Running `$ python -m requests.help` now includes the installed\n version of idna.\n\n**Bugfixes**\n\n- Fixed issue where Requests would raise `ConnectionError` instead of\n `SSLError` when encountering SSL problems when using urllib3 v1.22.\n\n2.18.2 (2017-07-25)\n-------------------\n\n**Bugfixes**\n\n- `requests.help` no longer fails on Python 2.6 due to the absence of\n `ssl.OPENSSL_VERSION_NUMBER`.\n\n**Dependencies**\n\n- We now support urllib3 v1.22.\n\n2.18.1 (2017-06-14)\n-------------------\n\n**Bugfixes**\n\n- Fix an error in the packaging whereby the `*.whl` contained\n incorrect data that regressed the fix in v2.17.3.\n\n2.18.0 (2017-06-14)\n-------------------\n\n**Improvements**\n\n- `Response` is now a context manager, so can be used directly in a\n `with` statement without first having to be wrapped by\n `contextlib.closing()`.\n\n**Bugfixes**\n\n- Resolve installation failure if multiprocessing is not available\n- Resolve tests crash if multiprocessing is not able to determine the\n number of CPU cores\n- Resolve error swallowing in utils set\\_environ generator\n\n2.17.3 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Improved `packages` namespace identity support, for monkeypatching\n libraries.\n\n2.17.2 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Improved `packages` namespace identity support, for monkeypatching\n libraries.\n\n2.17.1 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Improved `packages` namespace identity support, for monkeypatching\n libraries.\n\n2.17.0 (2017-05-29)\n-------------------\n\n**Improvements**\n\n- Removal of the 301 redirect cache. 
This improves thread-safety.\n\n2.16.5 (2017-05-28)\n-------------------\n\n- Improvements to `$ python -m requests.help`.\n\n2.16.4 (2017-05-27)\n-------------------\n\n- Introduction of the `$ python -m requests.help` command, for\n debugging with maintainers!\n\n2.16.3 (2017-05-27)\n-------------------\n\n- Further restored the `requests.packages` namespace for compatibility\n reasons.\n\n2.16.2 (2017-05-27)\n-------------------\n\n- Further restored the `requests.packages` namespace for compatibility\n reasons.\n\nNo code modification (noted below) should be necessary any longer.\n\n2.16.1 (2017-05-27)\n-------------------\n\n- Restored the `requests.packages` namespace for compatibility\n reasons.\n- Bugfix for `urllib3` version parsing.\n\n**Note**: code that was written to import against the\n`requests.packages` namespace previously will have to import code that\nrests at this module-level now.\n\nFor example:\n\n from requests.packages.urllib3.poolmanager import PoolManager\n\nWill need to be re-written to be:\n\n from requests.packages import urllib3\n urllib3.poolmanager.PoolManager\n\nOr, even better:\n\n from urllib3.poolmanager import PoolManager\n\n2.16.0 (2017-05-26)\n-------------------\n\n- Unvendor ALL the things!\n\n2.15.1 (2017-05-26)\n-------------------\n\n- Everyone makes mistakes.\n\n2.15.0 (2017-05-26)\n-------------------\n\n**Improvements**\n\n- Introduction of the `Response.next` property, for getting the next\n `PreparedResponse` from a redirect chain (when\n `allow_redirects=False`).\n- Internal refactoring of `__version__` module.\n\n**Bugfixes**\n\n- Restored once-optional parameter for\n `requests.utils.get_environ_proxies()`.\n\n2.14.2 (2017-05-10)\n-------------------\n\n**Bugfixes**\n\n- Changed a less-than to an equal-to and an or in the dependency\n markers to widen compatibility with older setuptools releases.\n\n2.14.1 (2017-05-09)\n-------------------\n\n**Bugfixes**\n\n- Changed the dependency markers to widen compatibility with older pip\n releases.\n\n2.14.0 (2017-05-09)\n-------------------\n\n**Improvements**\n\n- It is now possible to pass `no_proxy` as a key to the `proxies`\n dictionary to provide handling similar to the `NO_PROXY` environment\n variable.\n- When users provide invalid paths to certificate bundle files or\n directories Requests now raises `IOError`, rather than failing at\n the time of the HTTPS request with a fairly inscrutable certificate\n validation error.\n- The behavior of `SessionRedirectMixin` was slightly altered.\n `resolve_redirects` will now detect a redirect by calling\n `get_redirect_target(response)` instead of directly querying\n `Response.is_redirect` and `Response.headers['location']`. Advanced\n users will be able to process malformed redirects more easily.\n- Changed the internal calculation of elapsed request time to have\n higher resolution on Windows.\n- Added `win_inet_pton` as conditional dependency for the `[socks]`\n extra on Windows with Python 2.7.\n- Changed the proxy bypass implementation on Windows: the proxy bypass\n check doesn't use forward and reverse DNS requests anymore\n- URLs with schemes that begin with `http` but are not `http` or\n `https` no longer have their host parts forced to lowercase.\n\n**Bugfixes**\n\n- Much improved handling of non-ASCII `Location` header values in\n redirects. 
Fewer `UnicodeDecodeErrors` are encountered on Python 2,\n and Python 3 now correctly understands that Latin-1 is unlikely to\n be the correct encoding.\n- If an attempt to `seek` file to find out its length fails, we now\n appropriately handle that by aborting our content-length\n calculations.\n- Restricted `HTTPDigestAuth` to only respond to auth challenges made\n on 4XX responses, rather than to all auth challenges.\n- Fixed some code that was firing `DeprecationWarning` on Python 3.6.\n- The dismayed person emoticon (`/o\\\\`) no longer has a big head. I'm\n sure this is what you were all worrying about most.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to v1.21.1.\n- Updated bundled chardet to v3.0.2.\n- Updated bundled idna to v2.5.\n- Updated bundled certifi to 2017.4.17.\n\n2.13.0 (2017-01-24)\n-------------------\n\n**Features**\n\n- Only load the `idna` library when we've determined we need it. This\n will save some memory for users.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.20.\n- Updated bundled idna to 2.2.\n\n2.12.5 (2017-01-18)\n-------------------\n\n**Bugfixes**\n\n- Fixed an issue with JSON encoding detection, specifically detecting\n big-endian UTF-32 with BOM.\n\n2.12.4 (2016-12-14)\n-------------------\n\n**Bugfixes**\n\n- Fixed regression from 2.12.2 where non-string types were rejected in\n the basic auth parameters. While support for this behaviour has been\n re-added, the behaviour is deprecated and will be removed in the\n future.\n\n2.12.3 (2016-12-01)\n-------------------\n\n**Bugfixes**\n\n- Fixed regression from v2.12.1 for URLs with schemes that begin with\n \"http\". These URLs have historically been processed as though they\n were HTTP-schemed URLs, and so have had parameters added. This was\n removed in v2.12.2 in an overzealous attempt to resolve problems\n with IDNA-encoding those URLs. This change was reverted: the other\n fixes for IDNA-encoding have been judged to be sufficient to return\n to the behaviour Requests had before v2.12.0.\n\n2.12.2 (2016-11-30)\n-------------------\n\n**Bugfixes**\n\n- Fixed several issues with IDNA-encoding URLs that are technically\n invalid but which are widely accepted. Requests will now attempt to\n IDNA-encode a URL if it can but, if it fails, and the host contains\n only ASCII characters, it will be passed through optimistically.\n This will allow users to opt-in to using IDNA2003 themselves if they\n want to, and will also allow technically invalid but still common\n hostnames.\n- Fixed an issue where URLs with leading whitespace would raise\n `InvalidSchema` errors.\n- Fixed an issue where some URLs without the HTTP or HTTPS schemes\n would still have HTTP URL preparation applied to them.\n- Fixed an issue where Unicode strings could not be used in basic\n auth.\n- Fixed an issue encountered by some Requests plugins where\n constructing a Response object would cause `Response.content` to\n raise an `AttributeError`.\n\n2.12.1 (2016-11-16)\n-------------------\n\n**Bugfixes**\n\n- Updated setuptools 'security' extra for the new PyOpenSSL backend in\n urllib3.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.19.1.\n\n2.12.0 (2016-11-15)\n-------------------\n\n**Improvements**\n\n- Updated support for internationalized domain names from IDNA2003 to\n IDNA2008. 
This updated support is required for several forms of IDNs\n and is mandatory for .de domains.\n- Much improved heuristics for guessing content lengths: Requests will\n no longer read an entire `StringIO` into memory.\n- Much improved logic for recalculating `Content-Length` headers for\n `PreparedRequest` objects.\n- Improved tolerance for file-like objects that have no `tell` method\n but do have a `seek` method.\n- Anything that is a subclass of `Mapping` is now treated like a\n dictionary by the `data=` keyword argument.\n- Requests now tolerates empty passwords in proxy credentials, rather\n than stripping the credentials.\n- If a request is made with a file-like object as the body and that\n request is redirected with a 307 or 308 status code, Requests will\n now attempt to rewind the body object so it can be replayed.\n\n**Bugfixes**\n\n- When calling `response.close`, the call to `close` will be\n propagated through to non-urllib3 backends.\n- Fixed issue where the `ALL_PROXY` environment variable would be\n preferred over scheme-specific variables like `HTTP_PROXY`.\n- Fixed issue where non-UTF8 reason phrases got severely mangled by\n falling back to decoding using ISO 8859-1 instead.\n- Fixed a bug where Requests would not correctly correlate cookies set\n when using custom Host headers if those Host headers did not use the\n native string type for the platform.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.19.\n- Updated bundled certifi certs to 2016.09.26.\n\n2.11.1 (2016-08-17)\n-------------------\n\n**Bugfixes**\n\n- Fixed a bug when using `iter_content` with `decode_unicode=True` for\n streamed bodies would raise `AttributeError`. This bug was\n introduced in 2.11.\n- Strip Content-Type and Transfer-Encoding headers from the header\n block when following a redirect that transforms the verb from\n POST/PUT to GET.\n\n2.11.0 (2016-08-08)\n-------------------\n\n**Improvements**\n\n- Added support for the `ALL_PROXY` environment variable.\n- Reject header values that contain leading whitespace or newline\n characters to reduce risk of header smuggling.\n\n**Bugfixes**\n\n- Fixed occasional `TypeError` when attempting to decode a JSON\n response that occurred in an error case. Now correctly returns a\n `ValueError`.\n- Requests would incorrectly ignore a non-CIDR IP address in the\n `NO_PROXY` environment variables: Requests now treats it as a\n specific IP.\n- Fixed a bug when sending JSON data that could cause us to encounter\n obscure OpenSSL errors in certain network conditions (yes, really).\n- Added type checks to ensure that `iter_content` only accepts\n integers and `None` for chunk sizes.\n- Fixed issue where responses whose body had not been fully consumed\n would have the underlying connection closed but not returned to the\n connection pool, which could cause Requests to hang in situations\n where the `HTTPAdapter` had been configured to use a blocking\n connection pool.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.16.\n- Some previous releases accidentally accepted non-strings as\n acceptable header values. This release does not.\n\n2.10.0 (2016-04-29)\n-------------------\n\n**New Features**\n\n- SOCKS Proxy Support! 
(requires PySocks;\n `$ pip install requests[socks]`)\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.15.1.\n\n2.9.2 (2016-04-29)\n------------------\n\n**Improvements**\n\n- Change built-in CaseInsensitiveDict (used for headers) to use\n OrderedDict as its underlying datastore.\n\n**Bugfixes**\n\n- Don't use redirect\\_cache if allow\\_redirects=False\n- When passed objects that throw exceptions from `tell()`, send them\n via chunked transfer encoding instead of failing.\n- Raise a ProxyError for proxy related connection issues.\n\n2.9.1 (2015-12-21)\n------------------\n\n**Bugfixes**\n\n- Resolve regression introduced in 2.9.0 that made it impossible to\n send binary strings as bodies in Python 3.\n- Fixed errors when calculating cookie expiration dates in certain\n locales.\n\n**Miscellaneous**\n\n- Updated bundled urllib3 to 1.13.1.\n\n2.9.0 (2015-12-15)\n------------------\n\n**Minor Improvements** (Backwards compatible)\n\n- The `verify` keyword argument now supports being passed a path to a\n directory of CA certificates, not just a single-file bundle.\n- Warnings are now emitted when sending files opened in text mode.\n- Added the 511 Network Authentication Required status code to the\n status code registry.\n\n**Bugfixes**\n\n- For file-like objects that are not sought to the very beginning, we\n now send the content length for the number of bytes we will actually\n read, rather than the total size of the file, allowing partial file\n uploads.\n- When uploading file-like objects, if they are empty or have no\n obvious content length we set `Transfer-Encoding: chunked` rather\n than `Content-Length: 0`.\n- We correctly receive the response in buffered mode when uploading\n chunked bodies.\n- We now handle being passed a query string as a bytestring on Python\n 3, by decoding it as UTF-8.\n- Sessions are now closed in all cases (exceptional and not) when\n using the functional API rather than leaking and waiting for the\n garbage collector to clean them up.\n- Correctly handle digest auth headers with a malformed `qop`\n directive that contains no token, by treating it the same as if no\n `qop` directive was provided at all.\n- Minor performance improvements when removing specific cookies by\n name.\n\n**Miscellaneous**\n\n- Updated urllib3 to 1.13.\n\n2.8.1 (2015-10-13)\n------------------\n\n**Bugfixes**\n\n- Update certificate bundle to match `certifi` 2015.9.6.2's weak\n certificate bundle.\n- Fix a bug in 2.8.0 where requests would raise `ConnectTimeout`\n instead of `ConnectionError`\n- When using the PreparedRequest flow, requests will now correctly\n respect the `json` parameter. Broken in 2.8.0.\n- When using the PreparedRequest flow, requests will now correctly\n handle a Unicode-string method name on Python 2. Broken in 2.8.0.\n\n2.8.0 (2015-10-05)\n------------------\n\n**Minor Improvements** (Backwards Compatible)\n\n- Requests now supports per-host proxies. This allows the `proxies`\n dictionary to have entries of the form\n `{'<scheme>://<hostname>': '<proxy>'}`. Host-specific proxies will\n be used in preference to the previously-supported scheme-specific\n ones, but the previous syntax will continue to work.\n- `Response.raise_for_status` now prints the URL that failed as part\n of the exception message.\n- `requests.utils.get_netrc_auth` now takes an `raise_errors` kwarg,\n defaulting to `False`. 
When `True`, errors parsing `.netrc` files\n cause exceptions to be thrown.\n- Change to bundled projects import logic to make it easier to\n unbundle requests downstream.\n- Changed the default User-Agent string to avoid leaking data on\n Linux: now contains only the requests version.\n\n**Bugfixes**\n\n- The `json` parameter to `post()` and friends will now only be used\n if neither `data` nor `files` are present, consistent with the\n documentation.\n- We now ignore empty fields in the `NO_PROXY` environment variable.\n- Fixed problem where `httplib.BadStatusLine` would get raised if\n combining `stream=True` with `contextlib.closing`.\n- Prevented bugs where we would attempt to return the same connection\n back to the connection pool twice when sending a Chunked body.\n- Miscellaneous minor internal changes.\n- Digest Auth support is now thread safe.\n\n**Updates**\n\n- Updated urllib3 to 1.12.\n\n2.7.0 (2015-05-03)\n------------------\n\nThis is the first release that follows our new release process. For\nmore, see [our\ndocumentation](https://requests.readthedocs.io/en/latest/community/release-process/).\n\n**Bugfixes**\n\n- Updated urllib3 to 1.10.4, resolving several bugs involving chunked\n transfer encoding and response framing.\n\n2.6.2 (2015-04-23)\n------------------\n\n**Bugfixes**\n\n- Fix regression where compressed data that was sent as chunked data\n was not properly decompressed. (\\#2561)\n\n2.6.1 (2015-04-22)\n------------------\n\n**Bugfixes**\n\n- Remove VendorAlias import machinery introduced in v2.5.2.\n- Simplify the PreparedRequest.prepare API: We no longer require the\n user to pass an empty list to the hooks keyword argument. (c.f.\n \\#2552)\n- Resolve redirects now receives and forwards all of the original\n arguments to the adapter. (\\#2503)\n- Handle UnicodeDecodeErrors when trying to deal with a unicode URL\n that cannot be encoded in ASCII. (\\#2540)\n- Populate the parsed path of the URI field when performing Digest\n Authentication. (\\#2426)\n- Copy a PreparedRequest's CookieJar more reliably when it is not an\n instance of RequestsCookieJar. (\\#2527)\n\n2.6.0 (2015-03-14)\n------------------\n\n**Bugfixes**\n\n- CVE-2015-2296: Fix handling of cookies on redirect. Previously a\n cookie without a host value set would use the hostname for the\n redirected URL exposing requests users to session fixation attacks\n and potentially cookie stealing. This was disclosed privately by\n Matthew Daley of [BugFuzz](https://bugfuzz.com). This affects all\n versions of requests from v2.1.0 to v2.5.3 (inclusive on both ends).\n- Fix error when requests is an `install_requires` dependency and\n `python setup.py test` is run. (\\#2462)\n- Fix error when urllib3 is unbundled and requests continues to use\n the vendored import location.\n- Include fixes to `urllib3`'s header handling.\n- Requests' handling of unvendored dependencies is now more\n restrictive.\n\n**Features and Improvements**\n\n- Support bytearrays when passed as parameters in the `files`\n argument. (\\#2468)\n- Avoid data duplication when creating a request with `str`, `bytes`,\n or `bytearray` input to the `files` argument.\n\n2.5.3 (2015-02-24)\n------------------\n\n**Bugfixes**\n\n- Revert changes to our vendored certificate bundle. 
For more context\n see (\\#2455, \\#2456, and <https://bugs.python.org/issue23476>)\n\n2.5.2 (2015-02-23)\n------------------\n\n**Features and Improvements**\n\n- Add sha256 fingerprint support.\n ([shazow/urllib3\\#540](https://github.com/shazow/urllib3/pull/540))\n- Improve the performance of headers.\n ([shazow/urllib3\\#544](https://github.com/shazow/urllib3/pull/544))\n\n**Bugfixes**\n\n- Copy pip's import machinery. When downstream redistributors remove\n requests.packages.urllib3 the import machinery will continue to let\n those same symbols work. Example usage in requests' documentation\n and 3rd-party libraries relying on the vendored copies of urllib3\n will work without having to fallback to the system urllib3.\n- Attempt to quote parts of the URL on redirect if unquoting and then\n quoting fails. (\\#2356)\n- Fix filename type check for multipart form-data uploads. (\\#2411)\n- Properly handle the case where a server issuing digest\n authentication challenges provides both auth and auth-int\n qop-values. (\\#2408)\n- Fix a socket leak.\n ([shazow/urllib3\\#549](https://github.com/shazow/urllib3/pull/549))\n- Fix multiple `Set-Cookie` headers properly.\n ([shazow/urllib3\\#534](https://github.com/shazow/urllib3/pull/534))\n- Disable the built-in hostname verification.\n ([shazow/urllib3\\#526](https://github.com/shazow/urllib3/pull/526))\n- Fix the behaviour of decoding an exhausted stream.\n ([shazow/urllib3\\#535](https://github.com/shazow/urllib3/pull/535))\n\n**Security**\n\n- Pulled in an updated `cacert.pem`.\n- Drop RC4 from the default cipher list.\n ([shazow/urllib3\\#551](https://github.com/shazow/urllib3/pull/551))\n\n2.5.1 (2014-12-23)\n------------------\n\n**Behavioural Changes**\n\n- Only catch HTTPErrors in raise\\_for\\_status (\\#2382)\n\n**Bugfixes**\n\n- Handle LocationParseError from urllib3 (\\#2344)\n- Handle file-like object filenames that are not strings (\\#2379)\n- Unbreak HTTPDigestAuth handler. 
Allow new nonces to be negotiated\n (\\#2389)\n\n2.5.0 (2014-12-01)\n------------------\n\n**Improvements**\n\n- Allow usage of urllib3's Retry object with HTTPAdapters (\\#2216)\n- The `iter_lines` method on a response now accepts a delimiter with\n which to split the content (\\#2295)\n\n**Behavioural Changes**\n\n- Add deprecation warnings to functions in requests.utils that will be\n removed in 3.0 (\\#2309)\n- Sessions used by the functional API are always closed (\\#2326)\n- Restrict requests to HTTP/1.1 and HTTP/1.0 (stop accepting HTTP/0.9)\n (\\#2323)\n\n**Bugfixes**\n\n- Only parse the URL once (\\#2353)\n- Allow Content-Length header to always be overridden (\\#2332)\n- Properly handle files in HTTPDigestAuth (\\#2333)\n- Cap redirect\\_cache size to prevent memory abuse (\\#2299)\n- Fix HTTPDigestAuth handling of redirects after authenticating\n successfully (\\#2253)\n- Fix crash with custom method parameter to Session.request (\\#2317)\n- Fix how Link headers are parsed using the regular expression library\n (\\#2271)\n\n**Documentation**\n\n- Add more references for interlinking (\\#2348)\n- Update CSS for theme (\\#2290)\n- Update width of buttons and sidebar (\\#2289)\n- Replace references of Gittip with Gratipay (\\#2282)\n- Add link to changelog in sidebar (\\#2273)\n\n2.4.3 (2014-10-06)\n------------------\n\n**Bugfixes**\n\n- Unicode URL improvements for Python 2.\n- Re-order JSON param for backwards compat.\n- Automatically defrag authentication schemes from host/pass URIs.\n ([\\#2249](https://github.com/psf/requests/issues/2249))\n\n2.4.2 (2014-10-05)\n------------------\n\n**Improvements**\n\n- FINALLY! Add json parameter for uploads!\n ([\\#2258](https://github.com/psf/requests/pull/2258))\n- Support for bytestring URLs on Python 3.x\n ([\\#2238](https://github.com/psf/requests/pull/2238))\n\n**Bugfixes**\n\n- Avoid getting stuck in a loop\n ([\\#2244](https://github.com/psf/requests/pull/2244))\n- Multiple calls to iter\\* fail with unhelpful error.\n ([\\#2240](https://github.com/psf/requests/issues/2240),\n [\\#2241](https://github.com/psf/requests/issues/2241))\n\n**Documentation**\n\n- Correct redirection introduction\n ([\\#2245](https://github.com/psf/requests/pull/2245/))\n- Added example of how to send multiple files in one request.\n ([\\#2227](https://github.com/psf/requests/pull/2227/))\n- Clarify how to pass a custom set of CAs\n ([\\#2248](https://github.com/psf/requests/pull/2248/))\n\n2.4.1 (2014-09-09)\n------------------\n\n- Now has a \"security\" package extras set,\n `$ pip install requests[security]`\n- Requests will now use Certifi if it is available.\n- Capture and re-raise urllib3 ProtocolError\n- Bugfix for responses that attempt to redirect to themselves forever\n (wtf?).\n\n2.4.0 (2014-08-29)\n------------------\n\n**Behavioral Changes**\n\n- `Connection: keep-alive` header is now sent automatically.\n\n**Improvements**\n\n- Support for connect timeouts! 
Timeout now accepts a tuple (connect,\n read) which is used to set individual connect and read timeouts.\n- Allow copying of PreparedRequests without headers/cookies.\n- Updated bundled urllib3 version.\n- Refactored settings loading from environment -- new\n Session.merge\\_environment\\_settings.\n- Handle socket errors in iter\\_content.\n\n2.3.0 (2014-05-16)\n------------------\n\n**API Changes**\n\n- New `Response` property `is_redirect`, which is true when the\n library could have processed this response as a redirection (whether\n or not it actually did).\n- The `timeout` parameter now affects requests with both `stream=True`\n and `stream=False` equally.\n- The change in v2.0.0 to mandate explicit proxy schemes has been\n reverted. Proxy schemes now default to `http://`.\n- The `CaseInsensitiveDict` used for HTTP headers now behaves like a\n normal dictionary when references as string or viewed in the\n interpreter.\n\n**Bugfixes**\n\n- No longer expose Authorization or Proxy-Authorization headers on\n redirect. Fix CVE-2014-1829 and CVE-2014-1830 respectively.\n- Authorization is re-evaluated each redirect.\n- On redirect, pass url as native strings.\n- Fall-back to autodetected encoding for JSON when Unicode detection\n fails.\n- Headers set to `None` on the `Session` are now correctly not sent.\n- Correctly honor `decode_unicode` even if it wasn't used earlier in\n the same response.\n- Stop advertising `compress` as a supported Content-Encoding.\n- The `Response.history` parameter is now always a list.\n- Many, many `urllib3` bugfixes.\n\n2.2.1 (2014-01-23)\n------------------\n\n**Bugfixes**\n\n- Fixes incorrect parsing of proxy credentials that contain a literal\n or encoded '\\#' character.\n- Assorted urllib3 fixes.\n\n2.2.0 (2014-01-09)\n------------------\n\n**API Changes**\n\n- New exception: `ContentDecodingError`. Raised instead of `urllib3`\n `DecodeError` exceptions.\n\n**Bugfixes**\n\n- Avoid many many exceptions from the buggy implementation of\n `proxy_bypass` on OS X in Python 2.6.\n- Avoid crashing when attempting to get authentication credentials\n from \\~/.netrc when running as a user without a home directory.\n- Use the correct pool size for pools of connections to proxies.\n- Fix iteration of `CookieJar` objects.\n- Ensure that cookies are persisted over redirect.\n- Switch back to using chardet, since it has merged with charade.\n\n2.1.0 (2013-12-05)\n------------------\n\n- Updated CA Bundle, of course.\n- Cookies set on individual Requests through a `Session` (e.g. 
via\n `Session.get()`) are no longer persisted to the `Session`.\n- Clean up connections when we hit problems during chunked upload,\n rather than leaking them.\n- Return connections to the pool when a chunked upload is successful,\n rather than leaking it.\n- Match the HTTPbis recommendation for HTTP 301 redirects.\n- Prevent hanging when using streaming uploads and Digest Auth when a\n 401 is received.\n- Values of headers set by Requests are now always the native string\n type.\n- Fix previously broken SNI support.\n- Fix accessing HTTP proxies using proxy authentication.\n- Unencode HTTP Basic usernames and passwords extracted from URLs.\n- Support for IP address ranges for no\\_proxy environment variable\n- Parse headers correctly when users override the default `Host:`\n header.\n- Avoid munging the URL in case of case-sensitive servers.\n- Looser URL handling for non-HTTP/HTTPS urls.\n- Accept unicode methods in Python 2.6 and 2.7.\n- More resilient cookie handling.\n- Make `Response` objects pickleable.\n- Actually added MD5-sess to Digest Auth instead of pretending to like\n last time.\n- Updated internal urllib3.\n- Fixed @Lukasa's lack of taste.\n\n2.0.1 (2013-10-24)\n------------------\n\n- Updated included CA Bundle with new mistrusts and automated process\n for the future\n- Added MD5-sess to Digest Auth\n- Accept per-file headers in multipart file POST messages.\n- Fixed: Don't send the full URL on CONNECT messages.\n- Fixed: Correctly lowercase a redirect scheme.\n- Fixed: Cookies not persisted when set via functional API.\n- Fixed: Translate urllib3 ProxyError into a requests ProxyError\n derived from ConnectionError.\n- Updated internal urllib3 and chardet.\n\n2.0.0 (2013-09-24)\n------------------\n\n**API Changes:**\n\n- Keys in the Headers dictionary are now native strings on all Python\n versions, i.e. bytestrings on Python 2, unicode on Python 3.\n- Proxy URLs now *must* have an explicit scheme. A `MissingSchema`\n exception will be raised if they don't.\n- Timeouts now apply to read time if `Stream=False`.\n- `RequestException` is now a subclass of `IOError`, not\n `RuntimeError`.\n- Added new method to `PreparedRequest` objects:\n `PreparedRequest.copy()`.\n- Added new method to `Session` objects: `Session.update_request()`.\n This method updates a `Request` object with the data (e.g. cookies)\n stored on the `Session`.\n- Added new method to `Session` objects: `Session.prepare_request()`.\n This method updates and prepares a `Request` object, and returns the\n corresponding `PreparedRequest` object.\n- Added new method to `HTTPAdapter` objects:\n `HTTPAdapter.proxy_headers()`. This should not be called directly,\n but improves the subclass interface.\n- `httplib.IncompleteRead` exceptions caused by incorrect chunked\n encoding will now raise a Requests `ChunkedEncodingError` instead.\n- Invalid percent-escape sequences now cause a Requests `InvalidURL`\n exception to be raised.\n- HTTP 208 no longer uses reason phrase `\"im_used\"`. Correctly uses\n `\"already_reported\"`.\n- HTTP 226 reason added (`\"im_used\"`).\n\n**Bugfixes:**\n\n- Vastly improved proxy support, including the CONNECT verb. 
Special\n thanks to the many contributors who worked towards this improvement.\n- Cookies are now properly managed when 401 authentication responses\n are received.\n- Chunked encoding fixes.\n- Support for mixed case schemes.\n- Better handling of streaming downloads.\n- Retrieve environment proxies from more locations.\n- Minor cookies fixes.\n- Improved redirect behaviour.\n- Improved streaming behaviour, particularly for compressed data.\n- Miscellaneous small Python 3 text encoding bugs.\n- `.netrc` no longer overrides explicit auth.\n- Cookies set by hooks are now correctly persisted on Sessions.\n- Fix problem with cookies that specify port numbers in their host\n field.\n- `BytesIO` can be used to perform streaming uploads.\n- More generous parsing of the `no_proxy` environment variable.\n- Non-string objects can be passed in data values alongside files.\n\n1.2.3 (2013-05-25)\n------------------\n\n- Simple packaging fix\n\n1.2.2 (2013-05-23)\n------------------\n\n- Simple packaging fix\n\n1.2.1 (2013-05-20)\n------------------\n\n- 301 and 302 redirects now change the verb to GET for all verbs, not\n just POST, improving browser compatibility.\n- Python 3.3.2 compatibility\n- Always percent-encode location headers\n- Fix connection adapter matching to be most-specific first\n- new argument to the default connection adapter for passing a block\n argument\n- prevent a KeyError when there's no link headers\n\n1.2.0 (2013-03-31)\n------------------\n\n- Fixed cookies on sessions and on requests\n- Significantly change how hooks are dispatched - hooks now receive\n all the arguments specified by the user when making a request so\n hooks can make a secondary request with the same parameters. This is\n especially necessary for authentication handler authors\n- certifi support was removed\n- Fixed bug where using OAuth 1 with body `signature_type` sent no\n data\n- Major proxy work thanks to @Lukasa including parsing of proxy\n authentication from the proxy url\n- Fix DigestAuth handling too many 401s\n- Update vendored urllib3 to include SSL bug fixes\n- Allow keyword arguments to be passed to `json.loads()` via the\n `Response.json()` method\n- Don't send `Content-Length` header by default on `GET` or `HEAD`\n requests\n- Add `elapsed` attribute to `Response` objects to time how long a\n request took.\n- Fix `RequestsCookieJar`\n- Sessions and Adapters are now picklable, i.e., can be used with the\n multiprocessing library\n- Update charade to version 1.0.3\n\nThe change in how hooks are dispatched will likely cause a great deal of\nissues.\n\n1.1.0 (2013-01-10)\n------------------\n\n- CHUNKED REQUESTS\n- Support for iterable response bodies\n- Assume servers persist redirect params\n- Allow explicit content types to be specified for file data\n- Make merge\\_kwargs case-insensitive when looking up keys\n\n1.0.3 (2012-12-18)\n------------------\n\n- Fix file upload encoding bug\n- Fix cookie behavior\n\n1.0.2 (2012-12-17)\n------------------\n\n- Proxy fix for HTTPAdapter.\n\n1.0.1 (2012-12-17)\n------------------\n\n- Cert verification exception bug.\n- Proxy fix for HTTPAdapter.\n\n1.0.0 (2012-12-17)\n------------------\n\n- Massive Refactor and Simplification\n- Switch to Apache 2.0 license\n- Swappable Connection Adapters\n- Mountable Connection Adapters\n- Mutable ProcessedRequest chain\n- /s/prefetch/stream\n- Removal of all configuration\n- Standard library logging\n- Make Response.json() callable, not property.\n- Usage of new charade project, which provides python 2 and 3\n 
simultaneous chardet.\n- Removal of all hooks except 'response'\n- Removal of all authentication helpers (OAuth, Kerberos)\n\nThis is not a backwards compatible change.\n\n0.14.2 (2012-10-27)\n-------------------\n\n- Improved mime-compatible JSON handling\n- Proxy fixes\n- Path hack fixes\n- Case-Insensitive Content-Encoding headers\n- Support for CJK parameters in form posts\n\n0.14.1 (2012-10-01)\n-------------------\n\n- Python 3.3 Compatibility\n- Simply default accept-encoding\n- Bugfixes\n\n0.14.0 (2012-09-02)\n-------------------\n\n- No more iter\\_content errors if already downloaded.\n\n0.13.9 (2012-08-25)\n-------------------\n\n- Fix for OAuth + POSTs\n- Remove exception eating from dispatch\\_hook\n- General bugfixes\n\n0.13.8 (2012-08-21)\n-------------------\n\n- Incredible Link header support :)\n\n0.13.7 (2012-08-19)\n-------------------\n\n- Support for (key, value) lists everywhere.\n- Digest Authentication improvements.\n- Ensure proxy exclusions work properly.\n- Clearer UnicodeError exceptions.\n- Automatic casting of URLs to strings (fURL and such)\n- Bugfixes.\n\n0.13.6 (2012-08-06)\n-------------------\n\n- Long awaited fix for hanging connections!\n\n0.13.5 (2012-07-27)\n-------------------\n\n- Packaging fix\n\n0.13.4 (2012-07-27)\n-------------------\n\n- GSSAPI/Kerberos authentication!\n- App Engine 2.7 Fixes!\n- Fix leaking connections (from urllib3 update)\n- OAuthlib path hack fix\n- OAuthlib URL parameters fix.\n\n0.13.3 (2012-07-12)\n-------------------\n\n- Use simplejson if available.\n- Do not hide SSLErrors behind Timeouts.\n- Fixed param handling with urls containing fragments.\n- Significantly improved information in User Agent.\n- client certificates are ignored when verify=False\n\n0.13.2 (2012-06-28)\n-------------------\n\n- Zero dependencies (once again)!\n- New: Response.reason\n- Sign querystring parameters in OAuth 1.0\n- Client certificates no longer ignored when verify=False\n- Add openSUSE certificate support\n\n0.13.1 (2012-06-07)\n-------------------\n\n- Allow passing a file or file-like object as data.\n- Allow hooks to return responses that indicate errors.\n- Fix Response.text and Response.json for body-less responses.\n\n0.13.0 (2012-05-29)\n-------------------\n\n- Removal of Requests.async in favor of\n [grequests](https://github.com/kennethreitz/grequests)\n- Allow disabling of cookie persistence.\n- New implementation of safe\\_mode\n- cookies.get now supports default argument\n- Session cookies not saved when Session.request is called with\n return\\_response=False\n- Env: no\\_proxy support.\n- RequestsCookieJar improvements.\n- Various bug fixes.\n\n0.12.1 (2012-05-08)\n-------------------\n\n- New `Response.json` property.\n- Ability to add string file uploads.\n- Fix out-of-range issue with iter\\_lines.\n- Fix iter\\_content default size.\n- Fix POST redirects containing files.\n\n0.12.0 (2012-05-02)\n-------------------\n\n- EXPERIMENTAL OAUTH SUPPORT!\n- Proper CookieJar-backed cookies interface with awesome dict-like\n interface.\n- Speed fix for non-iterated content chunks.\n- Move `pre_request` to a more usable place.\n- New `pre_send` hook.\n- Lazily encode data, params, files.\n- Load system Certificate Bundle if `certify` isn't available.\n- Cleanups, fixes.\n\n0.11.2 (2012-04-22)\n-------------------\n\n- Attempt to use the OS's certificate bundle if `certifi` isn't\n available.\n- Infinite digest auth redirect fix.\n- Multi-part file upload improvements.\n- Fix decoding of invalid %encodings in URLs.\n- If there 
is no content in a response don't throw an error the second\n time that content is attempted to be read.\n- Upload data on redirects.\n\n0.11.1 (2012-03-30)\n-------------------\n\n- POST redirects now break RFC to do what browsers do: Follow up with\n a GET.\n- New `strict_mode` configuration to disable new redirect behavior.\n\n0.11.0 (2012-03-14)\n-------------------\n\n- Private SSL Certificate support\n- Remove select.poll from Gevent monkeypatching\n- Remove redundant generator for chunked transfer encoding\n- Fix: Response.ok raises Timeout Exception in safe\\_mode\n\n0.10.8 (2012-03-09)\n-------------------\n\n- Generate chunked ValueError fix\n- Proxy configuration by environment variables\n- Simplification of iter\\_lines.\n- New trust\\_env configuration for disabling system/environment hints.\n- Suppress cookie errors.\n\n0.10.7 (2012-03-07)\n-------------------\n\n- encode\\_uri = False\n\n0.10.6 (2012-02-25)\n-------------------\n\n- Allow '=' in cookies.\n\n0.10.5 (2012-02-25)\n-------------------\n\n- Response body with 0 content-length fix.\n- New async.imap.\n- Don't fail on netrc.\n\n0.10.4 (2012-02-20)\n-------------------\n\n- Honor netrc.\n\n0.10.3 (2012-02-20)\n-------------------\n\n- HEAD requests don't follow redirects anymore.\n- raise\\_for\\_status() doesn't raise for 3xx anymore.\n- Make Session objects picklable.\n- ValueError for invalid schema URLs.\n\n0.10.2 (2012-01-15)\n-------------------\n\n- Vastly improved URL quoting.\n- Additional allowed cookie key values.\n- Attempted fix for \"Too many open files\" Error\n- Replace unicode errors on first pass, no need for second pass.\n- Append '/' to bare-domain urls before query insertion.\n- Exceptions now inherit from RuntimeError.\n- Binary uploads + auth fix.\n- Bugfixes.\n\n0.10.1 (2012-01-23)\n-------------------\n\n- PYTHON 3 SUPPORT!\n- Dropped 2.5 Support. (*Backwards Incompatible*)\n\n0.10.0 (2012-01-21)\n-------------------\n\n- `Response.content` is now bytes-only. (*Backwards Incompatible*)\n- New `Response.text` is unicode-only.\n- If no `Response.encoding` is specified and `chardet` is available,\n `Response.text` will guess an encoding.\n- Default to ISO-8859-1 (Western) encoding for \"text\" subtypes.\n- Removal of decode\\_unicode. 
(*Backwards Incompatible*)\n- New multiple-hooks system.\n- New `Response.register_hook` for registering hooks within the\n pipeline.\n- `Response.url` is now Unicode.\n\n0.9.3 (2012-01-18)\n------------------\n\n- SSL verify=False bugfix (apparent on windows machines).\n\n0.9.2 (2012-01-18)\n------------------\n\n- Asynchronous async.send method.\n- Support for proper chunk streams with boundaries.\n- session argument for Session classes.\n- Print entire hook tracebacks, not just exception instance.\n- Fix response.iter\\_lines from pending next line.\n- Fix but in HTTP-digest auth w/ URI having query strings.\n- Fix in Event Hooks section.\n- Urllib3 update.\n\n0.9.1 (2012-01-06)\n------------------\n\n- danger\\_mode for automatic Response.raise\\_for\\_status()\n- Response.iter\\_lines refactor\n\n0.9.0 (2011-12-28)\n------------------\n\n- verify ssl is default.\n\n0.8.9 (2011-12-28)\n------------------\n\n- Packaging fix.\n\n0.8.8 (2011-12-28)\n------------------\n\n- SSL CERT VERIFICATION!\n- Release of Cerifi: Mozilla's cert list.\n- New 'verify' argument for SSL requests.\n- Urllib3 update.\n\n0.8.7 (2011-12-24)\n------------------\n\n- iter\\_lines last-line truncation fix\n- Force safe\\_mode for async requests\n- Handle safe\\_mode exceptions more consistently\n- Fix iteration on null responses in safe\\_mode\n\n0.8.6 (2011-12-18)\n------------------\n\n- Socket timeout fixes.\n- Proxy Authorization support.\n\n0.8.5 (2011-12-14)\n------------------\n\n- Response.iter\\_lines!\n\n0.8.4 (2011-12-11)\n------------------\n\n- Prefetch bugfix.\n- Added license to installed version.\n\n0.8.3 (2011-11-27)\n------------------\n\n- Converted auth system to use simpler callable objects.\n- New session parameter to API methods.\n- Display full URL while logging.\n\n0.8.2 (2011-11-19)\n------------------\n\n- New Unicode decoding system, based on over-ridable\n Response.encoding.\n- Proper URL slash-quote handling.\n- Cookies with `[`, `]`, and `_` allowed.\n\n0.8.1 (2011-11-15)\n------------------\n\n- URL Request path fix\n- Proxy fix.\n- Timeouts fix.\n\n0.8.0 (2011-11-13)\n------------------\n\n- Keep-alive support!\n- Complete removal of Urllib2\n- Complete removal of Poster\n- Complete removal of CookieJars\n- New ConnectionError raising\n- Safe\\_mode for error catching\n- prefetch parameter for request methods\n- OPTION method\n- Async pool size throttling\n- File uploads send real names\n- Vendored in urllib3\n\n0.7.6 (2011-11-07)\n------------------\n\n- Digest authentication bugfix (attach query data to path)\n\n0.7.5 (2011-11-04)\n------------------\n\n- Response.content = None if there was an invalid response.\n- Redirection auth handling.\n\n0.7.4 (2011-10-26)\n------------------\n\n- Session Hooks fix.\n\n0.7.3 (2011-10-23)\n------------------\n\n- Digest Auth fix.\n\n0.7.2 (2011-10-23)\n------------------\n\n- PATCH Fix.\n\n0.7.1 (2011-10-23)\n------------------\n\n- Move away from urllib2 authentication handling.\n- Fully Remove AuthManager, AuthObject, &c.\n- New tuple-based auth system with handler callbacks.\n\n0.7.0 (2011-10-22)\n------------------\n\n- Sessions are now the primary interface.\n- Deprecated InvalidMethodException.\n- PATCH fix.\n- New config system (no more global settings).\n\n0.6.6 (2011-10-19)\n------------------\n\n- Session parameter bugfix (params merging).\n\n0.6.5 (2011-10-18)\n------------------\n\n- Offline (fast) test suite.\n- Session dictionary argument merging.\n\n0.6.4 (2011-10-13)\n------------------\n\n- Automatic decoding of unicode, 
based on HTTP Headers.\n- New `decode_unicode` setting.\n- Removal of `r.read/close` methods.\n- New `r.faw` interface for advanced response usage.\\*\n- Automatic expansion of parameterized headers.\n\n0.6.3 (2011-10-13)\n------------------\n\n- Beautiful `requests.async` module, for making async requests w/\n gevent.\n\n0.6.2 (2011-10-09)\n------------------\n\n- GET/HEAD obeys allow\\_redirects=False.\n\n0.6.1 (2011-08-20)\n------------------\n\n- Enhanced status codes experience `\\o/`\n- Set a maximum number of redirects (`settings.max_redirects`)\n- Full Unicode URL support\n- Support for protocol-less redirects.\n- Allow for arbitrary request types.\n- Bugfixes\n\n0.6.0 (2011-08-17)\n------------------\n\n- New callback hook system\n- New persistent sessions object and context manager\n- Transparent Dict-cookie handling\n- Status code reference object\n- Removed Response.cached\n- Added Response.request\n- All args are kwargs\n- Relative redirect support\n- HTTPError handling improvements\n- Improved https testing\n- Bugfixes\n\n0.5.1 (2011-07-23)\n------------------\n\n- International Domain Name Support!\n- Access headers without fetching entire body (`read()`)\n- Use lists as dicts for parameters\n- Add Forced Basic Authentication\n- Forced Basic is default authentication type\n- `python-requests.org` default User-Agent header\n- CaseInsensitiveDict lower-case caching\n- Response.history bugfix\n\n0.5.0 (2011-06-21)\n------------------\n\n- PATCH Support\n- Support for Proxies\n- HTTPBin Test Suite\n- Redirect Fixes\n- settings.verbose stream writing\n- Querystrings for all methods\n- URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as\n explicitly raised\n `r.requests.get('hwe://blah'); r.raise_for_status()`\n\n0.4.1 (2011-05-22)\n------------------\n\n- Improved Redirection Handling\n- New 'allow\\_redirects' param for following non-GET/HEAD Redirects\n- Settings module refactoring\n\n0.4.0 (2011-05-15)\n------------------\n\n- Response.history: list of redirected responses\n- Case-Insensitive Header Dictionaries!\n- Unicode URLs\n\n0.3.4 (2011-05-14)\n------------------\n\n- Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)\n- Internal Refactor\n- Bytes data upload Bugfix\n\n0.3.3 (2011-05-12)\n------------------\n\n- Request timeouts\n- Unicode url-encoded data\n- Settings context manager and module\n\n0.3.2 (2011-04-15)\n------------------\n\n- Automatic Decompression of GZip Encoded Content\n- AutoAuth Support for Tupled HTTP Auth\n\n0.3.1 (2011-04-01)\n------------------\n\n- Cookie Changes\n- Response.read()\n- Poster fix\n\n0.3.0 (2011-02-25)\n------------------\n\n- Automatic Authentication API Change\n- Smarter Query URL Parameterization\n- Allow file uploads and POST data together\n- \n\n New Authentication Manager System\n\n : - Simpler Basic HTTP System\n - Supports all built-in urllib2 Auths\n - Allows for custom Auth Handlers\n\n0.2.4 (2011-02-19)\n------------------\n\n- Python 2.5 Support\n- PyPy-c v1.4 Support\n- Auto-Authentication tests\n- Improved Request object constructor\n\n0.2.3 (2011-02-15)\n------------------\n\n- \n\n New HTTPHandling Methods\n\n : - Response.\\_\\_nonzero\\_\\_ (false if bad HTTP Status)\n - Response.ok (True if expected HTTP Status)\n - Response.error (Logged HTTPError if bad HTTP Status)\n - Response.raise\\_for\\_status() (Raises stored HTTPError)\n\n0.2.2 (2011-02-14)\n------------------\n\n- Still handles request in the event of an HTTPError. 
(Issue \\#2)\n- Eventlet and Gevent Monkeypatch support.\n- Cookie Support (Issue \\#1)\n\n0.2.1 (2011-02-14)\n------------------\n\n- Added file attribute to POST and PUT requests for multipart-encode\n file uploads.\n- Added Request.url attribute for context and redirects\n\n0.2.0 (2011-02-14)\n------------------\n\n- Birth!\n\n0.0.1 (2011-02-13)\n------------------\n\n- Frustration\n- Conception\n",
"path": "HISTORY.md"
},
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.utils\n~~~~~~~~~~~~~~\n\nThis module provides utility functions that are used within Requests\nthat are also useful for external consumption.\n\"\"\"\n\nimport codecs\nimport contextlib\nimport io\nimport os\nimport re\nimport socket\nimport struct\nimport sys\nimport tempfile\nimport warnings\nimport zipfile\nfrom collections import OrderedDict\nfrom urllib3.util import make_headers\n\nfrom .__version__ import __version__\nfrom . import certs\n# to_native_string is unused here, but imported here for backwards compatibility\nfrom ._internal_utils import to_native_string\nfrom .compat import parse_http_list as _parse_list_header\nfrom .compat import (\n quote, urlparse, bytes, str, unquote, getproxies,\n proxy_bypass, urlunparse, basestring, integer_types, is_py3,\n proxy_bypass_environment, getproxies_environment, Mapping)\nfrom .cookies import cookiejar_from_dict\nfrom .structures import CaseInsensitiveDict\nfrom .exceptions import (\n InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)\n\nNETRC_FILES = ('.netrc', '_netrc')\n\nDEFAULT_CA_BUNDLE_PATH = certs.where()\n\nDEFAULT_PORTS = {'http': 80, 'https': 443}\n\n# Ensure that ', ' is used to preserve previous delimiter behavior.\nDEFAULT_ACCEPT_ENCODING = \", \".join(\n re.split(r\",\\s*\", make_headers(accept_encoding=True)[\"accept-encoding\"])\n)\n\n\nif sys.platform == 'win32':\n # provide a proxy_bypass version on Windows without DNS lookups\n\n def proxy_bypass_registry(host):\n try:\n if is_py3:\n import winreg\n else:\n import _winreg as winreg\n except ImportError:\n return False\n\n try:\n internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n r'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings')\n # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it\n proxyEnable = int(winreg.QueryValueEx(internetSettings,\n 'ProxyEnable')[0])\n # ProxyOverride is almost always a string\n proxyOverride = winreg.QueryValueEx(internetSettings,\n 'ProxyOverride')[0]\n except OSError:\n return False\n if not proxyEnable or not proxyOverride:\n return False\n\n # make a check value list from the registry entry: replace the\n # '<local>' string by the localhost entry and the corresponding\n # canonical entry.\n proxyOverride = proxyOverride.split(';')\n # now check if we match one of the registry values.\n for test in proxyOverride:\n if test == '<local>':\n if '.' 
not in host:\n return True\n test = test.replace(\".\", r\"\\.\") # mask dots\n test = test.replace(\"*\", r\".*\") # change glob sequence\n test = test.replace(\"?\", r\".\") # change glob char\n if re.match(test, host, re.I):\n return True\n return False\n\n def proxy_bypass(host): # noqa\n \"\"\"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n \"\"\"\n if getproxies_environment():\n return proxy_bypass_environment(host)\n else:\n return proxy_bypass_registry(host)\n\n\ndef dict_to_sequence(d):\n \"\"\"Returns an internal sequence dictionary update.\"\"\"\n\n if hasattr(d, 'items'):\n d = d.items()\n\n return d\n\n\ndef super_len(o):\n total_length = None\n current_position = 0\n\n if hasattr(o, '__len__'):\n total_length = len(o)\n\n elif hasattr(o, 'len'):\n total_length = o.len\n\n elif hasattr(o, 'fileno'):\n try:\n fileno = o.fileno()\n except io.UnsupportedOperation:\n pass\n else:\n total_length = os.fstat(fileno).st_size\n\n # Having used fstat to determine the file length, we need to\n # confirm that this file was opened up in binary mode.\n if 'b' not in o.mode:\n warnings.warn((\n \"Requests has determined the content-length for this \"\n \"request using the binary size of the file: however, the \"\n \"file has been opened in text mode (i.e. without the 'b' \"\n \"flag in the mode). This may lead to an incorrect \"\n \"content-length. In Requests 3.0, support will be removed \"\n \"for files in text mode.\"),\n FileModeWarning\n )\n\n if hasattr(o, 'tell'):\n try:\n current_position = o.tell()\n except (OSError, IOError):\n # This can happen in some weird situations, such as when the file\n # is actually a special file descriptor like stdin. In this\n # instance, we don't know what the length is, so set it to zero and\n # let requests chunk it instead.\n if total_length is not None:\n current_position = total_length\n else:\n if hasattr(o, 'seek') and total_length is None:\n # StringIO and BytesIO have seek but no usable fileno\n try:\n # seek to end of file\n o.seek(0, 2)\n total_length = o.tell()\n\n # seek back to current position to support\n # partially read file-like objects\n o.seek(current_position or 0)\n except (OSError, IOError):\n total_length = 0\n\n if total_length is None:\n total_length = 0\n\n return max(0, total_length - current_position)\n\n\ndef get_netrc_auth(url, raise_errors=False):\n \"\"\"Returns the Requests tuple auth for a given url from netrc.\"\"\"\n\n netrc_file = os.environ.get('NETRC')\n if netrc_file is not None:\n netrc_locations = (netrc_file,)\n else:\n netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)\n\n try:\n from netrc import netrc, NetrcParseError\n\n netrc_path = None\n\n for f in netrc_locations:\n try:\n loc = os.path.expanduser(f)\n except KeyError:\n # os.path.expanduser can fail when $HOME is undefined and\n # getpwuid fails. See https://bugs.python.org/issue20164 &\n # https://github.com/psf/requests/issues/1846\n return\n\n if os.path.exists(loc):\n netrc_path = loc\n break\n\n # Abort early if there isn't one.\n if netrc_path is None:\n return\n\n ri = urlparse(url)\n\n # Strip port numbers from netloc. 
This weird `if...encode`` dance is\n # used for Python 3.2, which doesn't support unicode literals.\n splitstr = b':'\n if isinstance(url, str):\n splitstr = splitstr.decode('ascii')\n host = ri.netloc.split(splitstr)[0]\n\n try:\n _netrc = netrc(netrc_path).authenticators(host)\n if _netrc:\n # Return with login / password\n login_i = (0 if _netrc[0] else 1)\n return (_netrc[login_i], _netrc[2])\n except (NetrcParseError, IOError):\n # If there was a parsing error or a permissions issue reading the file,\n # we'll just skip netrc auth unless explicitly asked to raise errors.\n if raise_errors:\n raise\n\n # App Engine hackiness.\n except (ImportError, AttributeError):\n pass\n\n\ndef guess_filename(obj):\n \"\"\"Tries to guess the filename of the given object.\"\"\"\n name = getattr(obj, 'name', None)\n if (name and isinstance(name, basestring) and name[0] != '<' and\n name[-1] != '>'):\n return os.path.basename(name)\n\n\ndef extract_zipped_paths(path):\n \"\"\"Replace nonexistent paths that look like they refer to a member of a zip\n archive with the location of an extracted copy of the target, or else\n just return the provided path unchanged.\n \"\"\"\n if os.path.exists(path):\n # this is already a valid path, no need to do anything further\n return path\n\n # find the first valid part of the provided path and treat that as a zip archive\n # assume the rest of the path is the name of a member in the archive\n archive, member = os.path.split(path)\n while archive and not os.path.exists(archive):\n archive, prefix = os.path.split(archive)\n if not prefix:\n # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),\n # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users\n break\n member = '/'.join([prefix, member])\n\n if not zipfile.is_zipfile(archive):\n return path\n\n zip_file = zipfile.ZipFile(archive)\n if member not in zip_file.namelist():\n return path\n\n # we have a valid zip archive and a valid member of that archive\n tmp = tempfile.gettempdir()\n extracted_path = os.path.join(tmp, member.split('/')[-1])\n if not os.path.exists(extracted_path):\n # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition\n with atomic_open(extracted_path) as file_handler:\n file_handler.write(zip_file.read(member))\n return extracted_path\n\n\n@contextlib.contextmanager\ndef atomic_open(filename):\n \"\"\"Write a file to the disk in an atomic fashion\"\"\"\n replacer = os.rename if sys.version_info[0] == 2 else os.replace\n tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))\n try:\n with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:\n yield tmp_handler\n replacer(tmp_name, filename)\n except BaseException:\n os.remove(tmp_name)\n raise\n\n\ndef from_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. 
Unless it can not be represented as such, return an\n OrderedDict, e.g.,\n\n ::\n\n >>> from_key_val_list([('key', 'val')])\n OrderedDict([('key', 'val')])\n >>> from_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n >>> from_key_val_list({'key': 'val'})\n OrderedDict([('key', 'val')])\n\n :rtype: OrderedDict\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n return OrderedDict(value)\n\n\ndef to_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. If it can be, return a list of tuples, e.g.,\n\n ::\n\n >>> to_key_val_list([('key', 'val')])\n [('key', 'val')]\n >>> to_key_val_list({'key': 'val'})\n [('key', 'val')]\n >>> to_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n\n :rtype: list\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n if isinstance(value, Mapping):\n value = value.items()\n\n return list(value)\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_list_header(value):\n \"\"\"Parse lists as described by RFC 2068 Section 2.\n\n In particular, parse comma-separated lists where the elements of\n the list may include quoted-strings. A quoted-string could\n contain a comma. A non-quoted string could have quotes in the\n middle. Quotes are removed automatically after parsing.\n\n It basically works like :func:`parse_set_header` just that items\n may appear multiple times and case sensitivity is preserved.\n\n The return value is a standard :class:`list`:\n\n >>> parse_list_header('token, \"quoted value\"')\n ['token', 'quoted value']\n\n To create a header from the :class:`list` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a list header.\n :return: :class:`list`\n :rtype: list\n \"\"\"\n result = []\n for item in _parse_list_header(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_dict_header(value):\n \"\"\"Parse lists of key, value pairs as described by RFC 2068 Section 2 and\n convert them into a python dict:\n\n >>> d = parse_dict_header('foo=\"is a fish\", bar=\"as well\"')\n >>> type(d) is dict\n True\n >>> sorted(d.items())\n [('bar', 'as well'), ('foo', 'is a fish')]\n\n If there is no value for a key it will be `None`:\n\n >>> parse_dict_header('key_without_value')\n {'key_without_value': None}\n\n To create a header from the :class:`dict` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a dict header.\n :return: :class:`dict`\n :rtype: dict\n \"\"\"\n result = {}\n for item in _parse_list_header(value):\n if '=' not in item:\n result[item] = None\n continue\n name, value = item.split('=', 1)\n if value[:1] == value[-1:] == '\"':\n value = unquote_header_value(value[1:-1])\n result[name] = value\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef unquote_header_value(value, is_filename=False):\n r\"\"\"Unquotes a header value. 
(Reversal of :func:`quote_header_value`).\n This does not use the real unquoting but what browsers are actually\n using for quoting.\n\n :param value: the header value to unquote.\n :rtype: str\n \"\"\"\n if value and value[0] == value[-1] == '\"':\n # this is not the real unquoting, but fixing this so that the\n # RFC is met will result in bugs with internet explorer and\n # probably some other browsers as well. IE for example is\n # uploading files with \"C:\\foo\\bar.txt\" as filename\n value = value[1:-1]\n\n # if this is a filename and the starting characters look like\n # a UNC path, then just return the value without quotes. Using the\n # replace sequence below on a UNC path has the effect of turning\n # the leading double slash into a single slash and then\n # _fix_ie_filename() doesn't work correctly. See #458.\n if not is_filename or value[:2] != '\\\\\\\\':\n return value.replace('\\\\\\\\', '\\\\').replace('\\\\\"', '\"')\n return value\n\n\ndef dict_from_cookiejar(cj):\n \"\"\"Returns a key/value dictionary from a CookieJar.\n\n :param cj: CookieJar object to extract cookies from.\n :rtype: dict\n \"\"\"\n\n cookie_dict = {}\n\n for cookie in cj:\n cookie_dict[cookie.name] = cookie.value\n\n return cookie_dict\n\n\ndef add_dict_to_cookiejar(cj, cookie_dict):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cj: CookieJar to insert cookies into.\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :rtype: CookieJar\n \"\"\"\n\n return cookiejar_from_dict(cookie_dict, cj)\n\n\ndef get_encodings_from_content(content):\n \"\"\"Returns encodings from given content string.\n\n :param content: bytestring to extract encodings from.\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_encodings_from_content will be removed. For '\n 'more information, please see the discussion on issue #2266. 
(This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n charset_re = re.compile(r'<meta.*?charset=[\"\\']*(.+?)[\"\\'>]', flags=re.I)\n pragma_re = re.compile(r'<meta.*?content=[\"\\']*;?charset=(.+?)[\"\\'>]', flags=re.I)\n xml_re = re.compile(r'^<\\?xml.*?encoding=[\"\\']*(.+?)[\"\\'>]')\n\n return (charset_re.findall(content) +\n pragma_re.findall(content) +\n xml_re.findall(content))\n\n\ndef _parse_content_type_header(header):\n \"\"\"Returns content type and parameters from given header\n\n :param header: string\n :return: tuple containing content type and dictionary of\n parameters\n \"\"\"\n\n tokens = header.split(';')\n content_type, params = tokens[0].strip(), tokens[1:]\n params_dict = {}\n items_to_strip = \"\\\"' \"\n\n for param in params:\n param = param.strip()\n if param:\n key, value = param, True\n index_of_equals = param.find(\"=\")\n if index_of_equals != -1:\n key = param[:index_of_equals].strip(items_to_strip)\n value = param[index_of_equals + 1:].strip(items_to_strip)\n params_dict[key.lower()] = value\n return content_type, params_dict\n\n\ndef get_encoding_from_headers(headers):\n \"\"\"Returns encodings from given HTTP Header Dict.\n\n :param headers: dictionary to extract encoding from.\n :rtype: str\n \"\"\"\n\n content_type = headers.get('content-type')\n\n if not content_type:\n return None\n\n content_type, params = _parse_content_type_header(content_type)\n\n if 'charset' in params:\n return params['charset'].strip(\"'\\\"\")\n\n if 'text' in content_type:\n return 'ISO-8859-1'\n\n if 'application/json' in content_type:\n # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset\n return 'utf-8'\n\n\ndef stream_decode_response_unicode(iterator, r):\n \"\"\"Stream decodes a iterator.\"\"\"\n\n if r.encoding is None:\n for item in iterator:\n yield item\n return\n\n decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')\n for chunk in iterator:\n rv = decoder.decode(chunk)\n if rv:\n yield rv\n rv = decoder.decode(b'', final=True)\n if rv:\n yield rv\n\n\ndef iter_slices(string, slice_length):\n \"\"\"Iterate over slices of a string.\"\"\"\n pos = 0\n if slice_length is None or slice_length <= 0:\n slice_length = len(string)\n while pos < len(string):\n yield string[pos:pos + slice_length]\n pos += slice_length\n\n\ndef get_unicode_from_response(r):\n \"\"\"Returns the requested content back in unicode.\n\n :param r: Response object to get unicode content from.\n\n Tried:\n\n 1. charset from content-type\n 2. fall back and replace all unicode characters\n\n :rtype: str\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_unicode_from_response will be removed. For '\n 'more information, please see the discussion on issue #2266. (This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n tried_encodings = []\n\n # Try charset from content-type\n encoding = get_encoding_from_headers(r.headers)\n\n if encoding:\n try:\n return str(r.content, encoding)\n except UnicodeError:\n tried_encodings.append(encoding)\n\n # Fall back:\n try:\n return str(r.content, encoding, errors='replace')\n except TypeError:\n return r.content\n\n\n# The unreserved URI characters (RFC 3986)\nUNRESERVED_SET = frozenset(\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\" + \"0123456789-._~\")\n\n\ndef unquote_unreserved(uri):\n \"\"\"Un-escape any percent-escape sequences in a URI that are unreserved\n characters. 
This leaves all reserved, illegal and non-ASCII bytes encoded.\n\n :rtype: str\n \"\"\"\n parts = uri.split('%')\n for i in range(1, len(parts)):\n h = parts[i][0:2]\n if len(h) == 2 and h.isalnum():\n try:\n c = chr(int(h, 16))\n except ValueError:\n raise InvalidURL(\"Invalid percent-escape sequence: '%s'\" % h)\n\n if c in UNRESERVED_SET:\n parts[i] = c + parts[i][2:]\n else:\n parts[i] = '%' + parts[i]\n else:\n parts[i] = '%' + parts[i]\n return ''.join(parts)\n\n\ndef requote_uri(uri):\n \"\"\"Re-quote the given URI.\n\n This function passes the given URI through an unquote/quote cycle to\n ensure that it is fully and consistently quoted.\n\n :rtype: str\n \"\"\"\n safe_with_percent = \"!#$%&'()*+,/:;=?@[]~\"\n safe_without_percent = \"!#$&'()*+,/:;=?@[]~\"\n try:\n # Unquote only the unreserved characters\n # Then quote only illegal characters (do not quote reserved,\n # unreserved, or '%')\n return quote(unquote_unreserved(uri), safe=safe_with_percent)\n except InvalidURL:\n # We couldn't unquote the given URI, so let's try quoting it, but\n # there may be unquoted '%'s in the URI. We need to make sure they're\n # properly quoted so they do not cause issues elsewhere.\n return quote(uri, safe=safe_without_percent)\n\n\ndef address_in_network(ip, net):\n \"\"\"This function allows you to check if an IP belongs to a network subnet\n\n Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24\n returns False if ip = 192.168.1.1 and net = 192.168.100.0/24\n\n :rtype: bool\n \"\"\"\n ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]\n netaddr, bits = net.split('/')\n netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]\n network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask\n return (ipaddr & netmask) == (network & netmask)\n\n\ndef dotted_netmask(mask):\n \"\"\"Converts mask from /xx format to xxx.xxx.xxx.xxx\n\n Example: if mask is 24 function returns 255.255.255.0\n\n :rtype: str\n \"\"\"\n bits = 0xffffffff ^ (1 << 32 - mask) - 1\n return socket.inet_ntoa(struct.pack('>I', bits))\n\n\ndef is_ipv4_address(string_ip):\n \"\"\"\n :rtype: bool\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except socket.error:\n return False\n return True\n\n\ndef is_valid_cidr(string_network):\n \"\"\"\n Very simple check of the cidr format in no_proxy variable.\n\n :rtype: bool\n \"\"\"\n if string_network.count('/') == 1:\n try:\n mask = int(string_network.split('/')[1])\n except ValueError:\n return False\n\n if mask < 1 or mask > 32:\n return False\n\n try:\n socket.inet_aton(string_network.split('/')[0])\n except socket.error:\n return False\n else:\n return False\n return True\n\n\n@contextlib.contextmanager\ndef set_environ(env_name, value):\n \"\"\"Set the environment variable 'env_name' to 'value'\n\n Save previous value, yield, and then restore the previous value stored in\n the environment variable 'env_name'.\n\n If 'value' is None, do nothing\"\"\"\n value_changed = value is not None\n if value_changed:\n old_value = os.environ.get(env_name)\n os.environ[env_name] = value\n try:\n yield\n finally:\n if value_changed:\n if old_value is None:\n del os.environ[env_name]\n else:\n os.environ[env_name] = old_value\n\n\ndef should_bypass_proxies(url, no_proxy):\n \"\"\"\n Returns whether we should bypass proxies or not.\n\n :rtype: bool\n \"\"\"\n # Prioritize lowercase environment variables over uppercase\n # to keep a consistent behaviour with other http projects (curl, wget).\n get_proxy = lambda k: os.environ.get(k) or 
os.environ.get(k.upper())\n\n # First check whether no_proxy is defined. If it is, check that the URL\n # we're getting isn't in the no_proxy list.\n no_proxy_arg = no_proxy\n if no_proxy is None:\n no_proxy = get_proxy('no_proxy')\n parsed = urlparse(url)\n\n if parsed.hostname is None:\n # URLs don't always have hostnames, e.g. file:/// urls.\n return True\n\n if no_proxy:\n # We need to check whether we match here. We need to see if we match\n # the end of the hostname, both with and without the port.\n no_proxy = (\n host for host in no_proxy.replace(' ', '').split(',') if host\n )\n\n if is_ipv4_address(parsed.hostname):\n for proxy_ip in no_proxy:\n if is_valid_cidr(proxy_ip):\n if address_in_network(parsed.hostname, proxy_ip):\n return True\n elif parsed.hostname == proxy_ip:\n # If no_proxy ip was defined in plain IP notation instead of cidr notation &\n # matches the IP of the index\n return True\n else:\n host_with_port = parsed.hostname\n if parsed.port:\n host_with_port += ':{}'.format(parsed.port)\n\n for host in no_proxy:\n if parsed.hostname.endswith(host) or host_with_port.endswith(host):\n # The URL does match something in no_proxy, so we don't want\n # to apply the proxies on this URL.\n return True\n\n with set_environ('no_proxy', no_proxy_arg):\n # parsed.hostname can be `None` in cases such as a file URI.\n try:\n bypass = proxy_bypass(parsed.hostname)\n except (TypeError, socket.gaierror):\n bypass = False\n\n if bypass:\n return True\n\n return False\n\n\ndef get_environ_proxies(url, no_proxy=None):\n \"\"\"\n Return a dict of environment proxies.\n\n :rtype: dict\n \"\"\"\n if should_bypass_proxies(url, no_proxy=no_proxy):\n return {}\n else:\n return getproxies()\n\n\ndef select_proxy(url, proxies):\n \"\"\"Select a proxy for the url, if applicable.\n\n :param url: The url being for the request\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n \"\"\"\n proxies = proxies or {}\n urlparts = urlparse(url)\n if urlparts.hostname is None:\n return proxies.get(urlparts.scheme, proxies.get('all'))\n\n proxy_keys = [\n urlparts.scheme + '://' + urlparts.hostname,\n urlparts.scheme,\n 'all://' + urlparts.hostname,\n 'all',\n ]\n proxy = None\n for proxy_key in proxy_keys:\n if proxy_key in proxies:\n proxy = proxies[proxy_key]\n break\n\n return proxy\n\n\ndef resolve_proxies(request, proxies, trust_env=True):\n \"\"\"This method takes proxy information from a request and configuration\n input to resolve a mapping of target proxies. 
This will consider settings\n such a NO_PROXY to strip proxy configurations.\n\n :param request: Request or PreparedRequest\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n :param trust_env: Boolean declaring whether to trust environment configs\n\n :rtype: dict\n \"\"\"\n proxies = proxies if proxies is not None else {}\n url = request.url\n scheme = urlparse(url).scheme\n no_proxy = proxies.get('no_proxy')\n new_proxies = proxies.copy()\n\n bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)\n if trust_env and not bypass_proxy:\n environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n\n proxy = environ_proxies.get(scheme, environ_proxies.get('all'))\n\n if proxy:\n new_proxies.setdefault(scheme, proxy)\n return new_proxies\n\n\ndef default_user_agent(name=\"python-requests\"):\n \"\"\"\n Return a string representing the default user agent.\n\n :rtype: str\n \"\"\"\n return '%s/%s' % (name, __version__)\n\n\ndef default_headers():\n \"\"\"\n :rtype: requests.structures.CaseInsensitiveDict\n \"\"\"\n return CaseInsensitiveDict({\n 'User-Agent': default_user_agent(),\n 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,\n 'Accept': '*/*',\n 'Connection': 'keep-alive',\n })\n\n\ndef parse_header_links(value):\n \"\"\"Return a list of parsed link headers proxies.\n\n i.e. Link: <http:/.../front.jpeg>; rel=front; type=\"image/jpeg\",<http://.../back.jpeg>; rel=back;type=\"image/jpeg\"\n\n :rtype: list\n \"\"\"\n\n links = []\n\n replace_chars = ' \\'\"'\n\n value = value.strip(replace_chars)\n if not value:\n return links\n\n for val in re.split(', *<', value):\n try:\n url, params = val.split(';', 1)\n except ValueError:\n url, params = val, ''\n\n link = {'url': url.strip('<> \\'\"')}\n\n for param in params.split(';'):\n try:\n key, value = param.split('=')\n except ValueError:\n break\n\n link[key.strip(replace_chars)] = value.strip(replace_chars)\n\n links.append(link)\n\n return links\n\n\n# Null bytes; no need to recreate these on each call to guess_json_utf\n_null = '\\x00'.encode('ascii') # encoding to ASCII for Python 3\n_null2 = _null * 2\n_null3 = _null * 3\n\n\ndef guess_json_utf(data):\n \"\"\"\n :rtype: str\n \"\"\"\n # JSON always starts with two ASCII characters, so detection is as\n # easy as counting the nulls and from their location and count\n # determine the encoding. 
Also detect a BOM, if present.\n sample = data[:4]\n if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):\n return 'utf-32' # BOM included\n if sample[:3] == codecs.BOM_UTF8:\n return 'utf-8-sig' # BOM included, MS style (discouraged)\n if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):\n return 'utf-16' # BOM included\n nullcount = sample.count(_null)\n if nullcount == 0:\n return 'utf-8'\n if nullcount == 2:\n if sample[::2] == _null2: # 1st and 3rd are null\n return 'utf-16-be'\n if sample[1::2] == _null2: # 2nd and 4th are null\n return 'utf-16-le'\n # Did not detect 2 valid UTF-16 ascii-range characters\n if nullcount == 3:\n if sample[:3] == _null3:\n return 'utf-32-be'\n if sample[1:] == _null3:\n return 'utf-32-le'\n # Did not detect a valid UTF-32 ascii-range character\n return None\n\n\ndef prepend_scheme_if_needed(url, new_scheme):\n \"\"\"Given a URL that may or may not have a scheme, prepend the given scheme.\n Does not replace a present scheme with the one provided as an argument.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)\n\n # urlparse is a finicky beast, and sometimes decides that there isn't a\n # netloc present. Assume that it's being over-cautious, and switch netloc\n # and path if urlparse decided there was no netloc.\n if not netloc:\n netloc, path = path, netloc\n\n return urlunparse((scheme, netloc, path, params, query, fragment))\n\n\ndef get_auth_from_url(url):\n \"\"\"Given a url with authentication components, extract them into a tuple of\n username,password.\n\n :rtype: (str,str)\n \"\"\"\n parsed = urlparse(url)\n\n try:\n auth = (unquote(parsed.username), unquote(parsed.password))\n except (AttributeError, TypeError):\n auth = ('', '')\n\n return auth\n\n\n# Moved outside of function to avoid recompile every call\n_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\\\S[^\\\\r\\\\n]*$|^$')\n_CLEAN_HEADER_REGEX_STR = re.compile(r'^\\S[^\\r\\n]*$|^$')\n\n\ndef check_header_validity(header):\n \"\"\"Verifies that header value is a string which doesn't contain\n leading whitespace or return characters. 
This prevents unintended\n header injection.\n\n :param header: tuple, in the format (name, value).\n \"\"\"\n name, value = header\n\n if isinstance(value, bytes):\n pat = _CLEAN_HEADER_REGEX_BYTE\n else:\n pat = _CLEAN_HEADER_REGEX_STR\n try:\n if not pat.match(value):\n raise InvalidHeader(\"Invalid return character or leading space in header: %s\" % name)\n except TypeError:\n raise InvalidHeader(\"Value for header {%s: %s} must be of type str or \"\n \"bytes, not %s\" % (name, value, type(value)))\n\n\ndef urldefragauth(url):\n \"\"\"\n Given a url remove the fragment and the authentication part.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url)\n\n # see func:`prepend_scheme_if_needed`\n if not netloc:\n netloc, path = path, netloc\n\n netloc = netloc.rsplit('@', 1)[-1]\n\n return urlunparse((scheme, netloc, path, params, query, ''))\n\n\ndef rewind_body(prepared_request):\n \"\"\"Move file pointer back to its recorded starting position\n so it can be read again on redirect.\n \"\"\"\n body_seek = getattr(prepared_request.body, 'seek', None)\n if body_seek is not None and isinstance(prepared_request._body_position, integer_types):\n try:\n body_seek(prepared_request._body_position)\n except (IOError, OSError):\n raise UnrewindableBodyError(\"An error occurred when rewinding request \"\n \"body for redirect.\")\n else:\n raise UnrewindableBodyError(\"Unable to rewind request body for redirect.\")\n",
"path": "requests/utils.py"
},
{
"content": "# -*- coding: utf-8 -*-\n\nimport os\nimport copy\nimport filecmp\nfrom io import BytesIO\nimport zipfile\nfrom collections import deque\n\nimport pytest\nfrom requests import compat\nfrom requests.cookies import RequestsCookieJar\nfrom requests.structures import CaseInsensitiveDict\nfrom requests.utils import (\n address_in_network, dotted_netmask, extract_zipped_paths,\n get_auth_from_url, _parse_content_type_header, get_encoding_from_headers,\n get_encodings_from_content, get_environ_proxies,\n guess_filename, guess_json_utf, is_ipv4_address,\n is_valid_cidr, iter_slices, parse_dict_header,\n parse_header_links, prepend_scheme_if_needed,\n requote_uri, select_proxy, should_bypass_proxies, super_len,\n to_key_val_list, to_native_string,\n unquote_header_value, unquote_unreserved,\n urldefragauth, add_dict_to_cookiejar, set_environ)\nfrom requests._internal_utils import unicode_is_ascii\n\nfrom .compat import StringIO, cStringIO\n\n\nclass TestSuperLen:\n\n @pytest.mark.parametrize(\n 'stream, value', (\n (StringIO.StringIO, 'Test'),\n (BytesIO, b'Test'),\n pytest.param(cStringIO, 'Test',\n marks=pytest.mark.skipif('cStringIO is None')),\n ))\n def test_io_streams(self, stream, value):\n \"\"\"Ensures that we properly deal with different kinds of IO streams.\"\"\"\n assert super_len(stream()) == 0\n assert super_len(stream(value)) == 4\n\n def test_super_len_correctly_calculates_len_of_partially_read_file(self):\n \"\"\"Ensure that we handle partially consumed file like objects.\"\"\"\n s = StringIO.StringIO()\n s.write('foobarbogus')\n assert super_len(s) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_handles_files_raising_weird_errors_in_tell(self, error):\n \"\"\"If tell() raises errors, assume the cursor is at position zero.\"\"\"\n class BoomFile(object):\n def __len__(self):\n return 5\n\n def tell(self):\n raise error()\n\n assert super_len(BoomFile()) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_tell_ioerror(self, error):\n \"\"\"Ensure that if tell gives an IOError super_len doesn't fail\"\"\"\n class NoLenBoomFile(object):\n def tell(self):\n raise error()\n\n def seek(self, offset, whence):\n pass\n\n assert super_len(NoLenBoomFile()) == 0\n\n def test_string(self):\n assert super_len('Test') == 4\n\n @pytest.mark.parametrize(\n 'mode, warnings_num', (\n ('r', 1),\n ('rb', 0),\n ))\n def test_file(self, tmpdir, mode, warnings_num, recwarn):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n with file_obj.open(mode) as fd:\n assert super_len(fd) == 4\n assert len(recwarn) == warnings_num\n\n def test_super_len_with__len__(self):\n foo = [1,2,3,4]\n len_foo = super_len(foo)\n assert len_foo == 4\n\n def test_super_len_with_no__len__(self):\n class LenFile(object):\n def __init__(self):\n self.len = 5\n\n assert super_len(LenFile()) == 5\n\n def test_super_len_with_tell(self):\n foo = StringIO.StringIO('12345')\n assert super_len(foo) == 5\n foo.read(2)\n assert super_len(foo) == 3\n\n def test_super_len_with_fileno(self):\n with open(__file__, 'rb') as f:\n length = super_len(f)\n file_data = f.read()\n assert length == len(file_data)\n\n def test_super_len_with_no_matches(self):\n \"\"\"Ensure that objects without any length methods default to 0\"\"\"\n assert super_len(object()) == 0\n\n\nclass TestToKeyValList:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n ([('key', 'val')], [('key', 'val')]),\n ((('key', 'val'), ), [('key', 'val')]),\n ({'key': 'val'}, [('key', 
'val')]),\n (None, None)\n ))\n def test_valid(self, value, expected):\n assert to_key_val_list(value) == expected\n\n def test_invalid(self):\n with pytest.raises(ValueError):\n to_key_val_list('string')\n\n\nclass TestUnquoteHeaderValue:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n (None, None),\n ('Test', 'Test'),\n ('\"Test\"', 'Test'),\n ('\"Test\\\\\\\\\"', 'Test\\\\'),\n ('\"\\\\\\\\Comp\\\\Res\"', '\\\\Comp\\\\Res'),\n ))\n def test_valid(self, value, expected):\n assert unquote_header_value(value) == expected\n\n def test_is_filename(self):\n assert unquote_header_value('\"\\\\\\\\Comp\\\\Res\"', True) == '\\\\\\\\Comp\\\\Res'\n\n\nclass TestGetEnvironProxies:\n \"\"\"Ensures that IP addresses are correctly matches with ranges\n in no_proxy variable.\n \"\"\"\n\n @pytest.fixture(autouse=True, params=['no_proxy', 'NO_PROXY'])\n def no_proxy(self, request, monkeypatch):\n monkeypatch.setenv(request.param, '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1')\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_not_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) != {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_bypass_no_proxy_keyword(self, url):\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_not_bypass_no_proxy_keyword(self, url, monkeypatch):\n # This is testing that the 'no_proxy' argument overrides the\n # environment variable 'no_proxy'\n monkeypatch.setenv('http_proxy', 'http://proxy.example.com:3128/')\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) != {}\n\n\nclass TestIsIPv4Address:\n\n def test_valid(self):\n assert is_ipv4_address('8.8.8.8')\n\n @pytest.mark.parametrize('value', ('8.8.8.8.8', 'localhost.localdomain'))\n def test_invalid(self, value):\n assert not is_ipv4_address(value)\n\n\nclass TestIsValidCIDR:\n\n def test_valid(self):\n assert is_valid_cidr('192.168.1.0/24')\n\n @pytest.mark.parametrize(\n 'value', (\n '8.8.8.8',\n '192.168.1.0/a',\n '192.168.1.0/128',\n '192.168.1.0/-1',\n '192.168.1.999/24',\n ))\n def test_invalid(self, value):\n assert not is_valid_cidr(value)\n\n\nclass TestAddressInNetwork:\n\n def test_valid(self):\n assert address_in_network('192.168.1.1', '192.168.1.0/24')\n\n def test_invalid(self):\n assert not address_in_network('172.16.0.1', '192.168.1.0/24')\n\n\nclass TestGuessFilename:\n\n @pytest.mark.parametrize(\n 'value', (1, type('Fake', (object,), {'name': 1})()),\n )\n def test_guess_filename_invalid(self, value):\n assert guess_filename(value) is None\n\n @pytest.mark.parametrize(\n 'value, expected_type', (\n (b'value', compat.bytes),\n (b'value'.decode('utf-8'), compat.str)\n ))\n def test_guess_filename_valid(self, value, expected_type):\n obj = type('Fake', (object,), {'name': value})()\n result = guess_filename(obj)\n assert 
result == value\n assert isinstance(result, expected_type)\n\n\nclass TestExtractZippedPaths:\n\n @pytest.mark.parametrize(\n 'path', (\n '/',\n __file__,\n pytest.__file__,\n '/etc/invalid/location',\n ))\n def test_unzipped_paths_unchanged(self, path):\n assert path == extract_zipped_paths(path)\n\n def test_zipped_paths_extracted(self, tmpdir):\n zipped_py = tmpdir.join('test.zip')\n with zipfile.ZipFile(zipped_py.strpath, 'w') as f:\n f.write(__file__)\n\n _, name = os.path.splitdrive(__file__)\n zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r'\\/'))\n extracted_path = extract_zipped_paths(zipped_path)\n\n assert extracted_path != zipped_path\n assert os.path.exists(extracted_path)\n assert filecmp.cmp(extracted_path, __file__)\n\n def test_invalid_unc_path(self):\n path = r\"\\\\localhost\\invalid\\location\"\n assert extract_zipped_paths(path) == path\n\n\nclass TestContentEncodingDetection:\n\n def test_none(self):\n encodings = get_encodings_from_content('')\n assert not len(encodings)\n\n @pytest.mark.parametrize(\n 'content', (\n # HTML5 meta charset attribute\n '<meta charset=\"UTF-8\">',\n # HTML4 pragma directive\n '<meta http-equiv=\"Content-type\" content=\"text/html;charset=UTF-8\">',\n # XHTML 1.x served with text/html MIME type\n '<meta http-equiv=\"Content-type\" content=\"text/html;charset=UTF-8\" />',\n # XHTML 1.x served as XML\n '<?xml version=\"1.0\" encoding=\"UTF-8\"?>',\n ))\n def test_pragmas(self, content):\n encodings = get_encodings_from_content(content)\n assert len(encodings) == 1\n assert encodings[0] == 'UTF-8'\n\n def test_precedence(self):\n content = '''\n <?xml version=\"1.0\" encoding=\"XML\"?>\n <meta charset=\"HTML5\">\n <meta http-equiv=\"Content-type\" content=\"text/html;charset=HTML4\" />\n '''.strip()\n assert get_encodings_from_content(content) == ['HTML5', 'HTML4', 'XML']\n\n\nclass TestGuessJSONUTF:\n\n @pytest.mark.parametrize(\n 'encoding', (\n 'utf-32', 'utf-8-sig', 'utf-16', 'utf-8', 'utf-16-be', 'utf-16-le',\n 'utf-32-be', 'utf-32-le'\n ))\n def test_encoded(self, encoding):\n data = '{}'.encode(encoding)\n assert guess_json_utf(data) == encoding\n\n def test_bad_utf_like_encoding(self):\n assert guess_json_utf(b'\\x00\\x00\\x00\\x00') is None\n\n @pytest.mark.parametrize(\n ('encoding', 'expected'), (\n ('utf-16-be', 'utf-16'),\n ('utf-16-le', 'utf-16'),\n ('utf-32-be', 'utf-32'),\n ('utf-32-le', 'utf-32')\n ))\n def test_guess_by_bom(self, encoding, expected):\n data = u'\\ufeff{}'.encode(encoding)\n assert guess_json_utf(data) == expected\n\n\nUSER = PASSWORD = \"%!*'();:@&=+$,/?#[] \"\nENCODED_USER = compat.quote(USER, '')\nENCODED_PASSWORD = compat.quote(PASSWORD, '')\n\n\n@pytest.mark.parametrize(\n 'url, auth', (\n (\n 'http://' + ENCODED_USER + ':' + ENCODED_PASSWORD + '@' +\n 'request.com/url.html#test',\n (USER, PASSWORD)\n ),\n (\n 'http://user:pass@complex.url.com/path?query=yes',\n ('user', 'pass')\n ),\n (\n 'http://user:pass%20pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user:pass pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user%25user:pass@complex.url.com/path?query=yes',\n ('user%user', 'pass')\n ),\n (\n 'http://user:pass%23pass@complex.url.com/path?query=yes',\n ('user', 'pass#pass')\n ),\n (\n 'http://complex.url.com/path?query=yes',\n ('', '')\n ),\n ))\ndef test_get_auth_from_url(url, auth):\n assert get_auth_from_url(url) == auth\n\n\n@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Ensure requoting doesn't break 
expectations\n 'http://example.com/fiz?buz=%25ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n (\n # Ensure we handle unquoted percent signs in redirects\n 'http://example.com/fiz?buz=%ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n ))\ndef test_requote_uri_with_unquoted_percents(uri, expected):\n \"\"\"See: https://github.com/psf/requests/issues/2356\"\"\"\n assert requote_uri(uri) == expected\n\n\n@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Illegal bytes\n 'http://example.com/?a=%--',\n 'http://example.com/?a=%--',\n ),\n (\n # Reserved characters\n 'http://example.com/?a=%300',\n 'http://example.com/?a=00',\n )\n ))\ndef test_unquote_unreserved(uri, expected):\n assert unquote_unreserved(uri) == expected\n\n\n@pytest.mark.parametrize(\n 'mask, expected', (\n (8, '255.0.0.0'),\n (24, '255.255.255.0'),\n (25, '255.255.255.128'),\n ))\ndef test_dotted_netmask(mask, expected):\n assert dotted_netmask(mask) == expected\n\n\nhttp_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy'}\nall_proxies = {'all': 'socks5://http.proxy',\n 'all://some.host': 'socks5://some.host.proxy'}\nmixed_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy',\n 'all': 'socks5://http.proxy'}\n@pytest.mark.parametrize(\n 'url, expected, proxies', (\n ('hTTp://u:p@Some.Host/path', 'http://some.host.proxy', http_proxies),\n ('hTTp://u:p@Other.Host/path', 'http://http.proxy', http_proxies),\n ('hTTp:///path', 'http://http.proxy', http_proxies),\n ('hTTps://Other.Host', None, http_proxies),\n ('file:///etc/motd', None, http_proxies),\n\n ('hTTp://u:p@Some.Host/path', 'socks5://some.host.proxy', all_proxies),\n ('hTTp://u:p@Other.Host/path', 'socks5://http.proxy', all_proxies),\n ('hTTp:///path', 'socks5://http.proxy', all_proxies),\n ('hTTps://Other.Host', 'socks5://http.proxy', all_proxies),\n\n ('http://u:p@other.host/path', 'http://http.proxy', mixed_proxies),\n ('http://u:p@some.host/path', 'http://some.host.proxy', mixed_proxies),\n ('https://u:p@other.host/path', 'socks5://http.proxy', mixed_proxies),\n ('https://u:p@some.host/path', 'socks5://http.proxy', mixed_proxies),\n ('https://', 'socks5://http.proxy', mixed_proxies),\n # XXX: unsure whether this is reasonable behavior\n ('file:///etc/motd', 'socks5://http.proxy', all_proxies),\n ))\ndef test_select_proxies(url, expected, proxies):\n \"\"\"Make sure we can select per-host proxies correctly.\"\"\"\n assert select_proxy(url, proxies) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('foo=\"is a fish\", bar=\"as well\"', {'foo': 'is a fish', 'bar': 'as well'}),\n ('key_without_value', {'key_without_value': None})\n ))\ndef test_parse_dict_header(value, expected):\n assert parse_dict_header(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n 'application/xml',\n ('application/xml', {})\n ),\n (\n 'application/json ; charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'application/json ; Charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'text/plain',\n ('text/plain', {})\n ),\n (\n 'multipart/form-data; boundary = something ; boundary2=\\'something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; boundary2=\"something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n 
),\n (\n 'multipart/form-data; boundary = something ; \\'boundary2=something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \"boundary2=something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'application/json ; ; ',\n ('application/json', {})\n )\n ))\ndef test__parse_content_type_header(value, expected):\n assert _parse_content_type_header(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n CaseInsensitiveDict(),\n None\n ),\n (\n CaseInsensitiveDict({'content-type': 'application/json; charset=utf-8'}),\n 'utf-8'\n ),\n (\n CaseInsensitiveDict({'content-type': 'text/plain'}),\n 'ISO-8859-1'\n ),\n ))\ndef test_get_encoding_from_headers(value, expected):\n assert get_encoding_from_headers(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, length', (\n ('', 0),\n ('T', 1),\n ('Test', 4),\n ('Cont', 0),\n ('Other', -5),\n ('Content', None),\n ))\ndef test_iter_slices(value, length):\n if length is None or (length <= 0 and len(value) > 0):\n # Reads all content at once\n assert len(list(iter_slices(value, length))) == 1\n else:\n assert len(list(iter_slices(value, 1))) == length\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n '<http:/.../front.jpeg>; rel=front; type=\"image/jpeg\"',\n [{'url': 'http:/.../front.jpeg', 'rel': 'front', 'type': 'image/jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>;',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>; type=\"image/jpeg\",<http://.../back.jpeg>;',\n [\n {'url': 'http:/.../front.jpeg', 'type': 'image/jpeg'},\n {'url': 'http://.../back.jpeg'}\n ]\n ),\n (\n '',\n []\n ),\n ))\ndef test_parse_header_links(value, expected):\n assert parse_header_links(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('example.com/path', 'http://example.com/path'),\n ('//example.com/path', 'http://example.com/path'),\n ))\ndef test_prepend_scheme_if_needed(value, expected):\n assert prepend_scheme_if_needed(value, 'http') == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('T', 'T'),\n (b'T', 'T'),\n (u'T', 'T'),\n ))\ndef test_to_native_string(value, expected):\n assert to_native_string(value) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://u:p@example.com/path?a=1#test', 'http://example.com/path?a=1'),\n ('http://example.com/path', 'http://example.com/path'),\n ('//u:p@example.com/path', '//example.com/path'),\n ('//example.com/path', '//example.com/path'),\n ('example.com/path', '//example.com/path'),\n ('scheme:u:p@example.com/path', 'scheme://example.com/path'),\n ))\ndef test_urldefragauth(url, expected):\n assert urldefragauth(url) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://google.com:6000/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ('file:///some/path/on/disk', True),\n ))\ndef test_should_bypass_proxies(url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not\n \"\"\"\n 
monkeypatch.setenv('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n monkeypatch.setenv('NO_PROXY', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n assert should_bypass_proxies(url, no_proxy=None) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://172.16.1.1/', '172.16.1.1'),\n ('http://172.16.1.1:5000/', '172.16.1.1'),\n ('http://user:pass@172.16.1.1', '172.16.1.1'),\n ('http://user:pass@172.16.1.1:5000', '172.16.1.1'),\n ('http://hostname/', 'hostname'),\n ('http://hostname:5000/', 'hostname'),\n ('http://user:pass@hostname', 'hostname'),\n ('http://user:pass@hostname:5000', 'hostname'),\n ))\ndef test_should_bypass_proxies_pass_only_hostname(url, expected, mocker):\n \"\"\"The proxy_bypass function should be called with a hostname or IP without\n a port number or auth credentials.\n \"\"\"\n proxy_bypass = mocker.patch('requests.utils.proxy_bypass')\n should_bypass_proxies(url, no_proxy=None)\n proxy_bypass.assert_called_once_with(expected)\n\n\n@pytest.mark.parametrize(\n 'cookiejar', (\n compat.cookielib.CookieJar(),\n RequestsCookieJar()\n ))\ndef test_add_dict_to_cookiejar(cookiejar):\n \"\"\"Ensure add_dict_to_cookiejar works for\n non-RequestsCookieJar CookieJars\n \"\"\"\n cookiedict = {'test': 'cookies',\n 'good': 'cookies'}\n cj = add_dict_to_cookiejar(cookiejar, cookiedict)\n cookies = {cookie.name: cookie.value for cookie in cj}\n assert cookiedict == cookies\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (u'test', True),\n (u'æíöû', False),\n (u'ジェーピーニック', False),\n )\n)\ndef test_unicode_is_ascii(value, expected):\n assert unicode_is_ascii(value) is expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ))\ndef test_should_bypass_proxies_no_proxy(\n url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not using the 'no_proxy' argument\n \"\"\"\n no_proxy = '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1'\n # Test 'no_proxy' argument\n assert should_bypass_proxies(url, no_proxy=no_proxy) == expected\n\n\n@pytest.mark.skipif(os.name != 'nt', reason='Test only on Windows')\n@pytest.mark.parametrize(\n 'url, expected, override', (\n ('http://192.168.0.1:5000/', True, None),\n ('http://192.168.0.1/', True, None),\n ('http://172.16.1.1/', True, None),\n ('http://172.16.1.1:5000/', True, None),\n ('http://localhost.localdomain:5000/v1.0/', True, None),\n ('http://172.16.1.22/', False, None),\n ('http://172.16.1.22:5000/', False, None),\n ('http://google.com:5000/v1.0/', False, None),\n ('http://mylocalhostname:5000/v1.0/', True, '<local>'),\n ('http://192.168.0.1/', False, ''),\n ))\ndef test_should_bypass_proxies_win_registry(url, expected, override,\n monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not with Windows registry settings\n \"\"\"\n if override is None:\n override = '192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1'\n if compat.is_py3:\n import winreg\n else:\n import _winreg as winreg\n\n class RegHandle:\n def Close(self):\n pass\n\n ie_settings = RegHandle()\n proxyEnableValues = deque([1, \"1\"])\n\n def OpenKey(key, 
subkey):\n return ie_settings\n\n def QueryValueEx(key, value_name):\n if key is ie_settings:\n if value_name == 'ProxyEnable':\n # this could be a string (REG_SZ) or a 32-bit number (REG_DWORD)\n proxyEnableValues.rotate()\n return [proxyEnableValues[0]]\n elif value_name == 'ProxyOverride':\n return [override]\n\n monkeypatch.setenv('http_proxy', '')\n monkeypatch.setenv('https_proxy', '')\n monkeypatch.setenv('ftp_proxy', '')\n monkeypatch.setenv('no_proxy', '')\n monkeypatch.setenv('NO_PROXY', '')\n monkeypatch.setattr(winreg, 'OpenKey', OpenKey)\n monkeypatch.setattr(winreg, 'QueryValueEx', QueryValueEx)\n assert should_bypass_proxies(url, None) == expected\n\n\n@pytest.mark.parametrize(\n 'env_name, value', (\n ('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('no_proxy', None),\n ('a_new_key', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('a_new_key', None),\n ))\ndef test_set_environ(env_name, value):\n \"\"\"Tests set_environ will set environ values and will restore the environ.\"\"\"\n environ_copy = copy.deepcopy(os.environ)\n with set_environ(env_name, value):\n assert os.environ.get(env_name) == value\n\n assert os.environ == environ_copy\n\n\ndef test_set_environ_raises_exception():\n \"\"\"Tests set_environ will raise exceptions in context when the\n value parameter is None.\"\"\"\n with pytest.raises(Exception) as exception:\n with set_environ('test1', None):\n raise Exception('Expected exception')\n\n assert 'Expected exception' in str(exception.value)\n",
"path": "tests/test_utils.py"
}
] | 13_7 | python | import sys
import pytest
import tarfile
from io import BytesIO
def test_tarfile_member(tmpdir):
from requests.utils import super_len
file_obj = tmpdir.join('test.txt')
file_obj.write('Test')
tar_obj = str(tmpdir.join('test.tar'))
with tarfile.open(tar_obj, 'w') as tar:
tar.add(str(file_obj), arcname='test.txt')
with tarfile.open(tar_obj) as tar:
member = tar.extractfile('test.txt')
assert super_len(member) == 4
def test_file_like_object_without_fileno():
from requests.utils import super_len
file_like_obj = BytesIO(b"Test")
assert super_len(file_like_obj) == 4
def main():
# Run the pytest tests programmatically
exit_code = pytest.main(["-v", __file__])
# Exit with status code 1 if any test fails, otherwise 0
if exit_code != 0:
sys.exit(1)
else:
sys.exit(0)
if __name__ == '__main__':
main()
|
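An aside on the behavior the 13_7 test script above exercises: tarfile members expose a `fileno` attribute but raise `AttributeError` when it is called (per issue 5229), and `BytesIO` raises `io.UnsupportedOperation`, so the length probe in `requests.utils.super_len` has to guard that call and fall back to seek/tell. A minimal, self-contained sketch of the same guard, with `probe_len` as a hypothetical stand-in for `super_len`:

import io
import os

def probe_len(o):
    # Hypothetical reduction of requests' super_len fallback chain.
    if hasattr(o, '__len__'):
        return len(o)
    if hasattr(o, 'fileno'):
        try:
            # Tarfile members raise AttributeError here despite passing
            # the hasattr check; BytesIO raises io.UnsupportedOperation.
            return os.fstat(o.fileno()).st_size
        except (io.UnsupportedOperation, AttributeError):
            pass
    if hasattr(o, 'seek') and hasattr(o, 'tell'):
        pos = o.tell()
        o.seek(0, 2)   # measure by seeking to the end
        size = o.tell()
        o.seek(pos)    # restore the read position
        return size - pos
    return 0

print(probe_len(io.BytesIO(b"Test")))  # 4, via the seek/tell fallback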
https://github.com/teamqurrent/requests | Your goal is to update the URL parsing logic in the `prepend_scheme_if_needed` function within `requests/utils.py`. Start by importing `parse_url` from `urllib3.util` and using it to parse URLs more accurately. Handle cases where the netloc is incorrectly interpreted by swapping it with the path when necessary, and ensure that URLs without a scheme have the default scheme correctly prepended. | 39d0fdd | -e .[socks]
pytest
pytest-cov
pytest-httpbin==1.0.0
pytest-mock
httpbin==0.7.0
trustme
wheel
chardet>=3.0.2,<3.1.0
idna>=2.5,<2.8
urllib3>=1.21.1,<1.24
certifi>=2017.4.17
# Flask Stack
Flask>1.0,<2.0
markupsafe<2.1
| python3.9 | ef59aa0 | diff --git a/requests/utils.py b/requests/utils.py
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -21,6 +21,7 @@ import warnings
import zipfile
from collections import OrderedDict
from urllib3.util import make_headers
+from urllib3.util import parse_url
from .__version__ import __version__
from . import certs
@@ -963,15 +964,23 @@ def prepend_scheme_if_needed(url, new_scheme):
:rtype: str
"""
- scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
-
- # urlparse is a finicky beast, and sometimes decides that there isn't a
- # netloc present. Assume that it's being over-cautious, and switch netloc
- # and path if urlparse decided there was no netloc.
+ parsed = parse_url(url)
+ scheme, auth, host, port, path, query, fragment = parsed
+
+ # A defect in urlparse determines that there isn't a netloc present in some
+ # urls. We previously assumed parsing was overly cautious, and swapped the
+ # netloc and path. Due to a lack of tests on the original defect, this is
+ # maintained with parse_url for backwards compatibility.
+ netloc = parsed.netloc
if not netloc:
netloc, path = path, netloc
- return urlunparse((scheme, netloc, path, params, query, fragment))
+ if scheme is None:
+ scheme = new_scheme
+ if path is None:
+ path = ''
+
+ return urlunparse((scheme, netloc, path, '', query, fragment))
def get_auth_from_url(url):
diff --git a/tests/test_utils.py b/tests/test_utils.py
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -601,6 +601,7 @@ def test_parse_header_links(value, expected):
'value, expected', (
('example.com/path', 'http://example.com/path'),
('//example.com/path', 'http://example.com/path'),
+ ('example.com:80', 'http://example.com:80'),
))
def test_prepend_scheme_if_needed(value, expected):
assert prepend_scheme_if_needed(value, 'http') == expected
| [
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.utils\n~~~~~~~~~~~~~~\n\nThis module provides utility functions that are used within Requests\nthat are also useful for external consumption.\n\"\"\"\n\nimport codecs\nimport contextlib\nimport io\nimport os\nimport re\nimport socket\nimport struct\nimport sys\nimport tempfile\nimport warnings\nimport zipfile\nfrom collections import OrderedDict\nfrom urllib3.util import make_headers\n\nfrom .__version__ import __version__\nfrom . import certs\n# to_native_string is unused here, but imported here for backwards compatibility\nfrom ._internal_utils import to_native_string\nfrom .compat import parse_http_list as _parse_list_header\nfrom .compat import (\n quote, urlparse, bytes, str, unquote, getproxies,\n proxy_bypass, urlunparse, basestring, integer_types, is_py3,\n proxy_bypass_environment, getproxies_environment, Mapping)\nfrom .cookies import cookiejar_from_dict\nfrom .structures import CaseInsensitiveDict\nfrom .exceptions import (\n InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)\n\nNETRC_FILES = ('.netrc', '_netrc')\n\nDEFAULT_CA_BUNDLE_PATH = certs.where()\n\nDEFAULT_PORTS = {'http': 80, 'https': 443}\n\n# Ensure that ', ' is used to preserve previous delimiter behavior.\nDEFAULT_ACCEPT_ENCODING = \", \".join(\n re.split(r\",\\s*\", make_headers(accept_encoding=True)[\"accept-encoding\"])\n)\n\n\nif sys.platform == 'win32':\n # provide a proxy_bypass version on Windows without DNS lookups\n\n def proxy_bypass_registry(host):\n try:\n if is_py3:\n import winreg\n else:\n import _winreg as winreg\n except ImportError:\n return False\n\n try:\n internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n r'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings')\n # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it\n proxyEnable = int(winreg.QueryValueEx(internetSettings,\n 'ProxyEnable')[0])\n # ProxyOverride is almost always a string\n proxyOverride = winreg.QueryValueEx(internetSettings,\n 'ProxyOverride')[0]\n except OSError:\n return False\n if not proxyEnable or not proxyOverride:\n return False\n\n # make a check value list from the registry entry: replace the\n # '<local>' string by the localhost entry and the corresponding\n # canonical entry.\n proxyOverride = proxyOverride.split(';')\n # now check if we match one of the registry values.\n for test in proxyOverride:\n if test == '<local>':\n if '.' not in host:\n return True\n test = test.replace(\".\", r\"\\.\") # mask dots\n test = test.replace(\"*\", r\".*\") # change glob sequence\n test = test.replace(\"?\", r\".\") # change glob char\n if re.match(test, host, re.I):\n return True\n return False\n\n def proxy_bypass(host): # noqa\n \"\"\"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n \"\"\"\n if getproxies_environment():\n return proxy_bypass_environment(host)\n else:\n return proxy_bypass_registry(host)\n\n\ndef dict_to_sequence(d):\n \"\"\"Returns an internal sequence dictionary update.\"\"\"\n\n if hasattr(d, 'items'):\n d = d.items()\n\n return d\n\n\ndef super_len(o):\n total_length = None\n current_position = 0\n\n if hasattr(o, '__len__'):\n total_length = len(o)\n\n elif hasattr(o, 'len'):\n total_length = o.len\n\n elif hasattr(o, 'fileno'):\n try:\n fileno = o.fileno()\n except (io.UnsupportedOperation, AttributeError):\n # AttributeError is a surprising exception, seeing as how we've just checked\n # that `hasattr(o, 'fileno')`. 
It happens for objects obtained via\n # `Tarfile.extractfile()`, per issue 5229.\n pass\n else:\n total_length = os.fstat(fileno).st_size\n\n # Having used fstat to determine the file length, we need to\n # confirm that this file was opened up in binary mode.\n if 'b' not in o.mode:\n warnings.warn((\n \"Requests has determined the content-length for this \"\n \"request using the binary size of the file: however, the \"\n \"file has been opened in text mode (i.e. without the 'b' \"\n \"flag in the mode). This may lead to an incorrect \"\n \"content-length. In Requests 3.0, support will be removed \"\n \"for files in text mode.\"),\n FileModeWarning\n )\n\n if hasattr(o, 'tell'):\n try:\n current_position = o.tell()\n except (OSError, IOError):\n # This can happen in some weird situations, such as when the file\n # is actually a special file descriptor like stdin. In this\n # instance, we don't know what the length is, so set it to zero and\n # let requests chunk it instead.\n if total_length is not None:\n current_position = total_length\n else:\n if hasattr(o, 'seek') and total_length is None:\n # StringIO and BytesIO have seek but no usable fileno\n try:\n # seek to end of file\n o.seek(0, 2)\n total_length = o.tell()\n\n # seek back to current position to support\n # partially read file-like objects\n o.seek(current_position or 0)\n except (OSError, IOError):\n total_length = 0\n\n if total_length is None:\n total_length = 0\n\n return max(0, total_length - current_position)\n\n\ndef get_netrc_auth(url, raise_errors=False):\n \"\"\"Returns the Requests tuple auth for a given url from netrc.\"\"\"\n\n netrc_file = os.environ.get('NETRC')\n if netrc_file is not None:\n netrc_locations = (netrc_file,)\n else:\n netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)\n\n try:\n from netrc import netrc, NetrcParseError\n\n netrc_path = None\n\n for f in netrc_locations:\n try:\n loc = os.path.expanduser(f)\n except KeyError:\n # os.path.expanduser can fail when $HOME is undefined and\n # getpwuid fails. See https://bugs.python.org/issue20164 &\n # https://github.com/psf/requests/issues/1846\n return\n\n if os.path.exists(loc):\n netrc_path = loc\n break\n\n # Abort early if there isn't one.\n if netrc_path is None:\n return\n\n ri = urlparse(url)\n\n # Strip port numbers from netloc. 
This weird `if...encode`` dance is\n # used for Python 3.2, which doesn't support unicode literals.\n splitstr = b':'\n if isinstance(url, str):\n splitstr = splitstr.decode('ascii')\n host = ri.netloc.split(splitstr)[0]\n\n try:\n _netrc = netrc(netrc_path).authenticators(host)\n if _netrc:\n # Return with login / password\n login_i = (0 if _netrc[0] else 1)\n return (_netrc[login_i], _netrc[2])\n except (NetrcParseError, IOError):\n # If there was a parsing error or a permissions issue reading the file,\n # we'll just skip netrc auth unless explicitly asked to raise errors.\n if raise_errors:\n raise\n\n # App Engine hackiness.\n except (ImportError, AttributeError):\n pass\n\n\ndef guess_filename(obj):\n \"\"\"Tries to guess the filename of the given object.\"\"\"\n name = getattr(obj, 'name', None)\n if (name and isinstance(name, basestring) and name[0] != '<' and\n name[-1] != '>'):\n return os.path.basename(name)\n\n\ndef extract_zipped_paths(path):\n \"\"\"Replace nonexistent paths that look like they refer to a member of a zip\n archive with the location of an extracted copy of the target, or else\n just return the provided path unchanged.\n \"\"\"\n if os.path.exists(path):\n # this is already a valid path, no need to do anything further\n return path\n\n # find the first valid part of the provided path and treat that as a zip archive\n # assume the rest of the path is the name of a member in the archive\n archive, member = os.path.split(path)\n while archive and not os.path.exists(archive):\n archive, prefix = os.path.split(archive)\n if not prefix:\n # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),\n # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users\n break\n member = '/'.join([prefix, member])\n\n if not zipfile.is_zipfile(archive):\n return path\n\n zip_file = zipfile.ZipFile(archive)\n if member not in zip_file.namelist():\n return path\n\n # we have a valid zip archive and a valid member of that archive\n tmp = tempfile.gettempdir()\n extracted_path = os.path.join(tmp, member.split('/')[-1])\n if not os.path.exists(extracted_path):\n # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition\n with atomic_open(extracted_path) as file_handler:\n file_handler.write(zip_file.read(member))\n return extracted_path\n\n\n@contextlib.contextmanager\ndef atomic_open(filename):\n \"\"\"Write a file to the disk in an atomic fashion\"\"\"\n replacer = os.rename if sys.version_info[0] == 2 else os.replace\n tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))\n try:\n with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:\n yield tmp_handler\n replacer(tmp_name, filename)\n except BaseException:\n os.remove(tmp_name)\n raise\n\n\ndef from_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. 
Unless it can not be represented as such, return an\n OrderedDict, e.g.,\n\n ::\n\n >>> from_key_val_list([('key', 'val')])\n OrderedDict([('key', 'val')])\n >>> from_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n >>> from_key_val_list({'key': 'val'})\n OrderedDict([('key', 'val')])\n\n :rtype: OrderedDict\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n return OrderedDict(value)\n\n\ndef to_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. If it can be, return a list of tuples, e.g.,\n\n ::\n\n >>> to_key_val_list([('key', 'val')])\n [('key', 'val')]\n >>> to_key_val_list({'key': 'val'})\n [('key', 'val')]\n >>> to_key_val_list('string')\n Traceback (most recent call last):\n ...\n ValueError: cannot encode objects that are not 2-tuples\n\n :rtype: list\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n if isinstance(value, Mapping):\n value = value.items()\n\n return list(value)\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_list_header(value):\n \"\"\"Parse lists as described by RFC 2068 Section 2.\n\n In particular, parse comma-separated lists where the elements of\n the list may include quoted-strings. A quoted-string could\n contain a comma. A non-quoted string could have quotes in the\n middle. Quotes are removed automatically after parsing.\n\n It basically works like :func:`parse_set_header` just that items\n may appear multiple times and case sensitivity is preserved.\n\n The return value is a standard :class:`list`:\n\n >>> parse_list_header('token, \"quoted value\"')\n ['token', 'quoted value']\n\n To create a header from the :class:`list` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a list header.\n :return: :class:`list`\n :rtype: list\n \"\"\"\n result = []\n for item in _parse_list_header(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_dict_header(value):\n \"\"\"Parse lists of key, value pairs as described by RFC 2068 Section 2 and\n convert them into a python dict:\n\n >>> d = parse_dict_header('foo=\"is a fish\", bar=\"as well\"')\n >>> type(d) is dict\n True\n >>> sorted(d.items())\n [('bar', 'as well'), ('foo', 'is a fish')]\n\n If there is no value for a key it will be `None`:\n\n >>> parse_dict_header('key_without_value')\n {'key_without_value': None}\n\n To create a header from the :class:`dict` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a dict header.\n :return: :class:`dict`\n :rtype: dict\n \"\"\"\n result = {}\n for item in _parse_list_header(value):\n if '=' not in item:\n result[item] = None\n continue\n name, value = item.split('=', 1)\n if value[:1] == value[-1:] == '\"':\n value = unquote_header_value(value[1:-1])\n result[name] = value\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef unquote_header_value(value, is_filename=False):\n r\"\"\"Unquotes a header value. 
(Reversal of :func:`quote_header_value`).\n This does not use the real unquoting but what browsers are actually\n using for quoting.\n\n :param value: the header value to unquote.\n :rtype: str\n \"\"\"\n if value and value[0] == value[-1] == '\"':\n # this is not the real unquoting, but fixing this so that the\n # RFC is met will result in bugs with internet explorer and\n # probably some other browsers as well. IE for example is\n # uploading files with \"C:\\foo\\bar.txt\" as filename\n value = value[1:-1]\n\n # if this is a filename and the starting characters look like\n # a UNC path, then just return the value without quotes. Using the\n # replace sequence below on a UNC path has the effect of turning\n # the leading double slash into a single slash and then\n # _fix_ie_filename() doesn't work correctly. See #458.\n if not is_filename or value[:2] != '\\\\\\\\':\n return value.replace('\\\\\\\\', '\\\\').replace('\\\\\"', '\"')\n return value\n\n\ndef dict_from_cookiejar(cj):\n \"\"\"Returns a key/value dictionary from a CookieJar.\n\n :param cj: CookieJar object to extract cookies from.\n :rtype: dict\n \"\"\"\n\n cookie_dict = {}\n\n for cookie in cj:\n cookie_dict[cookie.name] = cookie.value\n\n return cookie_dict\n\n\ndef add_dict_to_cookiejar(cj, cookie_dict):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cj: CookieJar to insert cookies into.\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :rtype: CookieJar\n \"\"\"\n\n return cookiejar_from_dict(cookie_dict, cj)\n\n\ndef get_encodings_from_content(content):\n \"\"\"Returns encodings from given content string.\n\n :param content: bytestring to extract encodings from.\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_encodings_from_content will be removed. For '\n 'more information, please see the discussion on issue #2266. 
(This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n charset_re = re.compile(r'<meta.*?charset=[\"\\']*(.+?)[\"\\'>]', flags=re.I)\n pragma_re = re.compile(r'<meta.*?content=[\"\\']*;?charset=(.+?)[\"\\'>]', flags=re.I)\n xml_re = re.compile(r'^<\\?xml.*?encoding=[\"\\']*(.+?)[\"\\'>]')\n\n return (charset_re.findall(content) +\n pragma_re.findall(content) +\n xml_re.findall(content))\n\n\ndef _parse_content_type_header(header):\n \"\"\"Returns content type and parameters from given header\n\n :param header: string\n :return: tuple containing content type and dictionary of\n parameters\n \"\"\"\n\n tokens = header.split(';')\n content_type, params = tokens[0].strip(), tokens[1:]\n params_dict = {}\n items_to_strip = \"\\\"' \"\n\n for param in params:\n param = param.strip()\n if param:\n key, value = param, True\n index_of_equals = param.find(\"=\")\n if index_of_equals != -1:\n key = param[:index_of_equals].strip(items_to_strip)\n value = param[index_of_equals + 1:].strip(items_to_strip)\n params_dict[key.lower()] = value\n return content_type, params_dict\n\n\ndef get_encoding_from_headers(headers):\n \"\"\"Returns encodings from given HTTP Header Dict.\n\n :param headers: dictionary to extract encoding from.\n :rtype: str\n \"\"\"\n\n content_type = headers.get('content-type')\n\n if not content_type:\n return None\n\n content_type, params = _parse_content_type_header(content_type)\n\n if 'charset' in params:\n return params['charset'].strip(\"'\\\"\")\n\n if 'text' in content_type:\n return 'ISO-8859-1'\n\n if 'application/json' in content_type:\n # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset\n return 'utf-8'\n\n\ndef stream_decode_response_unicode(iterator, r):\n \"\"\"Stream decodes a iterator.\"\"\"\n\n if r.encoding is None:\n for item in iterator:\n yield item\n return\n\n decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')\n for chunk in iterator:\n rv = decoder.decode(chunk)\n if rv:\n yield rv\n rv = decoder.decode(b'', final=True)\n if rv:\n yield rv\n\n\ndef iter_slices(string, slice_length):\n \"\"\"Iterate over slices of a string.\"\"\"\n pos = 0\n if slice_length is None or slice_length <= 0:\n slice_length = len(string)\n while pos < len(string):\n yield string[pos:pos + slice_length]\n pos += slice_length\n\n\ndef get_unicode_from_response(r):\n \"\"\"Returns the requested content back in unicode.\n\n :param r: Response object to get unicode content from.\n\n Tried:\n\n 1. charset from content-type\n 2. fall back and replace all unicode characters\n\n :rtype: str\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_unicode_from_response will be removed. For '\n 'more information, please see the discussion on issue #2266. (This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n tried_encodings = []\n\n # Try charset from content-type\n encoding = get_encoding_from_headers(r.headers)\n\n if encoding:\n try:\n return str(r.content, encoding)\n except UnicodeError:\n tried_encodings.append(encoding)\n\n # Fall back:\n try:\n return str(r.content, encoding, errors='replace')\n except TypeError:\n return r.content\n\n\n# The unreserved URI characters (RFC 3986)\nUNRESERVED_SET = frozenset(\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\" + \"0123456789-._~\")\n\n\ndef unquote_unreserved(uri):\n \"\"\"Un-escape any percent-escape sequences in a URI that are unreserved\n characters. 
This leaves all reserved, illegal and non-ASCII bytes encoded.\n\n :rtype: str\n \"\"\"\n parts = uri.split('%')\n for i in range(1, len(parts)):\n h = parts[i][0:2]\n if len(h) == 2 and h.isalnum():\n try:\n c = chr(int(h, 16))\n except ValueError:\n raise InvalidURL(\"Invalid percent-escape sequence: '%s'\" % h)\n\n if c in UNRESERVED_SET:\n parts[i] = c + parts[i][2:]\n else:\n parts[i] = '%' + parts[i]\n else:\n parts[i] = '%' + parts[i]\n return ''.join(parts)\n\n\ndef requote_uri(uri):\n \"\"\"Re-quote the given URI.\n\n This function passes the given URI through an unquote/quote cycle to\n ensure that it is fully and consistently quoted.\n\n :rtype: str\n \"\"\"\n safe_with_percent = \"!#$%&'()*+,/:;=?@[]~\"\n safe_without_percent = \"!#$&'()*+,/:;=?@[]~\"\n try:\n # Unquote only the unreserved characters\n # Then quote only illegal characters (do not quote reserved,\n # unreserved, or '%')\n return quote(unquote_unreserved(uri), safe=safe_with_percent)\n except InvalidURL:\n # We couldn't unquote the given URI, so let's try quoting it, but\n # there may be unquoted '%'s in the URI. We need to make sure they're\n # properly quoted so they do not cause issues elsewhere.\n return quote(uri, safe=safe_without_percent)\n\n\ndef address_in_network(ip, net):\n \"\"\"This function allows you to check if an IP belongs to a network subnet\n\n Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24\n returns False if ip = 192.168.1.1 and net = 192.168.100.0/24\n\n :rtype: bool\n \"\"\"\n ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]\n netaddr, bits = net.split('/')\n netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]\n network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask\n return (ipaddr & netmask) == (network & netmask)\n\n\ndef dotted_netmask(mask):\n \"\"\"Converts mask from /xx format to xxx.xxx.xxx.xxx\n\n Example: if mask is 24 function returns 255.255.255.0\n\n :rtype: str\n \"\"\"\n bits = 0xffffffff ^ (1 << 32 - mask) - 1\n return socket.inet_ntoa(struct.pack('>I', bits))\n\n\ndef is_ipv4_address(string_ip):\n \"\"\"\n :rtype: bool\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except socket.error:\n return False\n return True\n\n\ndef is_valid_cidr(string_network):\n \"\"\"\n Very simple check of the cidr format in no_proxy variable.\n\n :rtype: bool\n \"\"\"\n if string_network.count('/') == 1:\n try:\n mask = int(string_network.split('/')[1])\n except ValueError:\n return False\n\n if mask < 1 or mask > 32:\n return False\n\n try:\n socket.inet_aton(string_network.split('/')[0])\n except socket.error:\n return False\n else:\n return False\n return True\n\n\n@contextlib.contextmanager\ndef set_environ(env_name, value):\n \"\"\"Set the environment variable 'env_name' to 'value'\n\n Save previous value, yield, and then restore the previous value stored in\n the environment variable 'env_name'.\n\n If 'value' is None, do nothing\"\"\"\n value_changed = value is not None\n if value_changed:\n old_value = os.environ.get(env_name)\n os.environ[env_name] = value\n try:\n yield\n finally:\n if value_changed:\n if old_value is None:\n del os.environ[env_name]\n else:\n os.environ[env_name] = old_value\n\n\ndef should_bypass_proxies(url, no_proxy):\n \"\"\"\n Returns whether we should bypass proxies or not.\n\n :rtype: bool\n \"\"\"\n # Prioritize lowercase environment variables over uppercase\n # to keep a consistent behaviour with other http projects (curl, wget).\n get_proxy = lambda k: os.environ.get(k) or 
os.environ.get(k.upper())\n\n # First check whether no_proxy is defined. If it is, check that the URL\n # we're getting isn't in the no_proxy list.\n no_proxy_arg = no_proxy\n if no_proxy is None:\n no_proxy = get_proxy('no_proxy')\n parsed = urlparse(url)\n\n if parsed.hostname is None:\n # URLs don't always have hostnames, e.g. file:/// urls.\n return True\n\n if no_proxy:\n # We need to check whether we match here. We need to see if we match\n # the end of the hostname, both with and without the port.\n no_proxy = (\n host for host in no_proxy.replace(' ', '').split(',') if host\n )\n\n if is_ipv4_address(parsed.hostname):\n for proxy_ip in no_proxy:\n if is_valid_cidr(proxy_ip):\n if address_in_network(parsed.hostname, proxy_ip):\n return True\n elif parsed.hostname == proxy_ip:\n # If no_proxy ip was defined in plain IP notation instead of cidr notation &\n # matches the IP of the index\n return True\n else:\n host_with_port = parsed.hostname\n if parsed.port:\n host_with_port += ':{}'.format(parsed.port)\n\n for host in no_proxy:\n if parsed.hostname.endswith(host) or host_with_port.endswith(host):\n # The URL does match something in no_proxy, so we don't want\n # to apply the proxies on this URL.\n return True\n\n with set_environ('no_proxy', no_proxy_arg):\n # parsed.hostname can be `None` in cases such as a file URI.\n try:\n bypass = proxy_bypass(parsed.hostname)\n except (TypeError, socket.gaierror):\n bypass = False\n\n if bypass:\n return True\n\n return False\n\n\ndef get_environ_proxies(url, no_proxy=None):\n \"\"\"\n Return a dict of environment proxies.\n\n :rtype: dict\n \"\"\"\n if should_bypass_proxies(url, no_proxy=no_proxy):\n return {}\n else:\n return getproxies()\n\n\ndef select_proxy(url, proxies):\n \"\"\"Select a proxy for the url, if applicable.\n\n :param url: The url being for the request\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n \"\"\"\n proxies = proxies or {}\n urlparts = urlparse(url)\n if urlparts.hostname is None:\n return proxies.get(urlparts.scheme, proxies.get('all'))\n\n proxy_keys = [\n urlparts.scheme + '://' + urlparts.hostname,\n urlparts.scheme,\n 'all://' + urlparts.hostname,\n 'all',\n ]\n proxy = None\n for proxy_key in proxy_keys:\n if proxy_key in proxies:\n proxy = proxies[proxy_key]\n break\n\n return proxy\n\n\ndef resolve_proxies(request, proxies, trust_env=True):\n \"\"\"This method takes proxy information from a request and configuration\n input to resolve a mapping of target proxies. 
This will consider settings\n such a NO_PROXY to strip proxy configurations.\n\n :param request: Request or PreparedRequest\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n :param trust_env: Boolean declaring whether to trust environment configs\n\n :rtype: dict\n \"\"\"\n proxies = proxies if proxies is not None else {}\n url = request.url\n scheme = urlparse(url).scheme\n no_proxy = proxies.get('no_proxy')\n new_proxies = proxies.copy()\n\n bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)\n if trust_env and not bypass_proxy:\n environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)\n\n proxy = environ_proxies.get(scheme, environ_proxies.get('all'))\n\n if proxy:\n new_proxies.setdefault(scheme, proxy)\n return new_proxies\n\n\ndef default_user_agent(name=\"python-requests\"):\n \"\"\"\n Return a string representing the default user agent.\n\n :rtype: str\n \"\"\"\n return '%s/%s' % (name, __version__)\n\n\ndef default_headers():\n \"\"\"\n :rtype: requests.structures.CaseInsensitiveDict\n \"\"\"\n return CaseInsensitiveDict({\n 'User-Agent': default_user_agent(),\n 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,\n 'Accept': '*/*',\n 'Connection': 'keep-alive',\n })\n\n\ndef parse_header_links(value):\n \"\"\"Return a list of parsed link headers proxies.\n\n i.e. Link: <http:/.../front.jpeg>; rel=front; type=\"image/jpeg\",<http://.../back.jpeg>; rel=back;type=\"image/jpeg\"\n\n :rtype: list\n \"\"\"\n\n links = []\n\n replace_chars = ' \\'\"'\n\n value = value.strip(replace_chars)\n if not value:\n return links\n\n for val in re.split(', *<', value):\n try:\n url, params = val.split(';', 1)\n except ValueError:\n url, params = val, ''\n\n link = {'url': url.strip('<> \\'\"')}\n\n for param in params.split(';'):\n try:\n key, value = param.split('=')\n except ValueError:\n break\n\n link[key.strip(replace_chars)] = value.strip(replace_chars)\n\n links.append(link)\n\n return links\n\n\n# Null bytes; no need to recreate these on each call to guess_json_utf\n_null = '\\x00'.encode('ascii') # encoding to ASCII for Python 3\n_null2 = _null * 2\n_null3 = _null * 3\n\n\ndef guess_json_utf(data):\n \"\"\"\n :rtype: str\n \"\"\"\n # JSON always starts with two ASCII characters, so detection is as\n # easy as counting the nulls and from their location and count\n # determine the encoding. 
Also detect a BOM, if present.\n sample = data[:4]\n if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):\n return 'utf-32' # BOM included\n if sample[:3] == codecs.BOM_UTF8:\n return 'utf-8-sig' # BOM included, MS style (discouraged)\n if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):\n return 'utf-16' # BOM included\n nullcount = sample.count(_null)\n if nullcount == 0:\n return 'utf-8'\n if nullcount == 2:\n if sample[::2] == _null2: # 1st and 3rd are null\n return 'utf-16-be'\n if sample[1::2] == _null2: # 2nd and 4th are null\n return 'utf-16-le'\n # Did not detect 2 valid UTF-16 ascii-range characters\n if nullcount == 3:\n if sample[:3] == _null3:\n return 'utf-32-be'\n if sample[1:] == _null3:\n return 'utf-32-le'\n # Did not detect a valid UTF-32 ascii-range character\n return None\n\n\ndef prepend_scheme_if_needed(url, new_scheme):\n \"\"\"Given a URL that may or may not have a scheme, prepend the given scheme.\n Does not replace a present scheme with the one provided as an argument.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)\n\n # urlparse is a finicky beast, and sometimes decides that there isn't a\n # netloc present. Assume that it's being over-cautious, and switch netloc\n # and path if urlparse decided there was no netloc.\n if not netloc:\n netloc, path = path, netloc\n\n return urlunparse((scheme, netloc, path, params, query, fragment))\n\n\ndef get_auth_from_url(url):\n \"\"\"Given a url with authentication components, extract them into a tuple of\n username,password.\n\n :rtype: (str,str)\n \"\"\"\n parsed = urlparse(url)\n\n try:\n auth = (unquote(parsed.username), unquote(parsed.password))\n except (AttributeError, TypeError):\n auth = ('', '')\n\n return auth\n\n\n# Moved outside of function to avoid recompile every call\n_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\\\S[^\\\\r\\\\n]*$|^$')\n_CLEAN_HEADER_REGEX_STR = re.compile(r'^\\S[^\\r\\n]*$|^$')\n\n\ndef check_header_validity(header):\n \"\"\"Verifies that header value is a string which doesn't contain\n leading whitespace or return characters. 
This prevents unintended\n header injection.\n\n :param header: tuple, in the format (name, value).\n \"\"\"\n name, value = header\n\n if isinstance(value, bytes):\n pat = _CLEAN_HEADER_REGEX_BYTE\n else:\n pat = _CLEAN_HEADER_REGEX_STR\n try:\n if not pat.match(value):\n raise InvalidHeader(\"Invalid return character or leading space in header: %s\" % name)\n except TypeError:\n raise InvalidHeader(\"Value for header {%s: %s} must be of type str or \"\n \"bytes, not %s\" % (name, value, type(value)))\n\n\ndef urldefragauth(url):\n \"\"\"\n Given a url remove the fragment and the authentication part.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url)\n\n # see func:`prepend_scheme_if_needed`\n if not netloc:\n netloc, path = path, netloc\n\n netloc = netloc.rsplit('@', 1)[-1]\n\n return urlunparse((scheme, netloc, path, params, query, ''))\n\n\ndef rewind_body(prepared_request):\n \"\"\"Move file pointer back to its recorded starting position\n so it can be read again on redirect.\n \"\"\"\n body_seek = getattr(prepared_request.body, 'seek', None)\n if body_seek is not None and isinstance(prepared_request._body_position, integer_types):\n try:\n body_seek(prepared_request._body_position)\n except (IOError, OSError):\n raise UnrewindableBodyError(\"An error occurred when rewinding request \"\n \"body for redirect.\")\n else:\n raise UnrewindableBodyError(\"Unable to rewind request body for redirect.\")\n",
"path": "requests/utils.py"
},
{
"content": "# -*- coding: utf-8 -*-\n\nimport os\nimport copy\nimport filecmp\nfrom io import BytesIO\nimport tarfile\nimport zipfile\nfrom collections import deque\n\nimport pytest\nfrom requests import compat\nfrom requests.cookies import RequestsCookieJar\nfrom requests.structures import CaseInsensitiveDict\nfrom requests.utils import (\n address_in_network, dotted_netmask, extract_zipped_paths,\n get_auth_from_url, _parse_content_type_header, get_encoding_from_headers,\n get_encodings_from_content, get_environ_proxies,\n guess_filename, guess_json_utf, is_ipv4_address,\n is_valid_cidr, iter_slices, parse_dict_header,\n parse_header_links, prepend_scheme_if_needed,\n requote_uri, select_proxy, should_bypass_proxies, super_len,\n to_key_val_list, to_native_string,\n unquote_header_value, unquote_unreserved,\n urldefragauth, add_dict_to_cookiejar, set_environ)\nfrom requests._internal_utils import unicode_is_ascii\n\nfrom .compat import StringIO, cStringIO\n\n\nclass TestSuperLen:\n\n @pytest.mark.parametrize(\n 'stream, value', (\n (StringIO.StringIO, 'Test'),\n (BytesIO, b'Test'),\n pytest.param(cStringIO, 'Test',\n marks=pytest.mark.skipif('cStringIO is None')),\n ))\n def test_io_streams(self, stream, value):\n \"\"\"Ensures that we properly deal with different kinds of IO streams.\"\"\"\n assert super_len(stream()) == 0\n assert super_len(stream(value)) == 4\n\n def test_super_len_correctly_calculates_len_of_partially_read_file(self):\n \"\"\"Ensure that we handle partially consumed file like objects.\"\"\"\n s = StringIO.StringIO()\n s.write('foobarbogus')\n assert super_len(s) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_handles_files_raising_weird_errors_in_tell(self, error):\n \"\"\"If tell() raises errors, assume the cursor is at position zero.\"\"\"\n class BoomFile(object):\n def __len__(self):\n return 5\n\n def tell(self):\n raise error()\n\n assert super_len(BoomFile()) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_tell_ioerror(self, error):\n \"\"\"Ensure that if tell gives an IOError super_len doesn't fail\"\"\"\n class NoLenBoomFile(object):\n def tell(self):\n raise error()\n\n def seek(self, offset, whence):\n pass\n\n assert super_len(NoLenBoomFile()) == 0\n\n def test_string(self):\n assert super_len('Test') == 4\n\n @pytest.mark.parametrize(\n 'mode, warnings_num', (\n ('r', 1),\n ('rb', 0),\n ))\n def test_file(self, tmpdir, mode, warnings_num, recwarn):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n with file_obj.open(mode) as fd:\n assert super_len(fd) == 4\n assert len(recwarn) == warnings_num\n\n def test_tarfile_member(self, tmpdir):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n\n tar_obj = str(tmpdir.join('test.tar'))\n with tarfile.open(tar_obj, 'w') as tar:\n tar.add(str(file_obj), arcname='test.txt')\n\n with tarfile.open(tar_obj) as tar:\n member = tar.extractfile('test.txt')\n assert super_len(member) == 4\n\n def test_super_len_with__len__(self):\n foo = [1,2,3,4]\n len_foo = super_len(foo)\n assert len_foo == 4\n\n def test_super_len_with_no__len__(self):\n class LenFile(object):\n def __init__(self):\n self.len = 5\n\n assert super_len(LenFile()) == 5\n\n def test_super_len_with_tell(self):\n foo = StringIO.StringIO('12345')\n assert super_len(foo) == 5\n foo.read(2)\n assert super_len(foo) == 3\n\n def test_super_len_with_fileno(self):\n with open(__file__, 'rb') as f:\n length = super_len(f)\n file_data = f.read()\n assert length == 
len(file_data)\n\n def test_super_len_with_no_matches(self):\n \"\"\"Ensure that objects without any length methods default to 0\"\"\"\n assert super_len(object()) == 0\n\n\nclass TestToKeyValList:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n ([('key', 'val')], [('key', 'val')]),\n ((('key', 'val'), ), [('key', 'val')]),\n ({'key': 'val'}, [('key', 'val')]),\n (None, None)\n ))\n def test_valid(self, value, expected):\n assert to_key_val_list(value) == expected\n\n def test_invalid(self):\n with pytest.raises(ValueError):\n to_key_val_list('string')\n\n\nclass TestUnquoteHeaderValue:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n (None, None),\n ('Test', 'Test'),\n ('\"Test\"', 'Test'),\n ('\"Test\\\\\\\\\"', 'Test\\\\'),\n ('\"\\\\\\\\Comp\\\\Res\"', '\\\\Comp\\\\Res'),\n ))\n def test_valid(self, value, expected):\n assert unquote_header_value(value) == expected\n\n def test_is_filename(self):\n assert unquote_header_value('\"\\\\\\\\Comp\\\\Res\"', True) == '\\\\\\\\Comp\\\\Res'\n\n\nclass TestGetEnvironProxies:\n \"\"\"Ensures that IP addresses are correctly matches with ranges\n in no_proxy variable.\n \"\"\"\n\n @pytest.fixture(autouse=True, params=['no_proxy', 'NO_PROXY'])\n def no_proxy(self, request, monkeypatch):\n monkeypatch.setenv(request.param, '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1')\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_not_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) != {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_bypass_no_proxy_keyword(self, url):\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_not_bypass_no_proxy_keyword(self, url, monkeypatch):\n # This is testing that the 'no_proxy' argument overrides the\n # environment variable 'no_proxy'\n monkeypatch.setenv('http_proxy', 'http://proxy.example.com:3128/')\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) != {}\n\n\nclass TestIsIPv4Address:\n\n def test_valid(self):\n assert is_ipv4_address('8.8.8.8')\n\n @pytest.mark.parametrize('value', ('8.8.8.8.8', 'localhost.localdomain'))\n def test_invalid(self, value):\n assert not is_ipv4_address(value)\n\n\nclass TestIsValidCIDR:\n\n def test_valid(self):\n assert is_valid_cidr('192.168.1.0/24')\n\n @pytest.mark.parametrize(\n 'value', (\n '8.8.8.8',\n '192.168.1.0/a',\n '192.168.1.0/128',\n '192.168.1.0/-1',\n '192.168.1.999/24',\n ))\n def test_invalid(self, value):\n assert not is_valid_cidr(value)\n\n\nclass TestAddressInNetwork:\n\n def test_valid(self):\n assert address_in_network('192.168.1.1', '192.168.1.0/24')\n\n def test_invalid(self):\n assert not address_in_network('172.16.0.1', '192.168.1.0/24')\n\n\nclass TestGuessFilename:\n\n @pytest.mark.parametrize(\n 'value', (1, type('Fake', (object,), {'name': 1})()),\n )\n def 
test_guess_filename_invalid(self, value):\n assert guess_filename(value) is None\n\n @pytest.mark.parametrize(\n 'value, expected_type', (\n (b'value', compat.bytes),\n (b'value'.decode('utf-8'), compat.str)\n ))\n def test_guess_filename_valid(self, value, expected_type):\n obj = type('Fake', (object,), {'name': value})()\n result = guess_filename(obj)\n assert result == value\n assert isinstance(result, expected_type)\n\n\nclass TestExtractZippedPaths:\n\n @pytest.mark.parametrize(\n 'path', (\n '/',\n __file__,\n pytest.__file__,\n '/etc/invalid/location',\n ))\n def test_unzipped_paths_unchanged(self, path):\n assert path == extract_zipped_paths(path)\n\n def test_zipped_paths_extracted(self, tmpdir):\n zipped_py = tmpdir.join('test.zip')\n with zipfile.ZipFile(zipped_py.strpath, 'w') as f:\n f.write(__file__)\n\n _, name = os.path.splitdrive(__file__)\n zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r'\\/'))\n extracted_path = extract_zipped_paths(zipped_path)\n\n assert extracted_path != zipped_path\n assert os.path.exists(extracted_path)\n assert filecmp.cmp(extracted_path, __file__)\n\n def test_invalid_unc_path(self):\n path = r\"\\\\localhost\\invalid\\location\"\n assert extract_zipped_paths(path) == path\n\n\nclass TestContentEncodingDetection:\n\n def test_none(self):\n encodings = get_encodings_from_content('')\n assert not len(encodings)\n\n @pytest.mark.parametrize(\n 'content', (\n # HTML5 meta charset attribute\n '<meta charset=\"UTF-8\">',\n # HTML4 pragma directive\n '<meta http-equiv=\"Content-type\" content=\"text/html;charset=UTF-8\">',\n # XHTML 1.x served with text/html MIME type\n '<meta http-equiv=\"Content-type\" content=\"text/html;charset=UTF-8\" />',\n # XHTML 1.x served as XML\n '<?xml version=\"1.0\" encoding=\"UTF-8\"?>',\n ))\n def test_pragmas(self, content):\n encodings = get_encodings_from_content(content)\n assert len(encodings) == 1\n assert encodings[0] == 'UTF-8'\n\n def test_precedence(self):\n content = '''\n <?xml version=\"1.0\" encoding=\"XML\"?>\n <meta charset=\"HTML5\">\n <meta http-equiv=\"Content-type\" content=\"text/html;charset=HTML4\" />\n '''.strip()\n assert get_encodings_from_content(content) == ['HTML5', 'HTML4', 'XML']\n\n\nclass TestGuessJSONUTF:\n\n @pytest.mark.parametrize(\n 'encoding', (\n 'utf-32', 'utf-8-sig', 'utf-16', 'utf-8', 'utf-16-be', 'utf-16-le',\n 'utf-32-be', 'utf-32-le'\n ))\n def test_encoded(self, encoding):\n data = '{}'.encode(encoding)\n assert guess_json_utf(data) == encoding\n\n def test_bad_utf_like_encoding(self):\n assert guess_json_utf(b'\\x00\\x00\\x00\\x00') is None\n\n @pytest.mark.parametrize(\n ('encoding', 'expected'), (\n ('utf-16-be', 'utf-16'),\n ('utf-16-le', 'utf-16'),\n ('utf-32-be', 'utf-32'),\n ('utf-32-le', 'utf-32')\n ))\n def test_guess_by_bom(self, encoding, expected):\n data = u'\\ufeff{}'.encode(encoding)\n assert guess_json_utf(data) == expected\n\n\nUSER = PASSWORD = \"%!*'();:@&=+$,/?#[] \"\nENCODED_USER = compat.quote(USER, '')\nENCODED_PASSWORD = compat.quote(PASSWORD, '')\n\n\n@pytest.mark.parametrize(\n 'url, auth', (\n (\n 'http://' + ENCODED_USER + ':' + ENCODED_PASSWORD + '@' +\n 'request.com/url.html#test',\n (USER, PASSWORD)\n ),\n (\n 'http://user:pass@complex.url.com/path?query=yes',\n ('user', 'pass')\n ),\n (\n 'http://user:pass%20pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user:pass pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user%25user:pass@complex.url.com/path?query=yes',\n 
('user%user', 'pass')\n ),\n (\n 'http://user:pass%23pass@complex.url.com/path?query=yes',\n ('user', 'pass#pass')\n ),\n (\n 'http://complex.url.com/path?query=yes',\n ('', '')\n ),\n ))\ndef test_get_auth_from_url(url, auth):\n assert get_auth_from_url(url) == auth\n\n\n@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Ensure requoting doesn't break expectations\n 'http://example.com/fiz?buz=%25ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n (\n # Ensure we handle unquoted percent signs in redirects\n 'http://example.com/fiz?buz=%ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n ))\ndef test_requote_uri_with_unquoted_percents(uri, expected):\n \"\"\"See: https://github.com/psf/requests/issues/2356\"\"\"\n assert requote_uri(uri) == expected\n\n\n@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Illegal bytes\n 'http://example.com/?a=%--',\n 'http://example.com/?a=%--',\n ),\n (\n # Reserved characters\n 'http://example.com/?a=%300',\n 'http://example.com/?a=00',\n )\n ))\ndef test_unquote_unreserved(uri, expected):\n assert unquote_unreserved(uri) == expected\n\n\n@pytest.mark.parametrize(\n 'mask, expected', (\n (8, '255.0.0.0'),\n (24, '255.255.255.0'),\n (25, '255.255.255.128'),\n ))\ndef test_dotted_netmask(mask, expected):\n assert dotted_netmask(mask) == expected\n\n\nhttp_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy'}\nall_proxies = {'all': 'socks5://http.proxy',\n 'all://some.host': 'socks5://some.host.proxy'}\nmixed_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy',\n 'all': 'socks5://http.proxy'}\n@pytest.mark.parametrize(\n 'url, expected, proxies', (\n ('hTTp://u:p@Some.Host/path', 'http://some.host.proxy', http_proxies),\n ('hTTp://u:p@Other.Host/path', 'http://http.proxy', http_proxies),\n ('hTTp:///path', 'http://http.proxy', http_proxies),\n ('hTTps://Other.Host', None, http_proxies),\n ('file:///etc/motd', None, http_proxies),\n\n ('hTTp://u:p@Some.Host/path', 'socks5://some.host.proxy', all_proxies),\n ('hTTp://u:p@Other.Host/path', 'socks5://http.proxy', all_proxies),\n ('hTTp:///path', 'socks5://http.proxy', all_proxies),\n ('hTTps://Other.Host', 'socks5://http.proxy', all_proxies),\n\n ('http://u:p@other.host/path', 'http://http.proxy', mixed_proxies),\n ('http://u:p@some.host/path', 'http://some.host.proxy', mixed_proxies),\n ('https://u:p@other.host/path', 'socks5://http.proxy', mixed_proxies),\n ('https://u:p@some.host/path', 'socks5://http.proxy', mixed_proxies),\n ('https://', 'socks5://http.proxy', mixed_proxies),\n # XXX: unsure whether this is reasonable behavior\n ('file:///etc/motd', 'socks5://http.proxy', all_proxies),\n ))\ndef test_select_proxies(url, expected, proxies):\n \"\"\"Make sure we can select per-host proxies correctly.\"\"\"\n assert select_proxy(url, proxies) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('foo=\"is a fish\", bar=\"as well\"', {'foo': 'is a fish', 'bar': 'as well'}),\n ('key_without_value', {'key_without_value': None})\n ))\ndef test_parse_dict_header(value, expected):\n assert parse_dict_header(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n 'application/xml',\n ('application/xml', {})\n ),\n (\n 'application/json ; charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'application/json ; Charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'text/plain',\n ('text/plain', {})\n ),\n (\n 'multipart/form-data; boundary = 
something ; boundary2=\\'something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; boundary2=\"something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \\'boundary2=something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \"boundary2=something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'application/json ; ; ',\n ('application/json', {})\n )\n ))\ndef test__parse_content_type_header(value, expected):\n assert _parse_content_type_header(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n CaseInsensitiveDict(),\n None\n ),\n (\n CaseInsensitiveDict({'content-type': 'application/json; charset=utf-8'}),\n 'utf-8'\n ),\n (\n CaseInsensitiveDict({'content-type': 'text/plain'}),\n 'ISO-8859-1'\n ),\n ))\ndef test_get_encoding_from_headers(value, expected):\n assert get_encoding_from_headers(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, length', (\n ('', 0),\n ('T', 1),\n ('Test', 4),\n ('Cont', 0),\n ('Other', -5),\n ('Content', None),\n ))\ndef test_iter_slices(value, length):\n if length is None or (length <= 0 and len(value) > 0):\n # Reads all content at once\n assert len(list(iter_slices(value, length))) == 1\n else:\n assert len(list(iter_slices(value, 1))) == length\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n '<http:/.../front.jpeg>; rel=front; type=\"image/jpeg\"',\n [{'url': 'http:/.../front.jpeg', 'rel': 'front', 'type': 'image/jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>;',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>; type=\"image/jpeg\",<http://.../back.jpeg>;',\n [\n {'url': 'http:/.../front.jpeg', 'type': 'image/jpeg'},\n {'url': 'http://.../back.jpeg'}\n ]\n ),\n (\n '',\n []\n ),\n ))\ndef test_parse_header_links(value, expected):\n assert parse_header_links(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('example.com/path', 'http://example.com/path'),\n ('//example.com/path', 'http://example.com/path'),\n ))\ndef test_prepend_scheme_if_needed(value, expected):\n assert prepend_scheme_if_needed(value, 'http') == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('T', 'T'),\n (b'T', 'T'),\n (u'T', 'T'),\n ))\ndef test_to_native_string(value, expected):\n assert to_native_string(value) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://u:p@example.com/path?a=1#test', 'http://example.com/path?a=1'),\n ('http://example.com/path', 'http://example.com/path'),\n ('//u:p@example.com/path', '//example.com/path'),\n ('//example.com/path', '//example.com/path'),\n ('example.com/path', '//example.com/path'),\n ('scheme:u:p@example.com/path', 'scheme://example.com/path'),\n ))\ndef test_urldefragauth(url, expected):\n assert urldefragauth(url) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', 
True),\n ('http://google.com:6000/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ('file:///some/path/on/disk', True),\n ))\ndef test_should_bypass_proxies(url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not\n \"\"\"\n monkeypatch.setenv('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n monkeypatch.setenv('NO_PROXY', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n assert should_bypass_proxies(url, no_proxy=None) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://172.16.1.1/', '172.16.1.1'),\n ('http://172.16.1.1:5000/', '172.16.1.1'),\n ('http://user:pass@172.16.1.1', '172.16.1.1'),\n ('http://user:pass@172.16.1.1:5000', '172.16.1.1'),\n ('http://hostname/', 'hostname'),\n ('http://hostname:5000/', 'hostname'),\n ('http://user:pass@hostname', 'hostname'),\n ('http://user:pass@hostname:5000', 'hostname'),\n ))\ndef test_should_bypass_proxies_pass_only_hostname(url, expected, mocker):\n \"\"\"The proxy_bypass function should be called with a hostname or IP without\n a port number or auth credentials.\n \"\"\"\n proxy_bypass = mocker.patch('requests.utils.proxy_bypass')\n should_bypass_proxies(url, no_proxy=None)\n proxy_bypass.assert_called_once_with(expected)\n\n\n@pytest.mark.parametrize(\n 'cookiejar', (\n compat.cookielib.CookieJar(),\n RequestsCookieJar()\n ))\ndef test_add_dict_to_cookiejar(cookiejar):\n \"\"\"Ensure add_dict_to_cookiejar works for\n non-RequestsCookieJar CookieJars\n \"\"\"\n cookiedict = {'test': 'cookies',\n 'good': 'cookies'}\n cj = add_dict_to_cookiejar(cookiejar, cookiedict)\n cookies = {cookie.name: cookie.value for cookie in cj}\n assert cookiedict == cookies\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (u'test', True),\n (u'æíöû', False),\n (u'ジェーピーニック', False),\n )\n)\ndef test_unicode_is_ascii(value, expected):\n assert unicode_is_ascii(value) is expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ))\ndef test_should_bypass_proxies_no_proxy(\n url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not using the 'no_proxy' argument\n \"\"\"\n no_proxy = '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1'\n # Test 'no_proxy' argument\n assert should_bypass_proxies(url, no_proxy=no_proxy) == expected\n\n\n@pytest.mark.skipif(os.name != 'nt', reason='Test only on Windows')\n@pytest.mark.parametrize(\n 'url, expected, override', (\n ('http://192.168.0.1:5000/', True, None),\n ('http://192.168.0.1/', True, None),\n ('http://172.16.1.1/', True, None),\n ('http://172.16.1.1:5000/', True, None),\n ('http://localhost.localdomain:5000/v1.0/', True, None),\n ('http://172.16.1.22/', False, None),\n ('http://172.16.1.22:5000/', False, None),\n ('http://google.com:5000/v1.0/', False, None),\n ('http://mylocalhostname:5000/v1.0/', True, '<local>'),\n ('http://192.168.0.1/', False, ''),\n ))\ndef test_should_bypass_proxies_win_registry(url, expected, override,\n monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if 
proxy\n can be bypassed or not with Windows registry settings\n \"\"\"\n if override is None:\n override = '192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1'\n if compat.is_py3:\n import winreg\n else:\n import _winreg as winreg\n\n class RegHandle:\n def Close(self):\n pass\n\n ie_settings = RegHandle()\n proxyEnableValues = deque([1, \"1\"])\n\n def OpenKey(key, subkey):\n return ie_settings\n\n def QueryValueEx(key, value_name):\n if key is ie_settings:\n if value_name == 'ProxyEnable':\n # this could be a string (REG_SZ) or a 32-bit number (REG_DWORD)\n proxyEnableValues.rotate()\n return [proxyEnableValues[0]]\n elif value_name == 'ProxyOverride':\n return [override]\n\n monkeypatch.setenv('http_proxy', '')\n monkeypatch.setenv('https_proxy', '')\n monkeypatch.setenv('ftp_proxy', '')\n monkeypatch.setenv('no_proxy', '')\n monkeypatch.setenv('NO_PROXY', '')\n monkeypatch.setattr(winreg, 'OpenKey', OpenKey)\n monkeypatch.setattr(winreg, 'QueryValueEx', QueryValueEx)\n assert should_bypass_proxies(url, None) == expected\n\n\n@pytest.mark.parametrize(\n 'env_name, value', (\n ('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('no_proxy', None),\n ('a_new_key', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('a_new_key', None),\n ))\ndef test_set_environ(env_name, value):\n \"\"\"Tests set_environ will set environ values and will restore the environ.\"\"\"\n environ_copy = copy.deepcopy(os.environ)\n with set_environ(env_name, value):\n assert os.environ.get(env_name) == value\n\n assert os.environ == environ_copy\n\n\ndef test_set_environ_raises_exception():\n \"\"\"Tests set_environ will raise exceptions in context when the\n value parameter is None.\"\"\"\n with pytest.raises(Exception) as exception:\n with set_environ('test1', None):\n raise Exception('Expected exception')\n\n assert 'Expected exception' in str(exception.value)\n",
"path": "tests/test_utils.py"
}
] | 13_8 | python | import sys
import pytest
@pytest.mark.parametrize(
"value, expected",
(
("example.com:80", "http://example.com:80"),
),
)
def test_prepend_scheme_if_needed(value, expected):
from requests.utils import prepend_scheme_if_needed
assert prepend_scheme_if_needed(value, "http") == expected
def main():
# Run the pytest tests programmatically
exit_code = pytest.main(["-v", __file__])
# Exit with status code 1 if any test fails, otherwise 0
if exit_code != 0:
sys.exit(1)
else:
sys.exit(0)
if __name__ == '__main__':
main()
|
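A minimal sketch of the reparsing approach described in the 13_8 instruction and solution patch above, assuming urllib3 is installed; `with_scheme` is a hypothetical stand-in for the patched `prepend_scheme_if_needed`:

from urllib.parse import urlunparse
from urllib3.util import parse_url

def with_scheme(url, new_scheme):
    # parse_url splits host/port/auth more reliably than urlparse
    # for scheme-less inputs such as 'example.com:80'.
    parsed = parse_url(url)
    scheme, auth, host, port, path, query, fragment = parsed
    netloc = parsed.netloc
    if not netloc:
        # Preserve the historical netloc/path swap for inputs where
        # no authority component was detected.
        netloc, path = path, netloc
    if scheme is None:
        scheme = new_scheme
    return urlunparse((scheme, netloc, path or '', '', query, fragment))

print(with_scheme('example.com:80', 'http'))      # http://example.com:80
print(with_scheme('//example.com/path', 'http'))  # http://example.com/path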
https://github.com/teamqurrent/requests | Your goal is to update the `should_bypass_proxies` function in `requests/utils.py` to handle URLs without a hostname. After parsing the URL, add a condition that checks whether the hostname is None and, in such cases, returns True to bypass proxies. This ensures that file:// URLs and similar non-standard URLs are correctly identified as not requiring a proxy. | dffd5d4 | -e .[socks]
pytest
pytest-cov
pytest-httpbin==1.0.0
pytest-mock
httpbin==0.7.0
trustme
wheel
chardet>=3.0.2,<3.1.0
idna>=2.5,<2.8
urllib3>=1.21.1,<1.24
certifi>=2017.4.17
# Flask Stack
Flask>1.0,<2.0
markupsafe<2.1
| python3.9 | 8023a01d | diff --git a/requests/utils.py b/requests/utils.py
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -706,6 +706,10 @@ def should_bypass_proxies(url, no_proxy):
no_proxy = get_proxy('no_proxy')
parsed = urlparse(url)
+ if parsed.hostname is None:
+ # URLs don't always have hostnames, e.g. file:/// urls.
+ return True
+
if no_proxy:
# We need to check whether we match here. We need to see if we match
# the end of the hostname, both with and without the port.
diff --git a/tests/test_utils.py b/tests/test_utils.py
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -622,6 +622,7 @@ def test_urldefragauth(url, expected):
('http://172.16.1.12/', False),
('http://172.16.1.12:5000/', False),
('http://google.com:5000/v1.0/', False),
+ ('file:///some/path/on/disk', True),
))
def test_should_bypass_proxies(url, expected, monkeypatch):
"""Tests for function should_bypass_proxies to check if proxy
| [
{
"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nrequests.utils\n~~~~~~~~~~~~~~\n\nThis module provides utility functions that are used within Requests\nthat are also useful for external consumption.\n\"\"\"\n\nimport codecs\nimport contextlib\nimport io\nimport os\nimport re\nimport socket\nimport struct\nimport sys\nimport tempfile\nimport warnings\nimport zipfile\n\nfrom .__version__ import __version__\nfrom . import certs\n# to_native_string is unused here, but imported here for backwards compatibility\nfrom ._internal_utils import to_native_string\nfrom .compat import parse_http_list as _parse_list_header\nfrom .compat import (\n quote, urlparse, bytes, str, OrderedDict, unquote, getproxies,\n proxy_bypass, urlunparse, basestring, integer_types, is_py3,\n proxy_bypass_environment, getproxies_environment, Mapping)\nfrom .cookies import cookiejar_from_dict\nfrom .structures import CaseInsensitiveDict\nfrom .exceptions import (\n InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)\n\nNETRC_FILES = ('.netrc', '_netrc')\n\nDEFAULT_CA_BUNDLE_PATH = certs.where()\n\n\nif sys.platform == 'win32':\n # provide a proxy_bypass version on Windows without DNS lookups\n\n def proxy_bypass_registry(host):\n try:\n if is_py3:\n import winreg\n else:\n import _winreg as winreg\n except ImportError:\n return False\n\n try:\n internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,\n r'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings')\n # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it\n proxyEnable = int(winreg.QueryValueEx(internetSettings,\n 'ProxyEnable')[0])\n # ProxyOverride is almost always a string\n proxyOverride = winreg.QueryValueEx(internetSettings,\n 'ProxyOverride')[0]\n except OSError:\n return False\n if not proxyEnable or not proxyOverride:\n return False\n\n # make a check value list from the registry entry: replace the\n # '<local>' string by the localhost entry and the corresponding\n # canonical entry.\n proxyOverride = proxyOverride.split(';')\n # now check if we match one of the registry values.\n for test in proxyOverride:\n if test == '<local>':\n if '.' not in host:\n return True\n test = test.replace(\".\", r\"\\.\") # mask dots\n test = test.replace(\"*\", r\".*\") # change glob sequence\n test = test.replace(\"?\", r\".\") # change glob char\n if re.match(test, host, re.I):\n return True\n return False\n\n def proxy_bypass(host): # noqa\n \"\"\"Return True, if the host should be bypassed.\n\n Checks proxy settings gathered from the environment, if specified,\n or the registry.\n \"\"\"\n if getproxies_environment():\n return proxy_bypass_environment(host)\n else:\n return proxy_bypass_registry(host)\n\n\ndef dict_to_sequence(d):\n \"\"\"Returns an internal sequence dictionary update.\"\"\"\n\n if hasattr(d, 'items'):\n d = d.items()\n\n return d\n\n\ndef super_len(o):\n total_length = None\n current_position = 0\n\n if hasattr(o, '__len__'):\n total_length = len(o)\n\n elif hasattr(o, 'len'):\n total_length = o.len\n\n elif hasattr(o, 'fileno'):\n try:\n fileno = o.fileno()\n except io.UnsupportedOperation:\n pass\n else:\n total_length = os.fstat(fileno).st_size\n\n # Having used fstat to determine the file length, we need to\n # confirm that this file was opened up in binary mode.\n if 'b' not in o.mode:\n warnings.warn((\n \"Requests has determined the content-length for this \"\n \"request using the binary size of the file: however, the \"\n \"file has been opened in text mode (i.e. without the 'b' \"\n \"flag in the mode). 
This may lead to an incorrect \"\n \"content-length. In Requests 3.0, support will be removed \"\n \"for files in text mode.\"),\n FileModeWarning\n )\n\n if hasattr(o, 'tell'):\n try:\n current_position = o.tell()\n except (OSError, IOError):\n # This can happen in some weird situations, such as when the file\n # is actually a special file descriptor like stdin. In this\n # instance, we don't know what the length is, so set it to zero and\n # let requests chunk it instead.\n if total_length is not None:\n current_position = total_length\n else:\n if hasattr(o, 'seek') and total_length is None:\n # StringIO and BytesIO have seek but no useable fileno\n try:\n # seek to end of file\n o.seek(0, 2)\n total_length = o.tell()\n\n # seek back to current position to support\n # partially read file-like objects\n o.seek(current_position or 0)\n except (OSError, IOError):\n total_length = 0\n\n if total_length is None:\n total_length = 0\n\n return max(0, total_length - current_position)\n\n\ndef get_netrc_auth(url, raise_errors=False):\n \"\"\"Returns the Requests tuple auth for a given url from netrc.\"\"\"\n\n try:\n from netrc import netrc, NetrcParseError\n\n netrc_path = None\n\n for f in NETRC_FILES:\n try:\n loc = os.path.expanduser('~/{0}'.format(f))\n except KeyError:\n # os.path.expanduser can fail when $HOME is undefined and\n # getpwuid fails. See http://bugs.python.org/issue20164 &\n # https://github.com/requests/requests/issues/1846\n return\n\n if os.path.exists(loc):\n netrc_path = loc\n break\n\n # Abort early if there isn't one.\n if netrc_path is None:\n return\n\n ri = urlparse(url)\n\n # Strip port numbers from netloc. This weird `if...encode`` dance is\n # used for Python 3.2, which doesn't support unicode literals.\n splitstr = b':'\n if isinstance(url, str):\n splitstr = splitstr.decode('ascii')\n host = ri.netloc.split(splitstr)[0]\n\n try:\n _netrc = netrc(netrc_path).authenticators(host)\n if _netrc:\n # Return with login / password\n login_i = (0 if _netrc[0] else 1)\n return (_netrc[login_i], _netrc[2])\n except (NetrcParseError, IOError):\n # If there was a parsing error or a permissions issue reading the file,\n # we'll just skip netrc auth unless explicitly asked to raise errors.\n if raise_errors:\n raise\n\n # AppEngine hackiness.\n except (ImportError, AttributeError):\n pass\n\n\ndef guess_filename(obj):\n \"\"\"Tries to guess the filename of the given object.\"\"\"\n name = getattr(obj, 'name', None)\n if (name and isinstance(name, basestring) and name[0] != '<' and\n name[-1] != '>'):\n return os.path.basename(name)\n\n\ndef extract_zipped_paths(path):\n \"\"\"Replace nonexistent paths that look like they refer to a member of a zip\n archive with the location of an extracted copy of the target, or else\n just return the provided path unchanged.\n \"\"\"\n if os.path.exists(path):\n # this is already a valid path, no need to do anything further\n return path\n\n # find the first valid part of the provided path and treat that as a zip archive\n # assume the rest of the path is the name of a member in the archive\n archive, member = os.path.split(path)\n while archive and not os.path.exists(archive):\n archive, prefix = os.path.split(archive)\n member = '/'.join([prefix, member])\n\n if not zipfile.is_zipfile(archive):\n return path\n\n zip_file = zipfile.ZipFile(archive)\n if member not in zip_file.namelist():\n return path\n\n # we have a valid zip archive and a valid member of that archive\n tmp = tempfile.gettempdir()\n extracted_path = os.path.join(tmp, 
*member.split('/'))\n if not os.path.exists(extracted_path):\n extracted_path = zip_file.extract(member, path=tmp)\n\n return extracted_path\n\n\ndef from_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. Unless it can not be represented as such, return an\n OrderedDict, e.g.,\n\n ::\n\n >>> from_key_val_list([('key', 'val')])\n OrderedDict([('key', 'val')])\n >>> from_key_val_list('string')\n ValueError: need more than 1 value to unpack\n >>> from_key_val_list({'key': 'val'})\n OrderedDict([('key', 'val')])\n\n :rtype: OrderedDict\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n return OrderedDict(value)\n\n\ndef to_key_val_list(value):\n \"\"\"Take an object and test to see if it can be represented as a\n dictionary. If it can be, return a list of tuples, e.g.,\n\n ::\n\n >>> to_key_val_list([('key', 'val')])\n [('key', 'val')]\n >>> to_key_val_list({'key': 'val'})\n [('key', 'val')]\n >>> to_key_val_list('string')\n ValueError: cannot encode objects that are not 2-tuples.\n\n :rtype: list\n \"\"\"\n if value is None:\n return None\n\n if isinstance(value, (str, bytes, bool, int)):\n raise ValueError('cannot encode objects that are not 2-tuples')\n\n if isinstance(value, Mapping):\n value = value.items()\n\n return list(value)\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_list_header(value):\n \"\"\"Parse lists as described by RFC 2068 Section 2.\n\n In particular, parse comma-separated lists where the elements of\n the list may include quoted-strings. A quoted-string could\n contain a comma. A non-quoted string could have quotes in the\n middle. Quotes are removed automatically after parsing.\n\n It basically works like :func:`parse_set_header` just that items\n may appear multiple times and case sensitivity is preserved.\n\n The return value is a standard :class:`list`:\n\n >>> parse_list_header('token, \"quoted value\"')\n ['token', 'quoted value']\n\n To create a header from the :class:`list` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a list header.\n :return: :class:`list`\n :rtype: list\n \"\"\"\n result = []\n for item in _parse_list_header(value):\n if item[:1] == item[-1:] == '\"':\n item = unquote_header_value(item[1:-1])\n result.append(item)\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef parse_dict_header(value):\n \"\"\"Parse lists of key, value pairs as described by RFC 2068 Section 2 and\n convert them into a python dict:\n\n >>> d = parse_dict_header('foo=\"is a fish\", bar=\"as well\"')\n >>> type(d) is dict\n True\n >>> sorted(d.items())\n [('bar', 'as well'), ('foo', 'is a fish')]\n\n If there is no value for a key it will be `None`:\n\n >>> parse_dict_header('key_without_value')\n {'key_without_value': None}\n\n To create a header from the :class:`dict` again, use the\n :func:`dump_header` function.\n\n :param value: a string with a dict header.\n :return: :class:`dict`\n :rtype: dict\n \"\"\"\n result = {}\n for item in _parse_list_header(value):\n if '=' not in item:\n result[item] = None\n continue\n name, value = item.split('=', 1)\n if value[:1] == value[-1:] == '\"':\n value = unquote_header_value(value[1:-1])\n result[name] = value\n return result\n\n\n# From mitsuhiko/werkzeug (used with permission).\ndef unquote_header_value(value, is_filename=False):\n r\"\"\"Unquotes a header value. 
(Reversal of :func:`quote_header_value`).\n This does not use the real unquoting but what browsers are actually\n using for quoting.\n\n :param value: the header value to unquote.\n :rtype: str\n \"\"\"\n if value and value[0] == value[-1] == '\"':\n # this is not the real unquoting, but fixing this so that the\n # RFC is met will result in bugs with internet explorer and\n # probably some other browsers as well. IE for example is\n # uploading files with \"C:\\foo\\bar.txt\" as filename\n value = value[1:-1]\n\n # if this is a filename and the starting characters look like\n # a UNC path, then just return the value without quotes. Using the\n # replace sequence below on a UNC path has the effect of turning\n # the leading double slash into a single slash and then\n # _fix_ie_filename() doesn't work correctly. See #458.\n if not is_filename or value[:2] != '\\\\\\\\':\n return value.replace('\\\\\\\\', '\\\\').replace('\\\\\"', '\"')\n return value\n\n\ndef dict_from_cookiejar(cj):\n \"\"\"Returns a key/value dictionary from a CookieJar.\n\n :param cj: CookieJar object to extract cookies from.\n :rtype: dict\n \"\"\"\n\n cookie_dict = {}\n\n for cookie in cj:\n cookie_dict[cookie.name] = cookie.value\n\n return cookie_dict\n\n\ndef add_dict_to_cookiejar(cj, cookie_dict):\n \"\"\"Returns a CookieJar from a key/value dictionary.\n\n :param cj: CookieJar to insert cookies into.\n :param cookie_dict: Dict of key/values to insert into CookieJar.\n :rtype: CookieJar\n \"\"\"\n\n return cookiejar_from_dict(cookie_dict, cj)\n\n\ndef get_encodings_from_content(content):\n \"\"\"Returns encodings from given content string.\n\n :param content: bytestring to extract encodings from.\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_encodings_from_content will be removed. For '\n 'more information, please see the discussion on issue #2266. 
(This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n charset_re = re.compile(r'<meta.*?charset=[\"\\']*(.+?)[\"\\'>]', flags=re.I)\n pragma_re = re.compile(r'<meta.*?content=[\"\\']*;?charset=(.+?)[\"\\'>]', flags=re.I)\n xml_re = re.compile(r'^<\\?xml.*?encoding=[\"\\']*(.+?)[\"\\'>]')\n\n return (charset_re.findall(content) +\n pragma_re.findall(content) +\n xml_re.findall(content))\n\n\ndef _parse_content_type_header(header):\n \"\"\"Returns content type and parameters from given header\n\n :param header: string\n :return: tuple containing content type and dictionary of\n parameters\n \"\"\"\n\n tokens = header.split(';')\n content_type, params = tokens[0].strip(), tokens[1:]\n params_dict = {}\n items_to_strip = \"\\\"' \"\n\n for param in params:\n param = param.strip()\n if param:\n key, value = param, True\n index_of_equals = param.find(\"=\")\n if index_of_equals != -1:\n key = param[:index_of_equals].strip(items_to_strip)\n value = param[index_of_equals + 1:].strip(items_to_strip)\n params_dict[key.lower()] = value\n return content_type, params_dict\n\n\ndef get_encoding_from_headers(headers):\n \"\"\"Returns encodings from given HTTP Header Dict.\n\n :param headers: dictionary to extract encoding from.\n :rtype: str\n \"\"\"\n\n content_type = headers.get('content-type')\n\n if not content_type:\n return None\n\n content_type, params = _parse_content_type_header(content_type)\n\n if 'charset' in params:\n return params['charset'].strip(\"'\\\"\")\n\n if 'text' in content_type:\n return 'ISO-8859-1'\n\n\ndef stream_decode_response_unicode(iterator, r):\n \"\"\"Stream decodes a iterator.\"\"\"\n\n if r.encoding is None:\n for item in iterator:\n yield item\n return\n\n decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')\n for chunk in iterator:\n rv = decoder.decode(chunk)\n if rv:\n yield rv\n rv = decoder.decode(b'', final=True)\n if rv:\n yield rv\n\n\ndef iter_slices(string, slice_length):\n \"\"\"Iterate over slices of a string.\"\"\"\n pos = 0\n if slice_length is None or slice_length <= 0:\n slice_length = len(string)\n while pos < len(string):\n yield string[pos:pos + slice_length]\n pos += slice_length\n\n\ndef get_unicode_from_response(r):\n \"\"\"Returns the requested content back in unicode.\n\n :param r: Response object to get unicode content from.\n\n Tried:\n\n 1. charset from content-type\n 2. fall back and replace all unicode characters\n\n :rtype: str\n \"\"\"\n warnings.warn((\n 'In requests 3.0, get_unicode_from_response will be removed. For '\n 'more information, please see the discussion on issue #2266. (This'\n ' warning should only appear once.)'),\n DeprecationWarning)\n\n tried_encodings = []\n\n # Try charset from content-type\n encoding = get_encoding_from_headers(r.headers)\n\n if encoding:\n try:\n return str(r.content, encoding)\n except UnicodeError:\n tried_encodings.append(encoding)\n\n # Fall back:\n try:\n return str(r.content, encoding, errors='replace')\n except TypeError:\n return r.content\n\n\n# The unreserved URI characters (RFC 3986)\nUNRESERVED_SET = frozenset(\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\" + \"0123456789-._~\")\n\n\ndef unquote_unreserved(uri):\n \"\"\"Un-escape any percent-escape sequences in a URI that are unreserved\n characters. 
This leaves all reserved, illegal and non-ASCII bytes encoded.\n\n :rtype: str\n \"\"\"\n parts = uri.split('%')\n for i in range(1, len(parts)):\n h = parts[i][0:2]\n if len(h) == 2 and h.isalnum():\n try:\n c = chr(int(h, 16))\n except ValueError:\n raise InvalidURL(\"Invalid percent-escape sequence: '%s'\" % h)\n\n if c in UNRESERVED_SET:\n parts[i] = c + parts[i][2:]\n else:\n parts[i] = '%' + parts[i]\n else:\n parts[i] = '%' + parts[i]\n return ''.join(parts)\n\n\ndef requote_uri(uri):\n \"\"\"Re-quote the given URI.\n\n This function passes the given URI through an unquote/quote cycle to\n ensure that it is fully and consistently quoted.\n\n :rtype: str\n \"\"\"\n safe_with_percent = \"!#$%&'()*+,/:;=?@[]~\"\n safe_without_percent = \"!#$&'()*+,/:;=?@[]~\"\n try:\n # Unquote only the unreserved characters\n # Then quote only illegal characters (do not quote reserved,\n # unreserved, or '%')\n return quote(unquote_unreserved(uri), safe=safe_with_percent)\n except InvalidURL:\n # We couldn't unquote the given URI, so let's try quoting it, but\n # there may be unquoted '%'s in the URI. We need to make sure they're\n # properly quoted so they do not cause issues elsewhere.\n return quote(uri, safe=safe_without_percent)\n\n\ndef address_in_network(ip, net):\n \"\"\"This function allows you to check if an IP belongs to a network subnet\n\n Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24\n returns False if ip = 192.168.1.1 and net = 192.168.100.0/24\n\n :rtype: bool\n \"\"\"\n ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0]\n netaddr, bits = net.split('/')\n netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]\n network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask\n return (ipaddr & netmask) == (network & netmask)\n\n\ndef dotted_netmask(mask):\n \"\"\"Converts mask from /xx format to xxx.xxx.xxx.xxx\n\n Example: if mask is 24 function returns 255.255.255.0\n\n :rtype: str\n \"\"\"\n bits = 0xffffffff ^ (1 << 32 - mask) - 1\n return socket.inet_ntoa(struct.pack('>I', bits))\n\n\ndef is_ipv4_address(string_ip):\n \"\"\"\n :rtype: bool\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except socket.error:\n return False\n return True\n\n\ndef is_valid_cidr(string_network):\n \"\"\"\n Very simple check of the cidr format in no_proxy variable.\n\n :rtype: bool\n \"\"\"\n if string_network.count('/') == 1:\n try:\n mask = int(string_network.split('/')[1])\n except ValueError:\n return False\n\n if mask < 1 or mask > 32:\n return False\n\n try:\n socket.inet_aton(string_network.split('/')[0])\n except socket.error:\n return False\n else:\n return False\n return True\n\n\n@contextlib.contextmanager\ndef set_environ(env_name, value):\n \"\"\"Set the environment variable 'env_name' to 'value'\n\n Save previous value, yield, and then restore the previous value stored in\n the environment variable 'env_name'.\n\n If 'value' is None, do nothing\"\"\"\n value_changed = value is not None\n if value_changed:\n old_value = os.environ.get(env_name)\n os.environ[env_name] = value\n try:\n yield\n finally:\n if value_changed:\n if old_value is None:\n del os.environ[env_name]\n else:\n os.environ[env_name] = old_value\n\n\ndef should_bypass_proxies(url, no_proxy):\n \"\"\"\n Returns whether we should bypass proxies or not.\n\n :rtype: bool\n \"\"\"\n # Prioritize lowercase environment variables over uppercase\n # to keep a consistent behaviour with other http projects (curl, wget).\n get_proxy = lambda k: os.environ.get(k) or 
os.environ.get(k.upper())\n\n # First check whether no_proxy is defined. If it is, check that the URL\n # we're getting isn't in the no_proxy list.\n no_proxy_arg = no_proxy\n if no_proxy is None:\n no_proxy = get_proxy('no_proxy')\n parsed = urlparse(url)\n\n if no_proxy:\n # We need to check whether we match here. We need to see if we match\n # the end of the hostname, both with and without the port.\n no_proxy = (\n host for host in no_proxy.replace(' ', '').split(',') if host\n )\n\n if is_ipv4_address(parsed.hostname):\n for proxy_ip in no_proxy:\n if is_valid_cidr(proxy_ip):\n if address_in_network(parsed.hostname, proxy_ip):\n return True\n elif parsed.hostname == proxy_ip:\n # If no_proxy ip was defined in plain IP notation instead of cidr notation &\n # matches the IP of the index\n return True\n else:\n host_with_port = parsed.hostname\n if parsed.port:\n host_with_port += ':{0}'.format(parsed.port)\n\n for host in no_proxy:\n if parsed.hostname.endswith(host) or host_with_port.endswith(host):\n # The URL does match something in no_proxy, so we don't want\n # to apply the proxies on this URL.\n return True\n\n # If the system proxy settings indicate that this URL should be bypassed,\n # don't proxy.\n # The proxy_bypass function is incredibly buggy on OS X in early versions\n # of Python 2.6, so allow this call to fail. Only catch the specific\n # exceptions we've seen, though: this call failing in other ways can reveal\n # legitimate problems.\n with set_environ('no_proxy', no_proxy_arg):\n try:\n bypass = proxy_bypass(parsed.hostname)\n except (TypeError, socket.gaierror):\n bypass = False\n\n if bypass:\n return True\n\n return False\n\n\ndef get_environ_proxies(url, no_proxy=None):\n \"\"\"\n Return a dict of environment proxies.\n\n :rtype: dict\n \"\"\"\n if should_bypass_proxies(url, no_proxy=no_proxy):\n return {}\n else:\n return getproxies()\n\n\ndef select_proxy(url, proxies):\n \"\"\"Select a proxy for the url, if applicable.\n\n :param url: The url being for the request\n :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs\n \"\"\"\n proxies = proxies or {}\n urlparts = urlparse(url)\n if urlparts.hostname is None:\n return proxies.get(urlparts.scheme, proxies.get('all'))\n\n proxy_keys = [\n urlparts.scheme + '://' + urlparts.hostname,\n urlparts.scheme,\n 'all://' + urlparts.hostname,\n 'all',\n ]\n proxy = None\n for proxy_key in proxy_keys:\n if proxy_key in proxies:\n proxy = proxies[proxy_key]\n break\n\n return proxy\n\n\ndef default_user_agent(name=\"python-requests\"):\n \"\"\"\n Return a string representing the default user agent.\n\n :rtype: str\n \"\"\"\n return '%s/%s' % (name, __version__)\n\n\ndef default_headers():\n \"\"\"\n :rtype: requests.structures.CaseInsensitiveDict\n \"\"\"\n return CaseInsensitiveDict({\n 'User-Agent': default_user_agent(),\n 'Accept-Encoding': ', '.join(('gzip', 'deflate')),\n 'Accept': '*/*',\n 'Connection': 'keep-alive',\n })\n\n\ndef parse_header_links(value):\n \"\"\"Return a list of parsed link headers proxies.\n\n i.e. 
Link: <http:/.../front.jpeg>; rel=front; type=\"image/jpeg\",<http://.../back.jpeg>; rel=back;type=\"image/jpeg\"\n\n :rtype: list\n \"\"\"\n\n links = []\n\n replace_chars = ' \\'\"'\n\n value = value.strip(replace_chars)\n if not value:\n return links\n\n for val in re.split(', *<', value):\n try:\n url, params = val.split(';', 1)\n except ValueError:\n url, params = val, ''\n\n link = {'url': url.strip('<> \\'\"')}\n\n for param in params.split(';'):\n try:\n key, value = param.split('=')\n except ValueError:\n break\n\n link[key.strip(replace_chars)] = value.strip(replace_chars)\n\n links.append(link)\n\n return links\n\n\n# Null bytes; no need to recreate these on each call to guess_json_utf\n_null = '\\x00'.encode('ascii') # encoding to ASCII for Python 3\n_null2 = _null * 2\n_null3 = _null * 3\n\n\ndef guess_json_utf(data):\n \"\"\"\n :rtype: str\n \"\"\"\n # JSON always starts with two ASCII characters, so detection is as\n # easy as counting the nulls and from their location and count\n # determine the encoding. Also detect a BOM, if present.\n sample = data[:4]\n if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):\n return 'utf-32' # BOM included\n if sample[:3] == codecs.BOM_UTF8:\n return 'utf-8-sig' # BOM included, MS style (discouraged)\n if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):\n return 'utf-16' # BOM included\n nullcount = sample.count(_null)\n if nullcount == 0:\n return 'utf-8'\n if nullcount == 2:\n if sample[::2] == _null2: # 1st and 3rd are null\n return 'utf-16-be'\n if sample[1::2] == _null2: # 2nd and 4th are null\n return 'utf-16-le'\n # Did not detect 2 valid UTF-16 ascii-range characters\n if nullcount == 3:\n if sample[:3] == _null3:\n return 'utf-32-be'\n if sample[1:] == _null3:\n return 'utf-32-le'\n # Did not detect a valid UTF-32 ascii-range character\n return None\n\n\ndef prepend_scheme_if_needed(url, new_scheme):\n \"\"\"Given a URL that may or may not have a scheme, prepend the given scheme.\n Does not replace a present scheme with the one provided as an argument.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)\n\n # urlparse is a finicky beast, and sometimes decides that there isn't a\n # netloc present. Assume that it's being over-cautious, and switch netloc\n # and path if urlparse decided there was no netloc.\n if not netloc:\n netloc, path = path, netloc\n\n return urlunparse((scheme, netloc, path, params, query, fragment))\n\n\ndef get_auth_from_url(url):\n \"\"\"Given a url with authentication components, extract them into a tuple of\n username,password.\n\n :rtype: (str,str)\n \"\"\"\n parsed = urlparse(url)\n\n try:\n auth = (unquote(parsed.username), unquote(parsed.password))\n except (AttributeError, TypeError):\n auth = ('', '')\n\n return auth\n\n\n# Moved outside of function to avoid recompile every call\n_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\\\S[^\\\\r\\\\n]*$|^$')\n_CLEAN_HEADER_REGEX_STR = re.compile(r'^\\S[^\\r\\n]*$|^$')\n\n\ndef check_header_validity(header):\n \"\"\"Verifies that header value is a string which doesn't contain\n leading whitespace or return characters. 
This prevents unintended\n header injection.\n\n :param header: tuple, in the format (name, value).\n \"\"\"\n name, value = header\n\n if isinstance(value, bytes):\n pat = _CLEAN_HEADER_REGEX_BYTE\n else:\n pat = _CLEAN_HEADER_REGEX_STR\n try:\n if not pat.match(value):\n raise InvalidHeader(\"Invalid return character or leading space in header: %s\" % name)\n except TypeError:\n raise InvalidHeader(\"Value for header {%s: %s} must be of type str or \"\n \"bytes, not %s\" % (name, value, type(value)))\n\n\ndef urldefragauth(url):\n \"\"\"\n Given a url remove the fragment and the authentication part.\n\n :rtype: str\n \"\"\"\n scheme, netloc, path, params, query, fragment = urlparse(url)\n\n # see func:`prepend_scheme_if_needed`\n if not netloc:\n netloc, path = path, netloc\n\n netloc = netloc.rsplit('@', 1)[-1]\n\n return urlunparse((scheme, netloc, path, params, query, ''))\n\n\ndef rewind_body(prepared_request):\n \"\"\"Move file pointer back to its recorded starting position\n so it can be read again on redirect.\n \"\"\"\n body_seek = getattr(prepared_request.body, 'seek', None)\n if body_seek is not None and isinstance(prepared_request._body_position, integer_types):\n try:\n body_seek(prepared_request._body_position)\n except (IOError, OSError):\n raise UnrewindableBodyError(\"An error occurred when rewinding request \"\n \"body for redirect.\")\n else:\n raise UnrewindableBodyError(\"Unable to rewind request body for redirect.\")\n",
"path": "requests/utils.py"
},
{
"content": "# -*- coding: utf-8 -*-\n\nimport os\nimport copy\nimport filecmp\nfrom io import BytesIO\nimport zipfile\nfrom collections import deque\n\nimport pytest\nfrom requests import compat\nfrom requests.cookies import RequestsCookieJar\nfrom requests.structures import CaseInsensitiveDict\nfrom requests.utils import (\n address_in_network, dotted_netmask, extract_zipped_paths,\n get_auth_from_url, _parse_content_type_header, get_encoding_from_headers,\n get_encodings_from_content, get_environ_proxies,\n guess_filename, guess_json_utf, is_ipv4_address,\n is_valid_cidr, iter_slices, parse_dict_header,\n parse_header_links, prepend_scheme_if_needed,\n requote_uri, select_proxy, should_bypass_proxies, super_len,\n to_key_val_list, to_native_string,\n unquote_header_value, unquote_unreserved,\n urldefragauth, add_dict_to_cookiejar, set_environ)\nfrom requests._internal_utils import unicode_is_ascii\n\nfrom .compat import StringIO, cStringIO\n\n\nclass TestSuperLen:\n\n @pytest.mark.parametrize(\n 'stream, value', (\n (StringIO.StringIO, 'Test'),\n (BytesIO, b'Test'),\n pytest.mark.skipif('cStringIO is None')((cStringIO, 'Test')),\n ))\n def test_io_streams(self, stream, value):\n \"\"\"Ensures that we properly deal with different kinds of IO streams.\"\"\"\n assert super_len(stream()) == 0\n assert super_len(stream(value)) == 4\n\n def test_super_len_correctly_calculates_len_of_partially_read_file(self):\n \"\"\"Ensure that we handle partially consumed file like objects.\"\"\"\n s = StringIO.StringIO()\n s.write('foobarbogus')\n assert super_len(s) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_handles_files_raising_weird_errors_in_tell(self, error):\n \"\"\"If tell() raises errors, assume the cursor is at position zero.\"\"\"\n class BoomFile(object):\n def __len__(self):\n return 5\n\n def tell(self):\n raise error()\n\n assert super_len(BoomFile()) == 0\n\n @pytest.mark.parametrize('error', [IOError, OSError])\n def test_super_len_tell_ioerror(self, error):\n \"\"\"Ensure that if tell gives an IOError super_len doesn't fail\"\"\"\n class NoLenBoomFile(object):\n def tell(self):\n raise error()\n\n def seek(self, offset, whence):\n pass\n\n assert super_len(NoLenBoomFile()) == 0\n\n def test_string(self):\n assert super_len('Test') == 4\n\n @pytest.mark.parametrize(\n 'mode, warnings_num', (\n ('r', 1),\n ('rb', 0),\n ))\n def test_file(self, tmpdir, mode, warnings_num, recwarn):\n file_obj = tmpdir.join('test.txt')\n file_obj.write('Test')\n with file_obj.open(mode) as fd:\n assert super_len(fd) == 4\n assert len(recwarn) == warnings_num\n\n def test_super_len_with__len__(self):\n foo = [1,2,3,4]\n len_foo = super_len(foo)\n assert len_foo == 4\n\n def test_super_len_with_no__len__(self):\n class LenFile(object):\n def __init__(self):\n self.len = 5\n\n assert super_len(LenFile()) == 5\n\n def test_super_len_with_tell(self):\n foo = StringIO.StringIO('12345')\n assert super_len(foo) == 5\n foo.read(2)\n assert super_len(foo) == 3\n\n def test_super_len_with_fileno(self):\n with open(__file__, 'rb') as f:\n length = super_len(f)\n file_data = f.read()\n assert length == len(file_data)\n\n def test_super_len_with_no_matches(self):\n \"\"\"Ensure that objects without any length methods default to 0\"\"\"\n assert super_len(object()) == 0\n\n\nclass TestToKeyValList:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n ([('key', 'val')], [('key', 'val')]),\n ((('key', 'val'), ), [('key', 'val')]),\n ({'key': 'val'}, [('key', 'val')]),\n (None, None)\n 
))\n def test_valid(self, value, expected):\n assert to_key_val_list(value) == expected\n\n def test_invalid(self):\n with pytest.raises(ValueError):\n to_key_val_list('string')\n\n\nclass TestUnquoteHeaderValue:\n\n @pytest.mark.parametrize(\n 'value, expected', (\n (None, None),\n ('Test', 'Test'),\n ('\"Test\"', 'Test'),\n ('\"Test\\\\\\\\\"', 'Test\\\\'),\n ('\"\\\\\\\\Comp\\\\Res\"', '\\\\Comp\\\\Res'),\n ))\n def test_valid(self, value, expected):\n assert unquote_header_value(value) == expected\n\n def test_is_filename(self):\n assert unquote_header_value('\"\\\\\\\\Comp\\\\Res\"', True) == '\\\\\\\\Comp\\\\Res'\n\n\nclass TestGetEnvironProxies:\n \"\"\"Ensures that IP addresses are correctly matches with ranges\n in no_proxy variable.\n \"\"\"\n\n @pytest.fixture(autouse=True, params=['no_proxy', 'NO_PROXY'])\n def no_proxy(self, request, monkeypatch):\n monkeypatch.setenv(request.param, '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1')\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_not_bypass(self, url):\n assert get_environ_proxies(url, no_proxy=None) != {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.1.1:5000/',\n 'http://192.168.1.1/',\n 'http://www.requests.com/',\n ))\n def test_bypass_no_proxy_keyword(self, url):\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) == {}\n\n @pytest.mark.parametrize(\n 'url', (\n 'http://192.168.0.1:5000/',\n 'http://192.168.0.1/',\n 'http://172.16.1.1/',\n 'http://172.16.1.1:5000/',\n 'http://localhost.localdomain:5000/v1.0/',\n ))\n def test_not_bypass_no_proxy_keyword(self, url, monkeypatch):\n # This is testing that the 'no_proxy' argument overrides the\n # environment variable 'no_proxy'\n monkeypatch.setenv('http_proxy', 'http://proxy.example.com:3128/')\n no_proxy = '192.168.1.1,requests.com'\n assert get_environ_proxies(url, no_proxy=no_proxy) != {}\n\n\nclass TestIsIPv4Address:\n\n def test_valid(self):\n assert is_ipv4_address('8.8.8.8')\n\n @pytest.mark.parametrize('value', ('8.8.8.8.8', 'localhost.localdomain'))\n def test_invalid(self, value):\n assert not is_ipv4_address(value)\n\n\nclass TestIsValidCIDR:\n\n def test_valid(self):\n assert is_valid_cidr('192.168.1.0/24')\n\n @pytest.mark.parametrize(\n 'value', (\n '8.8.8.8',\n '192.168.1.0/a',\n '192.168.1.0/128',\n '192.168.1.0/-1',\n '192.168.1.999/24',\n ))\n def test_invalid(self, value):\n assert not is_valid_cidr(value)\n\n\nclass TestAddressInNetwork:\n\n def test_valid(self):\n assert address_in_network('192.168.1.1', '192.168.1.0/24')\n\n def test_invalid(self):\n assert not address_in_network('172.16.0.1', '192.168.1.0/24')\n\n\nclass TestGuessFilename:\n\n @pytest.mark.parametrize(\n 'value', (1, type('Fake', (object,), {'name': 1})()),\n )\n def test_guess_filename_invalid(self, value):\n assert guess_filename(value) is None\n\n @pytest.mark.parametrize(\n 'value, expected_type', (\n (b'value', compat.bytes),\n (b'value'.decode('utf-8'), compat.str)\n ))\n def test_guess_filename_valid(self, value, expected_type):\n obj = type('Fake', (object,), {'name': value})()\n result = guess_filename(obj)\n assert result == value\n assert 
isinstance(result, expected_type)\n\n\nclass TestExtractZippedPaths:\n\n @pytest.mark.parametrize(\n 'path', (\n '/',\n __file__,\n pytest.__file__,\n '/etc/invalid/location',\n ))\n def test_unzipped_paths_unchanged(self, path):\n assert path == extract_zipped_paths(path)\n\n def test_zipped_paths_extracted(self, tmpdir):\n zipped_py = tmpdir.join('test.zip')\n with zipfile.ZipFile(zipped_py.strpath, 'w') as f:\n f.write(__file__)\n\n _, name = os.path.splitdrive(__file__)\n zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r'\\/'))\n extracted_path = extract_zipped_paths(zipped_path)\n\n assert extracted_path != zipped_path\n assert os.path.exists(extracted_path)\n assert filecmp.cmp(extracted_path, __file__)\n\n\nclass TestContentEncodingDetection:\n\n def test_none(self):\n encodings = get_encodings_from_content('')\n assert not len(encodings)\n\n @pytest.mark.parametrize(\n 'content', (\n # HTML5 meta charset attribute\n '<meta charset=\"UTF-8\">',\n # HTML4 pragma directive\n '<meta http-equiv=\"Content-type\" content=\"text/html;charset=UTF-8\">',\n # XHTML 1.x served with text/html MIME type\n '<meta http-equiv=\"Content-type\" content=\"text/html;charset=UTF-8\" />',\n # XHTML 1.x served as XML\n '<?xml version=\"1.0\" encoding=\"UTF-8\"?>',\n ))\n def test_pragmas(self, content):\n encodings = get_encodings_from_content(content)\n assert len(encodings) == 1\n assert encodings[0] == 'UTF-8'\n\n def test_precedence(self):\n content = '''\n <?xml version=\"1.0\" encoding=\"XML\"?>\n <meta charset=\"HTML5\">\n <meta http-equiv=\"Content-type\" content=\"text/html;charset=HTML4\" />\n '''.strip()\n assert get_encodings_from_content(content) == ['HTML5', 'HTML4', 'XML']\n\n\nclass TestGuessJSONUTF:\n\n @pytest.mark.parametrize(\n 'encoding', (\n 'utf-32', 'utf-8-sig', 'utf-16', 'utf-8', 'utf-16-be', 'utf-16-le',\n 'utf-32-be', 'utf-32-le'\n ))\n def test_encoded(self, encoding):\n data = '{}'.encode(encoding)\n assert guess_json_utf(data) == encoding\n\n def test_bad_utf_like_encoding(self):\n assert guess_json_utf(b'\\x00\\x00\\x00\\x00') is None\n\n @pytest.mark.parametrize(\n ('encoding', 'expected'), (\n ('utf-16-be', 'utf-16'),\n ('utf-16-le', 'utf-16'),\n ('utf-32-be', 'utf-32'),\n ('utf-32-le', 'utf-32')\n ))\n def test_guess_by_bom(self, encoding, expected):\n data = u'\\ufeff{}'.encode(encoding)\n assert guess_json_utf(data) == expected\n\n\nUSER = PASSWORD = \"%!*'();:@&=+$,/?#[] \"\nENCODED_USER = compat.quote(USER, '')\nENCODED_PASSWORD = compat.quote(PASSWORD, '')\n\n\n@pytest.mark.parametrize(\n 'url, auth', (\n (\n 'http://' + ENCODED_USER + ':' + ENCODED_PASSWORD + '@' +\n 'request.com/url.html#test',\n (USER, PASSWORD)\n ),\n (\n 'http://user:pass@complex.url.com/path?query=yes',\n ('user', 'pass')\n ),\n (\n 'http://user:pass%20pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user:pass pass@complex.url.com/path?query=yes',\n ('user', 'pass pass')\n ),\n (\n 'http://user%25user:pass@complex.url.com/path?query=yes',\n ('user%user', 'pass')\n ),\n (\n 'http://user:pass%23pass@complex.url.com/path?query=yes',\n ('user', 'pass#pass')\n ),\n (\n 'http://complex.url.com/path?query=yes',\n ('', '')\n ),\n ))\ndef test_get_auth_from_url(url, auth):\n assert get_auth_from_url(url) == auth\n\n\n@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Ensure requoting doesn't break expectations\n 'http://example.com/fiz?buz=%25ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n (\n # Ensure we handle unquoted percent signs in 
redirects\n 'http://example.com/fiz?buz=%ppicture',\n 'http://example.com/fiz?buz=%25ppicture',\n ),\n ))\ndef test_requote_uri_with_unquoted_percents(uri, expected):\n \"\"\"See: https://github.com/requests/requests/issues/2356\"\"\"\n assert requote_uri(uri) == expected\n\n\n@pytest.mark.parametrize(\n 'uri, expected', (\n (\n # Illegal bytes\n 'http://example.com/?a=%--',\n 'http://example.com/?a=%--',\n ),\n (\n # Reserved characters\n 'http://example.com/?a=%300',\n 'http://example.com/?a=00',\n )\n ))\ndef test_unquote_unreserved(uri, expected):\n assert unquote_unreserved(uri) == expected\n\n\n@pytest.mark.parametrize(\n 'mask, expected', (\n (8, '255.0.0.0'),\n (24, '255.255.255.0'),\n (25, '255.255.255.128'),\n ))\ndef test_dotted_netmask(mask, expected):\n assert dotted_netmask(mask) == expected\n\n\nhttp_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy'}\nall_proxies = {'all': 'socks5://http.proxy',\n 'all://some.host': 'socks5://some.host.proxy'}\nmixed_proxies = {'http': 'http://http.proxy',\n 'http://some.host': 'http://some.host.proxy',\n 'all': 'socks5://http.proxy'}\n@pytest.mark.parametrize(\n 'url, expected, proxies', (\n ('hTTp://u:p@Some.Host/path', 'http://some.host.proxy', http_proxies),\n ('hTTp://u:p@Other.Host/path', 'http://http.proxy', http_proxies),\n ('hTTp:///path', 'http://http.proxy', http_proxies),\n ('hTTps://Other.Host', None, http_proxies),\n ('file:///etc/motd', None, http_proxies),\n\n ('hTTp://u:p@Some.Host/path', 'socks5://some.host.proxy', all_proxies),\n ('hTTp://u:p@Other.Host/path', 'socks5://http.proxy', all_proxies),\n ('hTTp:///path', 'socks5://http.proxy', all_proxies),\n ('hTTps://Other.Host', 'socks5://http.proxy', all_proxies),\n\n ('http://u:p@other.host/path', 'http://http.proxy', mixed_proxies),\n ('http://u:p@some.host/path', 'http://some.host.proxy', mixed_proxies),\n ('https://u:p@other.host/path', 'socks5://http.proxy', mixed_proxies),\n ('https://u:p@some.host/path', 'socks5://http.proxy', mixed_proxies),\n ('https://', 'socks5://http.proxy', mixed_proxies),\n # XXX: unsure whether this is reasonable behavior\n ('file:///etc/motd', 'socks5://http.proxy', all_proxies),\n ))\ndef test_select_proxies(url, expected, proxies):\n \"\"\"Make sure we can select per-host proxies correctly.\"\"\"\n assert select_proxy(url, proxies) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('foo=\"is a fish\", bar=\"as well\"', {'foo': 'is a fish', 'bar': 'as well'}),\n ('key_without_value', {'key_without_value': None})\n ))\ndef test_parse_dict_header(value, expected):\n assert parse_dict_header(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n 'application/xml',\n ('application/xml', {})\n ),\n (\n 'application/json ; charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'application/json ; Charset=utf-8',\n ('application/json', {'charset': 'utf-8'})\n ),\n (\n 'text/plain',\n ('text/plain', {})\n ),\n (\n 'multipart/form-data; boundary = something ; boundary2=\\'something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; boundary2=\"something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \\'boundary2=something_else\\' ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 
'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'multipart/form-data; boundary = something ; \"boundary2=something_else\" ; no_equals ',\n ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True})\n ),\n (\n 'application/json ; ; ',\n ('application/json', {})\n )\n ))\ndef test__parse_content_type_header(value, expected):\n assert _parse_content_type_header(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n CaseInsensitiveDict(),\n None\n ),\n (\n CaseInsensitiveDict({'content-type': 'application/json; charset=utf-8'}),\n 'utf-8'\n ),\n (\n CaseInsensitiveDict({'content-type': 'text/plain'}),\n 'ISO-8859-1'\n ),\n ))\ndef test_get_encoding_from_headers(value, expected):\n assert get_encoding_from_headers(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, length', (\n ('', 0),\n ('T', 1),\n ('Test', 4),\n ('Cont', 0),\n ('Other', -5),\n ('Content', None),\n ))\ndef test_iter_slices(value, length):\n if length is None or (length <= 0 and len(value) > 0):\n # Reads all content at once\n assert len(list(iter_slices(value, length))) == 1\n else:\n assert len(list(iter_slices(value, 1))) == length\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (\n '<http:/.../front.jpeg>; rel=front; type=\"image/jpeg\"',\n [{'url': 'http:/.../front.jpeg', 'rel': 'front', 'type': 'image/jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>;',\n [{'url': 'http:/.../front.jpeg'}]\n ),\n (\n '<http:/.../front.jpeg>; type=\"image/jpeg\",<http://.../back.jpeg>;',\n [\n {'url': 'http:/.../front.jpeg', 'type': 'image/jpeg'},\n {'url': 'http://.../back.jpeg'}\n ]\n ),\n (\n '',\n []\n ),\n ))\ndef test_parse_header_links(value, expected):\n assert parse_header_links(value) == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('example.com/path', 'http://example.com/path'),\n ('//example.com/path', 'http://example.com/path'),\n ))\ndef test_prepend_scheme_if_needed(value, expected):\n assert prepend_scheme_if_needed(value, 'http') == expected\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n ('T', 'T'),\n (b'T', 'T'),\n (u'T', 'T'),\n ))\ndef test_to_native_string(value, expected):\n assert to_native_string(value) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://u:p@example.com/path?a=1#test', 'http://example.com/path?a=1'),\n ('http://example.com/path', 'http://example.com/path'),\n ('//u:p@example.com/path', '//example.com/path'),\n ('//example.com/path', '//example.com/path'),\n ('example.com/path', '//example.com/path'),\n ('scheme:u:p@example.com/path', 'scheme://example.com/path'),\n ))\ndef test_urldefragauth(url, expected):\n assert urldefragauth(url) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://google.com:6000/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ))\ndef test_should_bypass_proxies(url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not\n \"\"\"\n monkeypatch.setenv('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n monkeypatch.setenv('NO_PROXY', 
'192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000')\n assert should_bypass_proxies(url, no_proxy=None) == expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://172.16.1.1/', '172.16.1.1'),\n ('http://172.16.1.1:5000/', '172.16.1.1'),\n ('http://user:pass@172.16.1.1', '172.16.1.1'),\n ('http://user:pass@172.16.1.1:5000', '172.16.1.1'),\n ('http://hostname/', 'hostname'),\n ('http://hostname:5000/', 'hostname'),\n ('http://user:pass@hostname', 'hostname'),\n ('http://user:pass@hostname:5000', 'hostname'),\n ))\ndef test_should_bypass_proxies_pass_only_hostname(url, expected, mocker):\n \"\"\"The proxy_bypass function should be called with a hostname or IP without\n a port number or auth credentials.\n \"\"\"\n proxy_bypass = mocker.patch('requests.utils.proxy_bypass')\n should_bypass_proxies(url, no_proxy=None)\n proxy_bypass.assert_called_once_with(expected)\n\n\n@pytest.mark.parametrize(\n 'cookiejar', (\n compat.cookielib.CookieJar(),\n RequestsCookieJar()\n ))\ndef test_add_dict_to_cookiejar(cookiejar):\n \"\"\"Ensure add_dict_to_cookiejar works for\n non-RequestsCookieJar CookieJars\n \"\"\"\n cookiedict = {'test': 'cookies',\n 'good': 'cookies'}\n cj = add_dict_to_cookiejar(cookiejar, cookiedict)\n cookies = dict((cookie.name, cookie.value) for cookie in cj)\n assert cookiedict == cookies\n\n\n@pytest.mark.parametrize(\n 'value, expected', (\n (u'test', True),\n (u'æíöû', False),\n (u'ジェーピーニック', False),\n )\n)\ndef test_unicode_is_ascii(value, expected):\n assert unicode_is_ascii(value) is expected\n\n\n@pytest.mark.parametrize(\n 'url, expected', (\n ('http://192.168.0.1:5000/', True),\n ('http://192.168.0.1/', True),\n ('http://172.16.1.1/', True),\n ('http://172.16.1.1:5000/', True),\n ('http://localhost.localdomain:5000/v1.0/', True),\n ('http://172.16.1.12/', False),\n ('http://172.16.1.12:5000/', False),\n ('http://google.com:5000/v1.0/', False),\n ))\ndef test_should_bypass_proxies_no_proxy(\n url, expected, monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not using the 'no_proxy' argument\n \"\"\"\n no_proxy = '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1'\n # Test 'no_proxy' argument\n assert should_bypass_proxies(url, no_proxy=no_proxy) == expected\n\n\n@pytest.mark.skipif(os.name != 'nt', reason='Test only on Windows')\n@pytest.mark.parametrize(\n 'url, expected, override', (\n ('http://192.168.0.1:5000/', True, None),\n ('http://192.168.0.1/', True, None),\n ('http://172.16.1.1/', True, None),\n ('http://172.16.1.1:5000/', True, None),\n ('http://localhost.localdomain:5000/v1.0/', True, None),\n ('http://172.16.1.22/', False, None),\n ('http://172.16.1.22:5000/', False, None),\n ('http://google.com:5000/v1.0/', False, None),\n ('http://mylocalhostname:5000/v1.0/', True, '<local>'),\n ('http://192.168.0.1/', False, ''),\n ))\ndef test_should_bypass_proxies_win_registry(url, expected, override,\n monkeypatch):\n \"\"\"Tests for function should_bypass_proxies to check if proxy\n can be bypassed or not with Windows registry settings\n \"\"\"\n if override is None:\n override = '192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1'\n if compat.is_py3:\n import winreg\n else:\n import _winreg as winreg\n\n class RegHandle:\n def Close(self):\n pass\n\n ie_settings = RegHandle()\n proxyEnableValues = deque([1, \"1\"])\n\n def OpenKey(key, subkey):\n return ie_settings\n\n def QueryValueEx(key, value_name):\n if key is ie_settings:\n if value_name == 'ProxyEnable':\n # this 
could be a string (REG_SZ) or a 32-bit number (REG_DWORD)\n proxyEnableValues.rotate()\n return [proxyEnableValues[0]]\n elif value_name == 'ProxyOverride':\n return [override]\n\n monkeypatch.setenv('http_proxy', '')\n monkeypatch.setenv('https_proxy', '')\n monkeypatch.setenv('ftp_proxy', '')\n monkeypatch.setenv('no_proxy', '')\n monkeypatch.setenv('NO_PROXY', '')\n monkeypatch.setattr(winreg, 'OpenKey', OpenKey)\n monkeypatch.setattr(winreg, 'QueryValueEx', QueryValueEx)\n assert should_bypass_proxies(url, None) == expected\n\n\n@pytest.mark.parametrize(\n 'env_name, value', (\n ('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('no_proxy', None),\n ('a_new_key', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),\n ('a_new_key', None),\n ))\ndef test_set_environ(env_name, value):\n \"\"\"Tests set_environ will set environ values and will restore the environ.\"\"\"\n environ_copy = copy.deepcopy(os.environ)\n with set_environ(env_name, value):\n assert os.environ.get(env_name) == value\n\n assert os.environ == environ_copy\n\n\ndef test_set_environ_raises_exception():\n \"\"\"Tests set_environ will raise exceptions in context when the\n value parameter is None.\"\"\"\n with pytest.raises(Exception) as exception:\n with set_environ('test1', None):\n raise Exception('Expected exception')\n\n assert 'Expected exception' in str(exception.value)\n",
"path": "tests/test_utils.py"
}
] | 13_9 | python | import sys
import pytest
@pytest.mark.parametrize(
"url, expected",
(
("file:///some/path/on/disk", True),
),
)
def test_should_bypass_proxies(url, expected, monkeypatch):
from requests.utils import should_bypass_proxies
"""Tests for function should_bypass_proxies to check if proxy
can be bypassed or not
"""
monkeypatch.setenv(
"no_proxy",
"192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000",
)
monkeypatch.setenv(
"NO_PROXY",
"192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000",
)
assert should_bypass_proxies(url, no_proxy=None) == expected
def main():
# Run the pytest tests programmatically
exit_code = pytest.main(["-v", __file__])
# Exit with status code 1 if any test fails, otherwise 0
if exit_code != 0:
sys.exit(1)
else:
sys.exit(0)
if __name__ == '__main__':
main()
|
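Editorial note: as a quick illustration of the hostname guard the 13_9 row above asks for, here is a minimal sketch assuming the stdlib `urlparse` flow used by the patch. The function name is hypothetical and the surrounding no_proxy / proxy_bypass logic is elided; this is not the repository's canonical implementation.

from urllib.parse import urlparse

def bypasses_proxies_sketch(url):
    # Sketch only: file:// and similar URLs parse with no hostname,
    # so there is nothing for a proxy to route and we bypass it.
    parsed = urlparse(url)
    if parsed.hostname is None:
        return True
    # ...the real function would continue with no_proxy and
    # proxy_bypass checks here...
    return False

assert bypasses_proxies_sketch("file:///some/path/on/disk")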
https://github.com/teamqurrent/citrus | During querying, if `filters` is None, we should pass None as the `filter_function`. Otherwise, the function that is passed causes a bug when no filters are present. Making changes in the `citrusdb/api/local.py` file should fix it. There is no need to run tests on the resulting script, since the test environment is not ready yet. | eb8fb28 | toml
numpy
hnswlib
| python3.9 | cb6878c | diff --git a/citrusdb/api/local.py b/citrusdb/api/local.py
--- a/citrusdb/api/local.py
+++ b/citrusdb/api/local.py
@@ -165,7 +165,7 @@ class LocalAPI:
documents=documents,
query_embeddings=query_embeddings,
k=k,
- filter_function=filter_function
+ filter_function=None if filters is None else filter_function
)
if flag:
| [
{
"content": "import os\nimport json\nfrom typing import Dict, List, Optional\nfrom numpy import float32\nfrom numpy._typing import NDArray\nimport shutil\n\nfrom citrusdb.api.index import Index\nfrom citrusdb.db.sqlite.db import DB\n\n\nclass LocalAPI:\n _db: Dict[str, Index] \n _sqlClient: DB\n persist_directory: Optional[str]\n _TEMP_DIRECTORY = \"citrus_temp\"\n\n def __init__(self, persist_directory: Optional[str] = None):\n self._db = {}\n self.persist_directory = persist_directory\n\n if not(persist_directory) and os.path.isdir(self._TEMP_DIRECTORY):\n # Cleanup previous sqlite data\n shutil.rmtree(self._TEMP_DIRECTORY)\n\n self._sqlClient = DB(persist_directory if persist_directory else self._TEMP_DIRECTORY)\n\n def create_index(\n self,\n name: str,\n max_elements: int = 1000,\n M: int = 64,\n ef_construction: int = 200,\n allow_replace_deleted: bool = False,\n ):\n if not(self._sqlClient.get_index_details(name)):\n self._sqlClient.create_index(\n name,\n max_elements,\n M,\n ef_construction,\n allow_replace_deleted\n )\n\n self._db[name] = Index(\n name=name,\n max_elements=max_elements,\n persist_directory=self.persist_directory,\n M=M,\n ef_construction=ef_construction,\n allow_replace_deleted=allow_replace_deleted\n )\n\n def add(\n self,\n index: str,\n ids,\n documents: Optional[List[str]] = None,\n embeddings: Optional[NDArray[float32]] = None,\n metadatas: Optional[List[Dict]] = None\n ):\n \"\"\"\n Insert embeddings/text documents\n index: Name of index\n ids: Unique ID for each element\n documents: List of strings to index\n embeddings: List of embeddings to index\n metadatas: Additional metadata for each vector\n \"\"\"\n\n if embeddings is None and documents is None:\n raise ValueError(\"Please provide either embeddings or documents.\")\n\n index_details = self._sqlClient.get_index_details(index)\n if index_details is None:\n raise ValueError(f\"Index with name '{index}' does not exist.\")\n\n if (documents is not None) and (embeddings is None):\n from citrusdb.embedding.openai import get_embeddings\n\n embeddings = get_embeddings(documents)\n\n if embeddings is not None:\n embedding_dim = len(embeddings[0])\n index_id = index_details[0]\n index_dim = index_details[2]\n replace_deleted = True if index_details[7] else False\n\n # Check whether the dimensions are equal\n if embedding_dim != index_dim:\n raise ValueError(\n f\"Embedding dimenstion ({embedding_dim}) and index \"\n + f\"dimension ({index_dim}) do not match.\"\n )\n\n # Ensure no of ids = no of embeddings\n if len(ids) != len(embeddings):\n raise ValueError(f\"Number of embeddings\" + \" and ids are different.\")\n\n data = []\n for i in range(len(ids)):\n row = (\n ids[i],\n index_id,\n None if documents is None else documents[i],\n embeddings[i],\n None if metadatas is None else json.dumps(metadatas[i])\n )\n data.append(row + row)\n\n # Insert data into sqlite\n self._sqlClient.insert_to_index(data)\n\n # Index vectors\n self._db[index].add(\n ids=ids,\n embeddings=embeddings,\n replace_deleted=replace_deleted\n )\n\n def delete_vectors(\n self,\n index: str,\n ids: List[int],\n ):\n index_details = self._sqlClient.get_index_details(index)\n if index_details is None:\n raise ValueError(f\"Could not find index: {index}\")\n\n index_id = index_details[0]\n self._sqlClient.delete_vectors_from_index(\n index_id=index_id,\n ids=ids\n )\n\n self._db[index].delete_vectors(ids)\n\n def set_ef(self, index: str, ef: int):\n index_details = self._sqlClient.get_index_details(index)\n if index_details is None:\n raise 
ValueError(f\"Could not find index: {index}\")\n\n self._sqlClient.update_ef(index, ef)\n self._db[index].set_ef(ef)\n\n def query(\n self,\n index: str,\n documents: Optional[List[str]] = None,\n query_embeddings: Optional[NDArray[float32]] = None,\n k=1,\n filters: Optional[List[Dict]] = None\n ):\n allowed_ids = []\n if filters is not None:\n allowed_ids = self._sqlClient.filter_vectors(index, filters)\n\n filter_function = lambda label: str(label) in allowed_ids\n\n flag = 1\n for key in self._db.keys():\n if key == index:\n flag = 0\n return self._db[key].query(\n documents=documents,\n query_embeddings=query_embeddings,\n k=k,\n filter_function=filter_function\n )\n\n if flag:\n raise ValueError(f\"Could not find index: {index}\")\n\n\n def get_status(self, index: str):\n flag = 1\n for key in self._db.keys():\n if key == index:\n flag = 0\n self._db[key].get_status()\n\n if flag:\n raise ValueError(f\"Could not find index: {index}\")\n",
"path": "citrusdb/api/local.py"
}
] | 1_0 | python | import sys
import unittest
from unittest.mock import patch
class TestLocalAPIQuery(unittest.TestCase):
def test_query_with_none_filters(self):
from citrusdb.api.index import Index
from citrusdb.api.local import LocalAPI
api = LocalAPI()
api._db["mock_index"] = Index("mock_index") # Add a mock index to the _db dictionary
# Test starts here
with patch.object(Index, "query", return_value=None) as mock_query:
api.query(index="mock_index", filters=None) # Call the query method with filters set to None
# Check that filter_function was set to None
self.assertIsNone(mock_query.call_args[1]["filter_function"], "filter_function was not set to None")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestLocalAPIQuery))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
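
Aside (illustrative, not part of the record above): a minimal sketch of the behavior the 1_0 test exercises — calling LocalAPI.query with filters=None should forward filter_function=None to the underlying index. The index name and embedding values are placeholders, and vectors would need to be added before a real query returns hits.

from citrusdb.api.local import LocalAPI

api = LocalAPI()  # no persist_directory: backed by a temporary SQLite directory
api.create_index("docs", max_elements=10)
# ... add vectors with api.add(...) before querying ...
# With filters=None, the patched query() passes filter_function=None,
# so the index searches without any label filtering.
results, distances = api.query(index="docs", query_embeddings=[[0.1] * 1536], k=1, filters=None)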
|
https://github.com/teamqurrent/citrus | The cloud-hosted version needs to be able to reload indices automatically on restart. Add a method to the local API that fetches metadata for all indices from the SQLite database and loads these indices into memory. What should be added: a reload_indices method to `citrusdb/api/local.py`, get_indices and get_index_details methods to `citrusdb/db/sqlite/db.py`, and a GET_ALL_INDEX_DETAILS SQL query to `citrusdb/db/sqlite/queries.py` | 80bee4d | toml
numpy
hnswlib
| python3.9 | cb41ea8 | diff --git a/citrusdb/api/local.py b/citrusdb/api/local.py
--- a/citrusdb/api/local.py
+++ b/citrusdb/api/local.py
@@ -135,6 +135,25 @@ class LocalAPI:
self._db[index].delete_vectors(ids)
+ def reload_indices(self):
+ """
+ Load all indices from disk to memory
+ """
+
+ indices = self._sqlClient.get_indices()
+ for index in indices:
+ index_name = index[1]
+ # Load index
+ self.create_index(
+ name=index_name,
+ max_elements=index[3],
+ M=index[4],
+ ef_construction=index[6],
+ allow_replace_deleted=index[7]
+ )
+ # Set ef value
+ self._db[index_name].set_ef(index[5])
+
def set_ef(self, index: str, ef: int):
index_details = self._sqlClient.get_index_details(index)
if index_details is None:
diff --git a/citrusdb/db/sqlite/db.py b/citrusdb/db/sqlite/db.py
--- a/citrusdb/db/sqlite/db.py
+++ b/citrusdb/db/sqlite/db.py
@@ -32,17 +32,6 @@ class DB:
''')
cur.close()
- def get_index_details(
- self,
- name: str
- ) -> Optional[Tuple[int, str, int, int, int, int, int, bool]]:
- cur = self._con.cursor()
- parameters = (name,)
- res = cur.execute(queries.GET_INDEX_DETAILS_BY_NAME, parameters)
- row = res.fetchone()
- cur.close()
- return row
-
def create_index(
self,
name: str,
@@ -79,6 +68,29 @@ class DB:
allowed_ids.append(row[0])
return allowed_ids
+ def get_indices(self):
+ """
+ Fetch all index details from index_manager table.
+ Returns a list of tuples where each one corresponds to an index.
+ """
+
+ cur = self._con.cursor()
+ res = cur.execute(queries.GET_ALL_INDEX_DETAILS)
+ rows = res.fetchall()
+ cur.close()
+ return rows
+
+ def get_index_details(
+ self,
+ name: str
+ ) -> Optional[Tuple[int, str, int, int, int, int, int, bool]]:
+ cur = self._con.cursor()
+ parameters = (name,)
+ res = cur.execute(queries.GET_INDEX_DETAILS_BY_NAME, parameters)
+ row = res.fetchone()
+ cur.close()
+ return row
+
def insert_to_index(
self,
data
diff --git a/citrusdb/db/sqlite/queries.py b/citrusdb/db/sqlite/queries.py
--- a/citrusdb/db/sqlite/queries.py
+++ b/citrusdb/db/sqlite/queries.py
@@ -28,6 +28,11 @@ DELETE FROM index_data
WHERE id IN ({}) AND index_id = ?
'''
+GET_ALL_INDEX_DETAILS = '''
+SELECT index_id, name, dimensions, max_elements, m, ef, ef_construction, allow_replace_deleted
+FROM index_manager
+'''
+
GET_INDEX_DETAILS_BY_NAME = '''
SELECT index_id, name, dimensions, max_elements, m, ef, ef_construction, allow_replace_deleted
FROM index_manager
| [
{
"content": "import os\nimport json\nfrom typing import Dict, List, Optional\nfrom numpy import float32\nfrom numpy._typing import NDArray\nimport shutil\n\nfrom citrusdb.api.index import Index\nfrom citrusdb.db.sqlite.db import DB\n\n\nclass LocalAPI:\n _db: Dict[str, Index] \n _sqlClient: DB\n persist_directory: Optional[str]\n _TEMP_DIRECTORY = \"citrus_temp\"\n\n def __init__(self, persist_directory: Optional[str] = None):\n self._db = {}\n self.persist_directory = persist_directory\n\n if not(persist_directory) and os.path.isdir(self._TEMP_DIRECTORY):\n # Cleanup previous sqlite data\n shutil.rmtree(self._TEMP_DIRECTORY)\n\n self._sqlClient = DB(persist_directory if persist_directory else self._TEMP_DIRECTORY)\n\n def create_index(\n self,\n name: str,\n max_elements: int = 1000,\n M: int = 64,\n ef_construction: int = 200,\n allow_replace_deleted: bool = False,\n ):\n if not(self._sqlClient.get_index_details(name)):\n self._sqlClient.create_index(\n name,\n max_elements,\n M,\n ef_construction,\n allow_replace_deleted\n )\n\n self._db[name] = Index(\n name=name,\n max_elements=max_elements,\n persist_directory=self.persist_directory,\n M=M,\n ef_construction=ef_construction,\n allow_replace_deleted=allow_replace_deleted\n )\n\n def add(\n self,\n index: str,\n ids,\n documents: Optional[List[str]] = None,\n embeddings: Optional[NDArray[float32]] = None,\n metadatas: Optional[List[Dict]] = None\n ):\n \"\"\"\n Insert embeddings/text documents\n index: Name of index\n ids: Unique ID for each element\n documents: List of strings to index\n embeddings: List of embeddings to index\n metadatas: Additional metadata for each vector\n \"\"\"\n\n if embeddings is None and documents is None:\n raise ValueError(\"Please provide either embeddings or documents.\")\n\n index_details = self._sqlClient.get_index_details(index)\n if index_details is None:\n raise ValueError(f\"Index with name '{index}' does not exist.\")\n\n if (documents is not None) and (embeddings is None):\n from citrusdb.embedding.openai import get_embeddings\n\n embeddings = get_embeddings(documents)\n\n if embeddings is not None:\n embedding_dim = len(embeddings[0])\n index_id = index_details[0]\n index_dim = index_details[2]\n replace_deleted = True if index_details[7] else False\n\n # Check whether the dimensions are equal\n if embedding_dim != index_dim:\n raise ValueError(\n f\"Embedding dimenstion ({embedding_dim}) and index \"\n + f\"dimension ({index_dim}) do not match.\"\n )\n\n # Ensure no of ids = no of embeddings\n if len(ids) != len(embeddings):\n raise ValueError(f\"Number of embeddings\" + \" and ids are different.\")\n\n data = []\n for i in range(len(ids)):\n row = (\n ids[i],\n index_id,\n None if documents is None else documents[i],\n embeddings[i],\n None if metadatas is None else json.dumps(metadatas[i])\n )\n data.append(row + row)\n\n # Insert data into sqlite\n self._sqlClient.insert_to_index(data)\n\n # Index vectors\n self._db[index].add(\n ids=ids,\n embeddings=embeddings,\n replace_deleted=replace_deleted\n )\n\n def delete_vectors(\n self,\n index: str,\n ids: List[int],\n ):\n index_details = self._sqlClient.get_index_details(index)\n if index_details is None:\n raise ValueError(f\"Could not find index: {index}\")\n\n index_id = index_details[0]\n self._sqlClient.delete_vectors_from_index(\n index_id=index_id,\n ids=ids\n )\n\n self._db[index].delete_vectors(ids)\n\n def set_ef(self, index: str, ef: int):\n index_details = self._sqlClient.get_index_details(index)\n if index_details is None:\n raise 
ValueError(f\"Could not find index: {index}\")\n\n self._sqlClient.update_ef(index, ef)\n self._db[index].set_ef(ef)\n\n def query(\n self,\n index: str,\n documents: Optional[List[str]] = None,\n query_embeddings: Optional[NDArray[float32]] = None,\n k=1,\n filters: Optional[List[Dict]] = None\n ):\n allowed_ids = []\n if filters is not None:\n allowed_ids = self._sqlClient.filter_vectors(index, filters)\n\n filter_function = lambda label: str(label) in allowed_ids\n\n flag = 1\n for key in self._db.keys():\n if key == index:\n flag = 0\n return self._db[key].query(\n documents=documents,\n query_embeddings=query_embeddings,\n k=k,\n filter_function=None if filters is None else filter_function\n )\n\n if flag:\n raise ValueError(f\"Could not find index: {index}\")\n\n\n def get_status(self, index: str):\n flag = 1\n for key in self._db.keys():\n if key == index:\n flag = 0\n self._db[key].get_status()\n\n if flag:\n raise ValueError(f\"Could not find index: {index}\")\n",
"path": "citrusdb/api/local.py"
},
{
"content": "import os\nimport sqlite3\nfrom typing import Dict, List, Optional, Tuple\n\nfrom citrusdb.utils.utils import ensure_valid_path\nimport citrusdb.db.sqlite.queries as queries\nfrom citrusdb.db.sqlite.query_builder import QueryBuilder\n\n\nclass DB:\n _con: sqlite3.Connection\n\n def __init__(\n self,\n persist_directory: str,\n ):\n ensure_valid_path(persist_directory)\n\n self._con = sqlite3.connect(\n os.path.join(\n persist_directory, \"citrus.db\"\n )\n )\n\n cur = self._con.cursor()\n cur.execute(\"PRAGMA foreign_keys = ON\") # Enable foreign keys\n cur.executescript(f'''\n BEGIN;\n {queries.CREATE_INDEX_MANAGER_TABLE}\n {queries.CREATE_INDEX_DATA_TABLE}\n END;\n ''')\n cur.close()\n\n def get_index_details(\n self,\n name: str\n ) -> Optional[Tuple[int, str, int, int, int, int, int, bool]]:\n cur = self._con.cursor()\n parameters = (name,)\n res = cur.execute(queries.GET_INDEX_DETAILS_BY_NAME, parameters)\n row = res.fetchone()\n cur.close()\n return row\n\n def create_index(\n self,\n name: str,\n max_elements: int,\n M: int,\n ef_construction: int,\n allow_replace_deleted: bool,\n dimensions: Optional[int] = 1536,\n ):\n cur = self._con.cursor()\n ef = ef_construction\n parameters = (name, dimensions, max_elements, M, ef, ef_construction, allow_replace_deleted)\n cur.execute(queries.INSERT_INDEX_TO_MANAGER, parameters)\n self._con.commit()\n cur.close()\n\n def delete_vectors_from_index(\n self,\n index_id: int,\n ids: List[int]\n ):\n cur = self._con.cursor()\n query = queries.DELETE_VECTORS_FROM_INDEX.format(\", \".join(\"?\" * len(ids)))\n parameters = tuple(ids) + (index_id,)\n cur.execute(query, parameters)\n self._con.commit()\n cur.close()\n\n def filter_vectors(self, index_name: str, filters: List[Dict]):\n query_builder = QueryBuilder(self._con)\n res = query_builder.execute_query(index_name, filters)\n allowed_ids = []\n for row in res:\n allowed_ids.append(row[0])\n return allowed_ids\n\n def insert_to_index(\n self,\n data\n ):\n cur = self._con.cursor()\n cur.executemany(queries.INSERT_DATA_TO_INDEX, data)\n self._con.commit()\n cur.close()\n\n def update_ef(\n self,\n name: str,\n ef: int\n ):\n cur = self._con.cursor()\n parameters = (ef, name)\n cur.execute(queries.UPDATE_EF, parameters)\n self._con.commit()\n cur.close()\n",
"path": "citrusdb/db/sqlite/db.py"
},
{
"content": "CREATE_INDEX_MANAGER_TABLE = '''\nCREATE TABLE IF NOT EXISTS index_manager (\n index_id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE,\n dimensions INTEGER NOT NULL,\n max_elements INTEGER NOT NULL,\n m INTEGER NOT NULL,\n ef INTEGER NOT NULL,\n ef_construction INTEGER NOT NULL,\n allow_replace_deleted INTEGER NOT NULL\n);\n'''\n\nCREATE_INDEX_DATA_TABLE = '''\nCREATE TABLE IF NOT EXISTS index_data (\n id TEXT,\n index_id INTEGER,\n text TEXT,\n embedding BLOB NOT NULL,\n metadata TEXT,\n PRIMARY KEY(id, index_id),\n FOREIGN KEY(index_id) REFERENCES index_manager(index_id) ON DELETE CASCADE\n);\n'''\n\nDELETE_VECTORS_FROM_INDEX = '''\nDELETE FROM index_data\nWHERE id IN ({}) AND index_id = ?\n'''\n\nGET_INDEX_DETAILS_BY_NAME = '''\nSELECT index_id, name, dimensions, max_elements, m, ef, ef_construction, allow_replace_deleted\nFROM index_manager\nWHERE name = ?\n'''\n\nINSERT_DATA_TO_INDEX = '''\nINSERT INTO index_data\nVALUES(?, ?, ?, ?, ?)\nON CONFLICT(id, index_id)\nDO UPDATE SET id = ?, index_id = ?, text = ?, embedding = ?, metadata = ?\n'''\n\nINSERT_INDEX_TO_MANAGER = '''\nINSERT INTO index_manager\n(name, dimensions, max_elements, m, ef, ef_construction, allow_replace_deleted)\nVALUES (?, ?, ?, ?, ?, ?, ?);\n'''\n\nUPDATE_EF = '''\nUPDATE index_manager\nSET ef = ?\nWHERE name = ?\n'''\n",
"path": "citrusdb/db/sqlite/queries.py"
}
] | 1_1 | python | import sys
import unittest
import inspect
class TestCitrusDbQueries(unittest.TestCase):
def test_queries_and_methods(self):
from citrusdb.api.local import LocalAPI
from citrusdb.db.sqlite.db import DB
from citrusdb.db.sqlite.queries import GET_ALL_INDEX_DETAILS
# Check if GET_ALL_INDEX_DETAILS is non-empty and contains specific strings
self.assertTrue(GET_ALL_INDEX_DETAILS, "GET_ALL_INDEX_DETAILS is empty")
GET_ALL_INDEX_DETAILS_lower = GET_ALL_INDEX_DETAILS.lower()
# 'select *' presence does not lead to failure
if "select *" in GET_ALL_INDEX_DETAILS_lower:
print("Found query")
# Other strings' absence lead to failure
self.assertIn("index_manager", GET_ALL_INDEX_DETAILS_lower, "index_manager not found in query")
self.assertIn("index_id", GET_ALL_INDEX_DETAILS_lower, "index_id not found in query")
self.assertIn("name", GET_ALL_INDEX_DETAILS_lower, "name not found in query")
self.assertIn("dimensions", GET_ALL_INDEX_DETAILS_lower, "dimensions not found in query")
self.assertIn("max_elements", GET_ALL_INDEX_DETAILS_lower, "max_elements not found in query")
self.assertIn("m", GET_ALL_INDEX_DETAILS_lower, "m not found in query")
self.assertIn("ef_construction", GET_ALL_INDEX_DETAILS_lower, "ef_construction not found in query")
self.assertIn("ef", GET_ALL_INDEX_DETAILS_lower, "ef not found in query")
self.assertIn("allow_replace_deleted", GET_ALL_INDEX_DETAILS_lower, "allow_replace_deleted not found in query")
# Check methods and their arguments
self.assertTrue(hasattr(LocalAPI, "reload_indices"), "LocalAPI.reload_indices method not found")
self.assertEqual(inspect.getfullargspec(LocalAPI.reload_indices).args, ["self"], "Arguments mismatch in LocalAPI.reload_indices")
self.assertTrue(hasattr(DB, "get_indices"), "DB.get_indices method not found")
self.assertEqual(inspect.getfullargspec(DB.get_indices).args, ["self"], "Arguments mismatch in DB.get_indices")
self.assertTrue(hasattr(DB, "get_index_details"), "DB.get_index_details method not found")
self.assertEqual(inspect.getfullargspec(DB.get_index_details).args, ["self", "name"], "Arguments mismatch in DB.get_index_details")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCitrusDbQueries))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
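
Aside (illustrative, not part of the record): a hedged sketch of how the cloud-hosted service motivating this instruction might call the new method on restart; the persist directory path is a placeholder.

from citrusdb.api.local import LocalAPI

# On process restart, rebuild every index recorded in the SQLite index_manager table.
api = LocalAPI(persist_directory="/data/citrus")  # hypothetical path
api.reload_indices()  # recreates each index and restores its saved ef value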
|
https://github.com/teamqurrent/citrus | Update `pyproject.toml` and `setup.py` to bump the package version to 0.4.0. Also add psycopg[c] and psycopg[pool] to the list of packages that need to be installed for citrus to run properly. Lastly, add citrusdb.db.postgres to the packages list after the find_packages function call in `setup.py` | 8cf4263 | toml
numpy
hnswlib
| python3.9 | 7c58ccb | diff --git a/pyproject.toml b/pyproject.toml
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "citrusdb"
-version = "0.3.3"
+version = "0.4.0"
authors = [
{ name="Debabrata Mondal", email="debabrata.js@protonmail.com" },
@@ -17,6 +17,8 @@ dependencies = [
'hnswlib >= 0.7',
'numpy >= 1.21.6',
'openai >= 0.27',
+ 'psycopg[c] >= 3.1.9',
+ 'psycopg[pool] >= 3.1.7'
]
[project.urls]
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
setup(
name="citrusdb",
- version="0.3.3",
+ version="0.4.0",
author="Debabrata Mondal",
author_email="debabrata.js@protonmail.com",
description="(distributed) vector database",
@@ -16,13 +16,15 @@ setup(
find_packages(
exclude=["demo"]
) +
- ["citrusdb.db.index", "citrusdb.db.sqlite"]
+ ["citrusdb.db.index", "citrusdb.db.sqlite", "citrusdb.db.postgres"]
),
include_package_data=True,
install_requires=[
"numpy",
"hnswlib",
"openai",
+ "psycopg[c]",
+ "psycopg[pool]"
],
classifiers=[
"Programming Language :: Python :: 3",
| [
{
"content": "[project]\nname = \"citrusdb\"\nversion = \"0.3.3\"\n\nauthors = [\n { name=\"Debabrata Mondal\", email=\"debabrata.js@protonmail.com\" },\n]\ndescription = \"open-source vector database. store and retrieve embeddings for your next project!\"\nreadme = \"README.md\"\nrequires-python = \">=3.7\"\nclassifiers = [\n \"Programming Language :: Python :: 3\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n]\ndependencies = [\n 'hnswlib >= 0.7',\n 'numpy >= 1.21.6',\n 'openai >= 0.27',\n]\n\n[project.urls]\n\"Homepage\" = \"https://github.com/0xDebabrata/citrus\"\n\n[build-system]\nrequires = [\"setuptools>=61.0\"]\nbuild-backend = \"setuptools.build_meta\"\n",
"path": "pyproject.toml"
},
{
"content": "from setuptools import setup, find_packages\n\nwith open(\"README.md\", \"r\") as fh:\n long_description = fh.read()\n\nsetup(\n name=\"citrusdb\",\n version=\"0.3.3\",\n author=\"Debabrata Mondal\",\n author_email=\"debabrata.js@protonmail.com\",\n description=\"(distributed) vector database\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/0xDebabrata/citrus\",\n packages=(\n find_packages(\n exclude=[\"demo\"]\n ) +\n [\"citrusdb.db.index\", \"citrusdb.db.sqlite\"]\n ),\n include_package_data=True,\n install_requires=[\n \"numpy\",\n \"hnswlib\",\n \"openai\",\n ],\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n ],\n python_requires='>=3.7',\n)\n",
"path": "setup.py"
}
] | 1_2 | python | import sys
import unittest
import ast
import toml
class TestTomlAndSetup(unittest.TestCase):
def test_toml_and_setup_file(self):
# Test toml file
parsed_toml = toml.load("pyproject.toml")
self.assertEqual(parsed_toml["project"]["version"], "0.4.0", "version wrong")
count = 0
dependencies = parsed_toml["project"]["dependencies"]
for dep in dependencies:
if dep.find("psycopg[c]") != -1 or dep.find("psycopg[pool]") != -1:
count += 1
self.assertEqual(count, 2, "Wrong dependencies")
# Test setup.py file
with open("setup.py", "r") as setup_file:
content = setup_file.read()
tree = ast.parse(content)
version_number = None
install_requires = []
packages = []
for node in ast.walk(tree):
if isinstance(node, ast.keyword):
if node.arg == "version" and isinstance(node.value, ast.Constant):
version_number = node.value.value
if isinstance(node, ast.Call) and isinstance(node.func, ast.Name) and node.func.id == "setup":
for keyword in node.keywords:
if keyword.arg == "install_requires":
if isinstance(keyword.value, ast.List):
for item in keyword.value.elts:
if isinstance(item, ast.Str):
install_requires.append(item.s)
if keyword.arg == "packages":
if isinstance(keyword.value, ast.BinOp) and isinstance(keyword.value.op, ast.Add):
right_list = keyword.value.right
if isinstance(right_list, ast.List):
for item in right_list.elts:
if isinstance(item, ast.Str):
packages.append(item.s)
self.assertEqual(version_number, "0.4.0", "version wrong in setup.py")
self.assertTrue(any(dep.startswith("psycopg[c]") for dep in install_requires), "psycopg[c] missing in dependencies")
self.assertTrue(any(dep.startswith("psycopg[pool]") for dep in install_requires), "psycopg[pool] missing in dependencies")
self.assertIn("citrusdb.db.postgres", packages, "'citrusdb.db.postgres' not found in packages")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestTomlAndSetup))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
https://github.com/teamqurrent/citrus | Save the vector index after any vector is deleted. Update the corresponding method inside `citrusdb/api/index.py`. Only save the index if changes are to be persisted. | 9744caf | toml
numpy
hnswlib
| python3.9 | b0d0793 | diff --git a/citrusdb/api/index.py b/citrusdb/api/index.py
--- a/citrusdb/api/index.py
+++ b/citrusdb/api/index.py
@@ -80,6 +80,8 @@ class Index:
def delete_vectors(self, ids: List[int]):
for id in ids:
self._db.mark_deleted(id)
+ if self._parameters["persist_directory"]:
+ self._save()
def _save(self):
self._db.save_index(
| [
{
"content": "import os\nimport pickle\nfrom typing import Any, List, Optional\nfrom numpy import float32\nfrom numpy._typing import NDArray\nfrom citrusdb.db.index.hnswlib import HnswIndex\nfrom citrusdb.utils.utils import ensure_valid_path\n\n\nclass Index:\n _db: HnswIndex\n _parameters: dict\n\n def __init__(\n self,\n name: str,\n max_elements: int = 1000,\n persist_directory: Optional[str] = None,\n M: int = 64,\n ef_construction: int = 200,\n allow_replace_deleted: bool = False,\n ):\n self._db = HnswIndex(id=name)\n\n self._parameters = {\n \"index_name\": name,\n \"max_elements\": max_elements,\n \"persist_directory\": persist_directory,\n \"M\": M,\n \"ef_construction\": ef_construction,\n \"allow_replace_deleted\": allow_replace_deleted,\n }\n\n if persist_directory:\n self._load_params()\n\n if ensure_valid_path(persist_directory, str(self._parameters[\"index_name\"])):\n self._db.load_index(\n os.path.join(\n persist_directory, str(self._parameters[\"index_name\"])\n ),\n allow_replace_deleted=bool(\n self._parameters[\"allow_replace_deleted\"]\n ),\n )\n else:\n self._db.init_index(\n max_elements=max_elements,\n M=M,\n ef_construction=ef_construction,\n allow_replace_deleted=allow_replace_deleted,\n )\n self._save()\n else:\n self._db.init_index(\n max_elements=max_elements,\n M=M,\n ef_construction=ef_construction,\n allow_replace_deleted=allow_replace_deleted,\n )\n\n def add(\n self,\n ids,\n embeddings: Optional[NDArray[float32]],\n replace_deleted: bool,\n ):\n self._db.add_items(embeddings, ids, replace_deleted)\n if self._parameters[\"persist_directory\"]:\n self._save()\n\n def _load_params(self):\n if ensure_valid_path(self._parameters[\"persist_directory\"], \".citrus_params\"):\n filename = os.path.join(\n self._parameters[\"persist_directory\"], \".citrus_params\"\n )\n with open(filename, \"rb\") as f:\n self._parameters = pickle.load(f)\n\n def delete_vectors(self, ids: List[int]):\n for id in ids:\n self._db.mark_deleted(id)\n\n def _save(self):\n self._db.save_index(\n os.path.join(\n self._parameters[\"persist_directory\"], self._parameters[\"index_name\"]\n )\n )\n self._save_params()\n\n def _save_params(self):\n output_file = os.path.join(\n self._parameters[\"persist_directory\"], \".citrus_params\"\n )\n with open(output_file, \"wb\") as f:\n pickle.dump(self._parameters, f)\n\n def set_ef(self, ef: int):\n self._db.set_ef(ef)\n\n def query(\n self,\n documents: Optional[List[str]] = None,\n query_embeddings: Optional[NDArray[float32]] = None,\n k=1,\n ):\n if query_embeddings is None and documents is None:\n raise ValueError(\"Please provide either an embedding\" + \" or a document.\")\n\n if documents is not None:\n from citrusdb.embedding.openai import get_embeddings\n\n embeddings = get_embeddings(documents)\n query_embeddings = embeddings\n\n return self._db.knn_query(query_embeddings, k)\n\n def get_status(self):\n self._db.get_status()\n\n def get_dimension(self):\n return self._db.get_dimension()\n\n def get_replace_deleted(self):\n return self._parameters[\"allow_replace_deleted\"]\n\n",
"path": "citrusdb/api/index.py"
}
] | 1_3 | python | import sys
import unittest
import ast
import inspect
import textwrap
class TestIndexDeleteVectors(unittest.TestCase):
def test_if_conditions_in_delete_vectors(self):
from citrusdb.api.index import Index
source = inspect.getsource(Index.delete_vectors)
source = textwrap.dedent(source)
tree = ast.parse(source)
class IfConditionVisitor(ast.NodeVisitor):
def __init__(self):
self.if_condition = None
def visit_If(self, node):
self.if_condition = node.test
visitor = IfConditionVisitor()
visitor.visit(tree)
if_condition = visitor.if_condition
if isinstance(if_condition, ast.Subscript):
if isinstance(if_condition.value, ast.Attribute):
self.assertEqual(if_condition.value.attr, "_parameters", "Unexpected attribute in if condition")
else:
self.fail("if_condition.value is not an ast.Attribute")
if isinstance(if_condition.slice, ast.Constant):
self.assertEqual(if_condition.slice.value, "persist_directory", "Unexpected constant in if condition slice")
else:
self.fail("if_condition.slice is not an ast.Constant")
else:
self.fail("if_condition is not an ast.Subscript")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestIndexDeleteVectors))
runner = unittest.TextTestRunner()
if not runner.run(suite).wasSuccessful():
sys.exit(1)
else:
sys.exit(0)
if __name__ == "__main__":
main() |
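
Aside (illustrative): a small sketch of what this patch changes for a persisted index — deletions now trigger a save; the index name, ids, and directory are placeholders.

from citrusdb.api.index import Index

idx = Index(name="docs", persist_directory="./db")  # hypothetical directory
idx.delete_vectors([1, 2])  # marks the ids deleted and now also calls _save()
# With persist_directory=None, the deletion stays in memory and nothing is written.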
https://github.com/teamqurrent/citrus | Add a query to update the ef value in `citrusdb/db/sqlite/queries.py` based on a given 'name'. Create an update_ef method in `citrusdb/db/sqlite/db.py` that uses the new UPDATE_EF query and takes a name and an ef value as parameters. Update the `set_ef` method in `citrusdb/api/local.py` so that it first persists the ef value via update_ef, but only if persist_directory is not None. In the `create_index` method in `citrusdb/api/local.py`, also check that persist_directory is not None before creating the index in the existing if-not statement | 37a8258 | toml
numpy
hnswlib
| python3.9 | f525179 | diff --git a/citrusdb/api/local.py b/citrusdb/api/local.py
--- a/citrusdb/api/local.py
+++ b/citrusdb/api/local.py
@@ -26,7 +26,7 @@ class LocalAPI:
ef_construction: int = 200,
allow_replace_deleted: bool = False,
):
- if not(self._sqlClient.check_index_exists(name)):
+ if self.persist_directory is not None and not(self._sqlClient.check_index_exists(name)):
self._sqlClient.create_index(
name,
max_elements,
@@ -65,6 +65,10 @@ class LocalAPI:
raise ValueError(f"Could not find index: {index}")
def set_ef(self, index: str, ef: int):
+ if self.persist_directory is not None:
+ self._sqlClient.update_ef(index, ef)
+
+
flag = 1
for key in self._db.keys():
if key == index:
diff --git a/citrusdb/db/sqlite/db.py b/citrusdb/db/sqlite/db.py
--- a/citrusdb/db/sqlite/db.py
+++ b/citrusdb/db/sqlite/db.py
@@ -61,3 +61,13 @@ class DB:
self._con.commit()
cur.close()
+ def update_ef(
+ self,
+ name: str,
+ ef: int
+ ):
+ cur = self._con.cursor()
+ parameters = (ef, name)
+ cur.execute(queries.UPDATE_EF, parameters)
+ self._con.commit()
+ cur.close()
diff --git a/citrusdb/db/sqlite/queries.py b/citrusdb/db/sqlite/queries.py
--- a/citrusdb/db/sqlite/queries.py
+++ b/citrusdb/db/sqlite/queries.py
@@ -33,3 +33,9 @@ SELECT index_id
FROM index_manager
WHERE name = ?
'''
+
+UPDATE_EF = '''
+UPDATE index_manager
+SET ef = ?
+WHERE name = ?
+'''
| [
{
"content": "from citrusdb.api.index import Index\nfrom citrusdb.db.sqlite.db import DB\n\nfrom typing import Dict, List, Optional\nfrom numpy import float32\nfrom numpy._typing import NDArray\n\n\nclass LocalAPI:\n _db: Dict[str, Index] \n _sqlClient: DB\n persist_directory: Optional[str]\n\n def __init__(self, persist_directory: Optional[str] = None):\n self._db = {}\n self.persist_directory = persist_directory\n\n if persist_directory is not None:\n self._sqlClient = DB(persist_directory)\n\n def create_index(\n self,\n name: str,\n max_elements: int = 1000,\n M: int = 64,\n ef_construction: int = 200,\n allow_replace_deleted: bool = False,\n ):\n if not(self._sqlClient.check_index_exists(name)):\n self._sqlClient.create_index(\n name,\n max_elements,\n M,\n ef_construction,\n allow_replace_deleted\n )\n\n self._db[name] = Index(\n name=name,\n max_elements=max_elements,\n persist_directory=self.persist_directory,\n M=M,\n ef_construction=ef_construction,\n allow_replace_deleted=allow_replace_deleted\n )\n\n def add(\n self,\n index: str,\n ids,\n documents: Optional[List[str]] = None,\n embeddings: Optional[NDArray[float32]] = None,\n ):\n flag = 1\n for key in self._db.keys():\n if key == index:\n flag = 0\n self._db[key].add(\n ids=ids,\n documents=documents,\n embeddings=embeddings\n )\n\n if flag:\n raise ValueError(f\"Could not find index: {index}\")\n\n def set_ef(self, index: str, ef: int):\n flag = 1\n for key in self._db.keys():\n if key == index:\n flag = 0\n self._db[key].set_ef(ef)\n\n if flag:\n raise ValueError(f\"Could not find index: {index}\")\n\n def query(\n self,\n index: str,\n documents: Optional[List[str]] = None,\n query_embeddings: Optional[NDArray[float32]] = None,\n k=1,\n ):\n flag = 1\n for key in self._db.keys():\n if key == index:\n flag = 0\n return self._db[key].query(\n documents=documents,\n query_embeddings=query_embeddings,\n k=k\n )\n\n if flag:\n raise ValueError(f\"Could not find index: {index}\")\n\n\n def get_status(self, index: str):\n flag = 1\n for key in self._db.keys():\n if key == index:\n flag = 0\n self._db[key].get_status()\n\n if flag:\n raise ValueError(f\"Could not find index: {index}\")\n",
"path": "citrusdb/api/local.py"
},
{
"content": "import os\nimport sqlite3\nfrom typing import Optional\n\nfrom citrusdb.utils.utils import ensure_valid_path\nimport citrusdb.db.sqlite.queries as queries\n\n\nclass DB:\n _con: sqlite3.Connection\n\n def __init__(\n self,\n persist_directory: str,\n ):\n ensure_valid_path(persist_directory)\n\n self._con = sqlite3.connect(\n os.path.join(\n persist_directory, \"citrus.db\"\n )\n )\n\n cur = self._con.cursor()\n cur.execute(\"PRAGMA foreign_keys = ON\") # Enable foreign keys\n cur.executescript(f'''\n BEGIN;\n {queries.CREATE_INDEX_MANAGER_TABLE}\n {queries.CREATE_INDEX_DATA_TABLE}\n END;\n ''')\n cur.close()\n\n def check_index_exists(\n self,\n name: str\n ) -> bool:\n cur = self._con.cursor()\n parameters = (name,)\n res = cur.execute(queries.GET_INDEX_BY_NAME, parameters)\n if res.fetchone() is None:\n cur.close()\n return False\n else:\n cur.close()\n return True\n\n def create_index(\n self,\n name: str,\n max_elements: int,\n M: int,\n ef_construction: int,\n allow_replace_deleted: bool,\n dimensions: Optional[int] = 1536,\n ):\n cur = self._con.cursor()\n ef = ef_construction\n parameters = (name, dimensions, max_elements, M, ef, ef_construction, allow_replace_deleted)\n cur.execute(queries.INSERT_INDEX_TO_MANAGER, parameters)\n self._con.commit()\n cur.close()\n\n",
"path": "citrusdb/db/sqlite/db.py"
},
{
"content": "CREATE_INDEX_MANAGER_TABLE = '''\nCREATE TABLE IF NOT EXISTS index_manager (\n index_id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE,\n dimensions INTEGER NOT NULL,\n max_elements INTEGER NOT NULL,\n m INTEGER NOT NULL,\n ef INTEGER NOT NULL,\n ef_construction INTEGER NOT NULL,\n allow_replace_deleted INTEGER NOT NULL\n);\n'''\n\nCREATE_INDEX_DATA_TABLE = '''\nCREATE TABLE IF NOT EXISTS index_data (\n id TEXT PRIMARY KEY,\n index_id INTEGER,\n text TEXT,\n embedding BLOB,\n metadata TEXT,\n FOREIGN KEY(index_id) REFERENCES index_manager(index_id) ON DELETE CASCADE\n);\n'''\n\nINSERT_INDEX_TO_MANAGER = '''\nINSERT INTO index_manager\n(name, dimensions, max_elements, m, ef, ef_construction, allow_replace_deleted)\nVALUES (?, ?, ?, ?, ?, ?, ?);\n'''\n\nGET_INDEX_BY_NAME = '''\nSELECT index_id\nFROM index_manager\nWHERE name = ?\n'''\n",
"path": "citrusdb/db/sqlite/queries.py"
}
] | 1_4 | python | import sys
import unittest
import inspect
class TestDatabaseAndQueries(unittest.TestCase):
def test_database_and_queries(self):
from citrusdb.api.local import LocalAPI
from citrusdb.db.sqlite.db import DB
from citrusdb.db.sqlite.queries import UPDATE_EF
# Checking for UPDATE_EF existence and its content
self.assertIsNotNone(UPDATE_EF, "UPDATE_EF is empty")
UPDATE_EF_lower = UPDATE_EF.lower()
checks = [
"index_manager",
"update index_manager",
"ef",
"set ef",
"name",
"where name",
]
for check in checks:
self.assertIn(check, UPDATE_EF_lower, f"{check} not found in UPDATE_EF")
# Checking method's arguments
args = ["self", "name", "ef"]
self.assertTrue(hasattr(DB, "update_ef"), "DB.update_ef method not found")
all_args = inspect.getfullargspec(DB.update_ef).args
self.assertEqual(len(args), len(all_args), "Argument lengths differ in DB.update_ef")
for arg in args:
self.assertIn(arg, all_args, f"Argument {arg} not present in DB.update_ef")
# Checking for set_ef method in LocalAPI and its content
self.assertTrue(hasattr(LocalAPI, "set_ef"), "`set_ef` method not found in LocalAPI")
method_source = inspect.getsource(LocalAPI.set_ef)
self.assertIn("if self.persist_directory is not None:", method_source, "`set_ef` method doesn't have the expected line for persist_directory")
self.assertIn("self._sqlClient.update_ef(index, ef)", method_source, "`set_ef` method doesn't have the expected line for update_ef")
# Checking for create_index method in LocalAPI and its content
self.assertTrue(hasattr(LocalAPI, "create_index"), "`create_index` method not found in LocalAPI")
method_source = inspect.getsource(LocalAPI.create_index)
self.assertIn("self.persist_directory is not None", method_source, "`create_index` method doesn't have the expected check for persist_directory")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestDatabaseAndQueries))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main() |
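
Aside (illustrative): a hedged sketch of the persisted set_ef flow this record adds; the directory, index name, and ef value are placeholders.

from citrusdb.api.local import LocalAPI

api = LocalAPI(persist_directory="./db")  # hypothetical path
api.create_index("docs")
# First writes ef=120 to the index_manager table via UPDATE_EF,
# then applies it to the in-memory index.
api.set_ef("docs", 120)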
https://github.com/teamqurrent/citrus | The readme contains a few spelling mistakes. Update the readme to fix them. | 23e65f5 | python3.9 | e7f33d8 | diff --git a/README.md b/README.md
--- a/README.md
+++ b/README.md
@@ -52,7 +52,7 @@ You can directly pass vector embeddings as well. If you're passing a list of str
```py
result, distances = citrus.query("What is it like to launch a startup", k=1)
```
-Go launch a repl on [Replit](https://replit.com) and see what result you get after runnig the query! `result` will contain the `ids` of the top `k` search hits.
+Go launch a repl on [Replit](https://replit.com) and see what result you get after running the query! `result` will contain the `ids` of the top `k` search hits.
## Example
[pokedex search](https://replit.com/@debabratajr/pokedex-search)
| [
{
"content": "# 🍋 citrus.\n### open-source (distributed) vector database\n\n<p align=\"center\">\n Special thanks to\n</p>\n<p align=\"center\">\n <img align=\"center\" src=\"https://www.getdevkit.com/logo.png\" width=100 height=100 alt=\"DevKit\" />\n</p>\n<p align=\"center\">\n <a href=\"https://www.getdevkit.com\">DevKit - The Essential Developer Toolkit</a><br />\n DSoC 2023\n</p>\n\n\n## Installation\n\n```\npip install citrusdb\n```\n\n## Getting started\n\n#### 1. Create index\n```py\nimport citrusdb\n\n# Initialize client\ncitrus = citrusdb.Client()\n\n# Create index\ncitrus.create_index(\n max_elements=1000, # increases dynamically as you insert more vectors\n persist_directory=\"/db\" # save data and load index from disk\n)\n```\n\n#### 2. Insert elements\n```py\nids = [1, 2, 3]\ndocuemnts = [\n \"Your time is limited, so don't waste it living someone else's life\",\n \"I'd rather be optimistic and wrong than pessimistic and right.\",\n \"Running a start-up is like chewing glass and staring into the abyss.\"\n]\n\ncitrus.add(ids, documents=documents)\n```\nYou can directly pass vector embeddings as well. If you're passing a list of strings like we have done here, ensure you have your `OPENAI_API_KEY` in the environment. By default we use OpenAI to to generate the embeddings. Please reach out if you're looking for support from a different provider!\n\n#### 3. Search\n```py\nresult, distances = citrus.query(\"What is it like to launch a startup\", k=1)\n```\nGo launch a repl on [Replit](https://replit.com) and see what result you get after runnig the query! `result` will contain the `ids` of the top `k` search hits.\n\n## Example\n[pokedex search](https://replit.com/@debabratajr/pokedex-search)\n",
"path": "README.md"
}
] | 1_5 | python | import sys
import unittest
class TestReadmeTypos(unittest.TestCase):
def test_readme_for_typos(self):
# typos = ["runnig", "docuemnts"]
typos = ["runnig"]
with open("README.md", "r") as f:
readme = f.read()
self.assertTrue(readme, "README.md is empty")
for word in typos:
self.assertNotIn(word, readme, f"Typo '{word}' found in README.md")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestReadmeTypos))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
https://github.com/teamqurrent/openai-python | The objective is to update the Audio API requests in the provided GitHub repository to include the fields api_version and organization. Additionally, update the version number in the `openai/version.py` file from 0.27.8 to 0.27.9. Add those fields in the `openai/api_resources/audio.py` file, specifically in the `transcribe`, `translate`, `transcribe_raw`, `translate_raw`, `atranscribe`, `atranslate`, `atranscribe_raw`, and `atranslate_raw` methods where the request is being made. | 041bf5a | requests >= 2.20
aiohttp
numpy
asyncio
matplotlib
plotly
pandas
scipy
scikit-learn
tenacity
typing-extensions
| python3.9 | d1c3658 | diff --git a/openai/api_resources/audio.py b/openai/api_resources/audio.py
--- a/openai/api_resources/audio.py
+++ b/openai/api_resources/audio.py
@@ -59,6 +59,8 @@ class Audio(APIResource):
api_key=api_key,
api_base=api_base,
api_type=api_type,
+ api_version=api_version,
+ organization=organization,
**params,
)
url = cls._get_url("transcriptions")
@@ -86,6 +88,8 @@ class Audio(APIResource):
api_key=api_key,
api_base=api_base,
api_type=api_type,
+ api_version=api_version,
+ organization=organization,
**params,
)
url = cls._get_url("translations")
@@ -114,6 +118,8 @@ class Audio(APIResource):
api_key=api_key,
api_base=api_base,
api_type=api_type,
+ api_version=api_version,
+ organization=organization,
**params,
)
url = cls._get_url("transcriptions")
@@ -142,6 +148,8 @@ class Audio(APIResource):
api_key=api_key,
api_base=api_base,
api_type=api_type,
+ api_version=api_version,
+ organization=organization,
**params,
)
url = cls._get_url("translations")
@@ -169,6 +177,8 @@ class Audio(APIResource):
api_key=api_key,
api_base=api_base,
api_type=api_type,
+ api_version=api_version,
+ organization=organization,
**params,
)
url = cls._get_url("transcriptions")
@@ -198,6 +208,8 @@ class Audio(APIResource):
api_key=api_key,
api_base=api_base,
api_type=api_type,
+ api_version=api_version,
+ organization=organization,
**params,
)
url = cls._get_url("translations")
@@ -228,6 +240,8 @@ class Audio(APIResource):
api_key=api_key,
api_base=api_base,
api_type=api_type,
+ api_version=api_version,
+ organization=organization,
**params,
)
url = cls._get_url("transcriptions")
@@ -258,6 +272,8 @@ class Audio(APIResource):
api_key=api_key,
api_base=api_base,
api_type=api_type,
+ api_version=api_version,
+ organization=organization,
**params,
)
url = cls._get_url("translations")
diff --git a/openai/version.py b/openai/version.py
--- a/openai/version.py
+++ b/openai/version.py
@@ -1 +1 @@
-VERSION = "0.27.8"
+VERSION = "0.27.9"
| [
{
"content": "from typing import Any, List\n\nimport openai\nfrom openai import api_requestor, util\nfrom openai.api_resources.abstract import APIResource\n\n\nclass Audio(APIResource):\n OBJECT_NAME = \"audio\"\n\n @classmethod\n def _get_url(cls, action):\n return cls.class_url() + f\"/{action}\"\n\n @classmethod\n def _prepare_request(\n cls,\n file,\n filename,\n model,\n api_key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n **params,\n ):\n requestor = api_requestor.APIRequestor(\n api_key,\n api_base=api_base or openai.api_base,\n api_type=api_type,\n api_version=api_version,\n organization=organization,\n )\n files: List[Any] = []\n data = {\n \"model\": model,\n **params,\n }\n files.append((\"file\", (filename, file, \"application/octet-stream\")))\n return requestor, files, data\n\n @classmethod\n def transcribe(\n cls,\n model,\n file,\n api_key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n **params,\n ):\n requestor, files, data = cls._prepare_request(\n file=file,\n filename=file.name,\n model=model,\n api_key=api_key,\n api_base=api_base,\n api_type=api_type,\n **params,\n )\n url = cls._get_url(\"transcriptions\")\n response, _, api_key = requestor.request(\"post\", url, files=files, params=data)\n return util.convert_to_openai_object(\n response, api_key, api_version, organization\n )\n\n @classmethod\n def translate(\n cls,\n model,\n file,\n api_key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n **params,\n ):\n requestor, files, data = cls._prepare_request(\n file=file,\n filename=file.name,\n model=model,\n api_key=api_key,\n api_base=api_base,\n api_type=api_type,\n **params,\n )\n url = cls._get_url(\"translations\")\n response, _, api_key = requestor.request(\"post\", url, files=files, params=data)\n return util.convert_to_openai_object(\n response, api_key, api_version, organization\n )\n\n @classmethod\n def transcribe_raw(\n cls,\n model,\n file,\n filename,\n api_key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n **params,\n ):\n requestor, files, data = cls._prepare_request(\n file=file,\n filename=filename,\n model=model,\n api_key=api_key,\n api_base=api_base,\n api_type=api_type,\n **params,\n )\n url = cls._get_url(\"transcriptions\")\n response, _, api_key = requestor.request(\"post\", url, files=files, params=data)\n return util.convert_to_openai_object(\n response, api_key, api_version, organization\n )\n\n @classmethod\n def translate_raw(\n cls,\n model,\n file,\n filename,\n api_key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n **params,\n ):\n requestor, files, data = cls._prepare_request(\n file=file,\n filename=filename,\n model=model,\n api_key=api_key,\n api_base=api_base,\n api_type=api_type,\n **params,\n )\n url = cls._get_url(\"translations\")\n response, _, api_key = requestor.request(\"post\", url, files=files, params=data)\n return util.convert_to_openai_object(\n response, api_key, api_version, organization\n )\n\n @classmethod\n async def atranscribe(\n cls,\n model,\n file,\n api_key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n **params,\n ):\n requestor, files, data = cls._prepare_request(\n file=file,\n filename=file.name,\n model=model,\n api_key=api_key,\n api_base=api_base,\n api_type=api_type,\n **params,\n )\n url = cls._get_url(\"transcriptions\")\n response, _, api_key = await requestor.arequest(\n \"post\", url, 
files=files, params=data\n )\n return util.convert_to_openai_object(\n response, api_key, api_version, organization\n )\n\n @classmethod\n async def atranslate(\n cls,\n model,\n file,\n api_key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n **params,\n ):\n requestor, files, data = cls._prepare_request(\n file=file,\n filename=file.name,\n model=model,\n api_key=api_key,\n api_base=api_base,\n api_type=api_type,\n **params,\n )\n url = cls._get_url(\"translations\")\n response, _, api_key = await requestor.arequest(\n \"post\", url, files=files, params=data\n )\n return util.convert_to_openai_object(\n response, api_key, api_version, organization\n )\n\n @classmethod\n async def atranscribe_raw(\n cls,\n model,\n file,\n filename,\n api_key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n **params,\n ):\n requestor, files, data = cls._prepare_request(\n file=file,\n filename=filename,\n model=model,\n api_key=api_key,\n api_base=api_base,\n api_type=api_type,\n **params,\n )\n url = cls._get_url(\"transcriptions\")\n response, _, api_key = await requestor.arequest(\n \"post\", url, files=files, params=data\n )\n return util.convert_to_openai_object(\n response, api_key, api_version, organization\n )\n\n @classmethod\n async def atranslate_raw(\n cls,\n model,\n file,\n filename,\n api_key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n **params,\n ):\n requestor, files, data = cls._prepare_request(\n file=file,\n filename=filename,\n model=model,\n api_key=api_key,\n api_base=api_base,\n api_type=api_type,\n **params,\n )\n url = cls._get_url(\"translations\")\n response, _, api_key = await requestor.arequest(\n \"post\", url, files=files, params=data\n )\n return util.convert_to_openai_object(\n response, api_key, api_version, organization\n )\n",
"path": "openai/api_resources/audio.py"
},
{
"content": "VERSION = \"0.27.8\"\n",
"path": "openai/version.py"
}
] | 2_0 | python | import sys
import unittest
import ast
import inspect
import textwrap
from typing import List, Union
class TestOpenAIFunctionsAndVersion(unittest.TestCase):
def test_openai_functions_and_version(self):
from openai.api_resources import Audio
from openai.version import VERSION
class FindPrepareRequest(ast.NodeVisitor):
args: Union[List[str], None] = None
def visit_Call(self, node):
if (
isinstance(node.func, ast.Attribute)
and node.func.attr == "_prepare_request"
):
self.args = [str(arg.arg) for arg in node.keywords]
self.generic_visit(node)
def get_function_args(func):
source = inspect.getsource(func)
source = textwrap.dedent(source)
tree = ast.parse(source)
visitor = FindPrepareRequest()
visitor.visit(tree)
return visitor.args
functions = [
Audio.transcribe,
Audio.translate,
Audio.transcribe_raw,
Audio.translate_raw,
Audio.atranscribe,
Audio.atranslate,
Audio.atranscribe_raw,
Audio.atranslate_raw,
]
for fn in functions:
args = get_function_args(fn)
self.assertIsNotNone(args, "Function arguments are None")
self.assertIn("api_version", args, "'api_version' not found in arguments")
self.assertIn("organization", args, "'organization' not found in arguments")
self.assertEqual(VERSION, "0.27.9", "VERSION does not match expected '0.27.9'")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestOpenAIFunctionsAndVersion))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
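
Aside (illustrative): a hedged sketch of a call that exercises the fields this record forwards; the audio file name, organization id, and version value are placeholders.

import openai

with open("speech.mp3", "rb") as audio_file:  # hypothetical audio file
    transcript = openai.Audio.transcribe(
        "whisper-1",
        audio_file,
        api_version=None,            # now forwarded to _prepare_request by this patch
        organization="org-example",  # hypothetical organization id
    )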
|
https://github.com/teamqurrent/openai-python | Handle timeout errors for asynchronous API requests in `_interpret_async_response` in `openai/api_requestor.py`. Add exception handling for aiohttp.ServerTimeoutError and asyncio.TimeoutError. When a timeout occurs, raise error.Timeout with the message "Request timed out". | 7610c5a | requests >= 2.20
aiohttp
numpy
asyncio
matplotlib
plotly
pandas
scipy
scikit-learn
tenacity
typing-extensions
| python3.9 | 041bf5a | diff --git a/openai/api_requestor.py b/openai/api_requestor.py
--- a/openai/api_requestor.py
+++ b/openai/api_requestor.py
@@ -720,6 +720,8 @@ class APIRequestor:
else:
try:
await result.read()
+ except (aiohttp.ServerTimeoutError, asyncio.TimeoutError) as e:
+ raise error.Timeout("Request timed out") from e
except aiohttp.ClientError as e:
util.log_warn(e, body=result.content)
return (
| [
{
"content": "import asyncio\nimport json\nimport time\nimport platform\nimport sys\nimport threading\nimport time\nimport warnings\nfrom contextlib import asynccontextmanager\nfrom json import JSONDecodeError\nfrom typing import (\n AsyncGenerator,\n AsyncIterator,\n Callable,\n Dict,\n Iterator,\n Optional,\n Tuple,\n Union,\n overload,\n)\nfrom urllib.parse import urlencode, urlsplit, urlunsplit\n\nimport aiohttp\nimport requests\n\nif sys.version_info >= (3, 8):\n from typing import Literal\nelse:\n from typing_extensions import Literal\n\nimport openai\nfrom openai import error, util, version\nfrom openai.openai_response import OpenAIResponse\nfrom openai.util import ApiType\n\nTIMEOUT_SECS = 600\nMAX_SESSION_LIFETIME_SECS = 180\nMAX_CONNECTION_RETRIES = 2\n\n# Has one attribute per thread, 'session'.\n_thread_context = threading.local()\n\n\ndef _build_api_url(url, query):\n scheme, netloc, path, base_query, fragment = urlsplit(url)\n\n if base_query:\n query = \"%s&%s\" % (base_query, query)\n\n return urlunsplit((scheme, netloc, path, query, fragment))\n\n\ndef _requests_proxies_arg(proxy) -> Optional[Dict[str, str]]:\n \"\"\"Returns a value suitable for the 'proxies' argument to 'requests.request.\"\"\"\n if proxy is None:\n return None\n elif isinstance(proxy, str):\n return {\"http\": proxy, \"https\": proxy}\n elif isinstance(proxy, dict):\n return proxy.copy()\n else:\n raise ValueError(\n \"'openai.proxy' must be specified as either a string URL or a dict with string URL under the https and/or http keys.\"\n )\n\n\ndef _aiohttp_proxies_arg(proxy) -> Optional[str]:\n \"\"\"Returns a value suitable for the 'proxies' argument to 'aiohttp.ClientSession.request.\"\"\"\n if proxy is None:\n return None\n elif isinstance(proxy, str):\n return proxy\n elif isinstance(proxy, dict):\n return proxy[\"https\"] if \"https\" in proxy else proxy[\"http\"]\n else:\n raise ValueError(\n \"'openai.proxy' must be specified as either a string URL or a dict with string URL under the https and/or http keys.\"\n )\n\n\ndef _make_session() -> requests.Session:\n if openai.requestssession:\n if isinstance(openai.requestssession, requests.Session):\n return openai.requestssession\n return openai.requestssession()\n if not openai.verify_ssl_certs:\n warnings.warn(\"verify_ssl_certs is ignored; openai always verifies.\")\n s = requests.Session()\n proxies = _requests_proxies_arg(openai.proxy)\n if proxies:\n s.proxies = proxies\n s.mount(\n \"https://\",\n requests.adapters.HTTPAdapter(max_retries=MAX_CONNECTION_RETRIES),\n )\n return s\n\n\ndef parse_stream_helper(line: bytes) -> Optional[str]:\n if line:\n if line.strip() == b\"data: [DONE]\":\n # return here will cause GeneratorExit exception in urllib3\n # and it will close http connection with TCP Reset\n return None\n if line.startswith(b\"data: \"):\n line = line[len(b\"data: \"):]\n return line.decode(\"utf-8\")\n else:\n return None\n return None\n\n\ndef parse_stream(rbody: Iterator[bytes]) -> Iterator[str]:\n for line in rbody:\n _line = parse_stream_helper(line)\n if _line is not None:\n yield _line\n\n\nasync def parse_stream_async(rbody: aiohttp.StreamReader):\n async for line in rbody:\n _line = parse_stream_helper(line)\n if _line is not None:\n yield _line\n\n\nclass APIRequestor:\n def __init__(\n self,\n key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n ):\n self.api_base = api_base or openai.api_base\n self.api_key = key or util.default_api_key()\n self.api_type = (\n ApiType.from_str(api_type)\n if 
api_type\n else ApiType.from_str(openai.api_type)\n )\n self.api_version = api_version or openai.api_version\n self.organization = organization or openai.organization\n\n @classmethod\n def format_app_info(cls, info):\n str = info[\"name\"]\n if info[\"version\"]:\n str += \"/%s\" % (info[\"version\"],)\n if info[\"url\"]:\n str += \" (%s)\" % (info[\"url\"],)\n return str\n\n def _check_polling_response(self, response: OpenAIResponse, predicate: Callable[[OpenAIResponse], bool]):\n if not predicate(response):\n return\n error_data = response.data['error']\n message = error_data.get('message', 'Operation failed')\n code = error_data.get('code')\n raise error.OpenAIError(message=message, code=code)\n\n def _poll(\n self,\n method,\n url,\n until,\n failed,\n params = None,\n headers = None,\n interval = None,\n delay = None\n ) -> Tuple[Iterator[OpenAIResponse], bool, str]:\n if delay:\n time.sleep(delay)\n\n response, b, api_key = self.request(method, url, params, headers)\n self._check_polling_response(response, failed)\n start_time = time.time()\n while not until(response):\n if time.time() - start_time > TIMEOUT_SECS:\n raise error.Timeout(\"Operation polling timed out.\")\n\n time.sleep(interval or response.retry_after or 10)\n response, b, api_key = self.request(method, url, params, headers)\n self._check_polling_response(response, failed)\n\n response.data = response.data['result']\n return response, b, api_key\n\n async def _apoll(\n self,\n method,\n url,\n until,\n failed,\n params = None,\n headers = None,\n interval = None,\n delay = None\n ) -> Tuple[Iterator[OpenAIResponse], bool, str]:\n if delay:\n await asyncio.sleep(delay)\n\n response, b, api_key = await self.arequest(method, url, params, headers)\n self._check_polling_response(response, failed)\n start_time = time.time()\n while not until(response):\n if time.time() - start_time > TIMEOUT_SECS:\n raise error.Timeout(\"Operation polling timed out.\")\n\n await asyncio.sleep(interval or response.retry_after or 10)\n response, b, api_key = await self.arequest(method, url, params, headers)\n self._check_polling_response(response, failed)\n\n response.data = response.data['result']\n return response, b, api_key\n\n @overload\n def request(\n self,\n method,\n url,\n params,\n headers,\n files,\n stream: Literal[True],\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[Iterator[OpenAIResponse], bool, str]:\n pass\n\n @overload\n def request(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n *,\n stream: Literal[True],\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[Iterator[OpenAIResponse], bool, str]:\n pass\n\n @overload\n def request(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n stream: Literal[False] = ...,\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[OpenAIResponse, bool, str]:\n pass\n\n @overload\n def request(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n stream: bool = ...,\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], bool, str]:\n pass\n\n def request(\n self,\n method,\n url,\n params=None,\n headers=None,\n files=None,\n stream: bool = False,\n request_id: Optional[str] = None,\n request_timeout: Optional[Union[float, 
Tuple[float, float]]] = None,\n ) -> Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], bool, str]:\n result = self.request_raw(\n method.lower(),\n url,\n params=params,\n supplied_headers=headers,\n files=files,\n stream=stream,\n request_id=request_id,\n request_timeout=request_timeout,\n )\n resp, got_stream = self._interpret_response(result, stream)\n return resp, got_stream, self.api_key\n\n @overload\n async def arequest(\n self,\n method,\n url,\n params,\n headers,\n files,\n stream: Literal[True],\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[AsyncGenerator[OpenAIResponse, None], bool, str]:\n pass\n\n @overload\n async def arequest(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n *,\n stream: Literal[True],\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[AsyncGenerator[OpenAIResponse, None], bool, str]:\n pass\n\n @overload\n async def arequest(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n stream: Literal[False] = ...,\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[OpenAIResponse, bool, str]:\n pass\n\n @overload\n async def arequest(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n stream: bool = ...,\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[Union[OpenAIResponse, AsyncGenerator[OpenAIResponse, None]], bool, str]:\n pass\n\n async def arequest(\n self,\n method,\n url,\n params=None,\n headers=None,\n files=None,\n stream: bool = False,\n request_id: Optional[str] = None,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = None,\n ) -> Tuple[Union[OpenAIResponse, AsyncGenerator[OpenAIResponse, None]], bool, str]:\n ctx = aiohttp_session()\n session = await ctx.__aenter__()\n try:\n result = await self.arequest_raw(\n method.lower(),\n url,\n session,\n params=params,\n supplied_headers=headers,\n files=files,\n request_id=request_id,\n request_timeout=request_timeout,\n )\n resp, got_stream = await self._interpret_async_response(result, stream)\n except Exception:\n await ctx.__aexit__(None, None, None)\n raise\n if got_stream:\n\n async def wrap_resp():\n assert isinstance(resp, AsyncGenerator)\n try:\n async for r in resp:\n yield r\n finally:\n await ctx.__aexit__(None, None, None)\n\n return wrap_resp(), got_stream, self.api_key\n else:\n await ctx.__aexit__(None, None, None)\n return resp, got_stream, self.api_key\n\n def handle_error_response(self, rbody, rcode, resp, rheaders, stream_error=False):\n try:\n error_data = resp[\"error\"]\n except (KeyError, TypeError):\n raise error.APIError(\n \"Invalid response object from API: %r (HTTP response code \"\n \"was %d)\" % (rbody, rcode),\n rbody,\n rcode,\n resp,\n )\n\n if \"internal_message\" in error_data:\n error_data[\"message\"] += \"\\n\\n\" + error_data[\"internal_message\"]\n\n util.log_info(\n \"OpenAI API error received\",\n error_code=error_data.get(\"code\"),\n error_type=error_data.get(\"type\"),\n error_message=error_data.get(\"message\"),\n error_param=error_data.get(\"param\"),\n stream_error=stream_error,\n )\n\n # Rate limits were previously coded as 400's with code 'rate_limit'\n if rcode == 429:\n return error.RateLimitError(\n error_data.get(\"message\"), rbody, rcode, resp, rheaders\n )\n elif rcode in [400, 404, 415]:\n return 
error.InvalidRequestError(\n error_data.get(\"message\"),\n error_data.get(\"param\"),\n error_data.get(\"code\"),\n rbody,\n rcode,\n resp,\n rheaders,\n )\n elif rcode == 401:\n return error.AuthenticationError(\n error_data.get(\"message\"), rbody, rcode, resp, rheaders\n )\n elif rcode == 403:\n return error.PermissionError(\n error_data.get(\"message\"), rbody, rcode, resp, rheaders\n )\n elif rcode == 409:\n return error.TryAgain(\n error_data.get(\"message\"), rbody, rcode, resp, rheaders\n )\n elif stream_error:\n # TODO: we will soon attach status codes to stream errors\n parts = [error_data.get(\"message\"), \"(Error occurred while streaming.)\"]\n message = \" \".join([p for p in parts if p is not None])\n return error.APIError(message, rbody, rcode, resp, rheaders)\n else:\n return error.APIError(\n f\"{error_data.get('message')} {rbody} {rcode} {resp} {rheaders}\",\n rbody,\n rcode,\n resp,\n rheaders,\n )\n\n def request_headers(\n self, method: str, extra, request_id: Optional[str]\n ) -> Dict[str, str]:\n user_agent = \"OpenAI/v1 PythonBindings/%s\" % (version.VERSION,)\n if openai.app_info:\n user_agent += \" \" + self.format_app_info(openai.app_info)\n\n uname_without_node = \" \".join(\n v for k, v in platform.uname()._asdict().items() if k != \"node\"\n )\n ua = {\n \"bindings_version\": version.VERSION,\n \"httplib\": \"requests\",\n \"lang\": \"python\",\n \"lang_version\": platform.python_version(),\n \"platform\": platform.platform(),\n \"publisher\": \"openai\",\n \"uname\": uname_without_node,\n }\n if openai.app_info:\n ua[\"application\"] = openai.app_info\n\n headers = {\n \"X-OpenAI-Client-User-Agent\": json.dumps(ua),\n \"User-Agent\": user_agent,\n }\n\n headers.update(util.api_key_to_header(self.api_type, self.api_key))\n\n if self.organization:\n headers[\"OpenAI-Organization\"] = self.organization\n\n if self.api_version is not None and self.api_type == ApiType.OPEN_AI:\n headers[\"OpenAI-Version\"] = self.api_version\n if request_id is not None:\n headers[\"X-Request-Id\"] = request_id\n if openai.debug:\n headers[\"OpenAI-Debug\"] = \"true\"\n headers.update(extra)\n\n return headers\n\n def _validate_headers(\n self, supplied_headers: Optional[Dict[str, str]]\n ) -> Dict[str, str]:\n headers: Dict[str, str] = {}\n if supplied_headers is None:\n return headers\n\n if not isinstance(supplied_headers, dict):\n raise TypeError(\"Headers must be a dictionary\")\n\n for k, v in supplied_headers.items():\n if not isinstance(k, str):\n raise TypeError(\"Header keys must be strings\")\n if not isinstance(v, str):\n raise TypeError(\"Header values must be strings\")\n headers[k] = v\n\n # NOTE: It is possible to do more validation of the headers, but a request could always\n # be made to the API manually with invalid headers, so we need to handle them server side.\n\n return headers\n\n def _prepare_request_raw(\n self,\n url,\n supplied_headers,\n method,\n params,\n files,\n request_id: Optional[str],\n ) -> Tuple[str, Dict[str, str], Optional[bytes]]:\n abs_url = \"%s%s\" % (self.api_base, url)\n headers = self._validate_headers(supplied_headers)\n\n data = None\n if method == \"get\" or method == \"delete\":\n if params:\n encoded_params = urlencode(\n [(k, v) for k, v in params.items() if v is not None]\n )\n abs_url = _build_api_url(abs_url, encoded_params)\n elif method in {\"post\", \"put\"}:\n if params and files:\n data = params\n if params and not files:\n data = json.dumps(params).encode()\n headers[\"Content-Type\"] = \"application/json\"\n else:\n 
raise error.APIConnectionError(\n \"Unrecognized HTTP method %r. This may indicate a bug in the \"\n \"OpenAI bindings. Please contact us through our help center at help.openai.com for \"\n \"assistance.\" % (method,)\n )\n\n headers = self.request_headers(method, headers, request_id)\n\n util.log_debug(\"Request to OpenAI API\", method=method, path=abs_url)\n util.log_debug(\"Post details\", data=data, api_version=self.api_version)\n\n return abs_url, headers, data\n\n def request_raw(\n self,\n method,\n url,\n *,\n params=None,\n supplied_headers: Optional[Dict[str, str]] = None,\n files=None,\n stream: bool = False,\n request_id: Optional[str] = None,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = None,\n ) -> requests.Response:\n abs_url, headers, data = self._prepare_request_raw(\n url, supplied_headers, method, params, files, request_id\n )\n\n if not hasattr(_thread_context, \"session\"):\n _thread_context.session = _make_session()\n _thread_context.session_create_time = time.time()\n elif (\n time.time() - getattr(_thread_context, \"session_create_time\", 0)\n >= MAX_SESSION_LIFETIME_SECS\n ):\n _thread_context.session.close()\n _thread_context.session = _make_session()\n _thread_context.session_create_time = time.time()\n try:\n result = _thread_context.session.request(\n method,\n abs_url,\n headers=headers,\n data=data,\n files=files,\n stream=stream,\n timeout=request_timeout if request_timeout else TIMEOUT_SECS,\n proxies=_thread_context.session.proxies,\n )\n except requests.exceptions.Timeout as e:\n raise error.Timeout(\"Request timed out: {}\".format(e)) from e\n except requests.exceptions.RequestException as e:\n raise error.APIConnectionError(\n \"Error communicating with OpenAI: {}\".format(e)\n ) from e\n util.log_debug(\n \"OpenAI API response\",\n path=abs_url,\n response_code=result.status_code,\n processing_ms=result.headers.get(\"OpenAI-Processing-Ms\"),\n request_id=result.headers.get(\"X-Request-Id\"),\n )\n # Don't read the whole stream for debug logging unless necessary.\n if openai.log == \"debug\":\n util.log_debug(\n \"API response body\", body=result.content, headers=result.headers\n )\n return result\n\n async def arequest_raw(\n self,\n method,\n url,\n session,\n *,\n params=None,\n supplied_headers: Optional[Dict[str, str]] = None,\n files=None,\n request_id: Optional[str] = None,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = None,\n ) -> aiohttp.ClientResponse:\n abs_url, headers, data = self._prepare_request_raw(\n url, supplied_headers, method, params, files, request_id\n )\n\n if isinstance(request_timeout, tuple):\n timeout = aiohttp.ClientTimeout(\n connect=request_timeout[0],\n total=request_timeout[1],\n )\n else:\n timeout = aiohttp.ClientTimeout(\n total=request_timeout if request_timeout else TIMEOUT_SECS\n )\n\n if files:\n # TODO: Use `aiohttp.MultipartWriter` to create the multipart form data here.\n # For now we use the private `requests` method that is known to have worked so far.\n data, content_type = requests.models.RequestEncodingMixin._encode_files( # type: ignore\n files, data\n )\n headers[\"Content-Type\"] = content_type\n request_kwargs = {\n \"method\": method,\n \"url\": abs_url,\n \"headers\": headers,\n \"data\": data,\n \"proxy\": _aiohttp_proxies_arg(openai.proxy),\n \"timeout\": timeout,\n }\n try:\n result = await session.request(**request_kwargs)\n util.log_info(\n \"OpenAI API response\",\n path=abs_url,\n response_code=result.status,\n 
processing_ms=result.headers.get(\"OpenAI-Processing-Ms\"),\n request_id=result.headers.get(\"X-Request-Id\"),\n )\n # Don't read the whole stream for debug logging unless necessary.\n if openai.log == \"debug\":\n util.log_debug(\n \"API response body\", body=result.content, headers=result.headers\n )\n return result\n except (aiohttp.ServerTimeoutError, asyncio.TimeoutError) as e:\n raise error.Timeout(\"Request timed out\") from e\n except aiohttp.ClientError as e:\n raise error.APIConnectionError(\"Error communicating with OpenAI\") from e\n\n def _interpret_response(\n self, result: requests.Response, stream: bool\n ) -> Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], bool]:\n \"\"\"Returns the response(s) and a bool indicating whether it is a stream.\"\"\"\n if stream and \"text/event-stream\" in result.headers.get(\"Content-Type\", \"\"):\n return (\n self._interpret_response_line(\n line, result.status_code, result.headers, stream=True\n )\n for line in parse_stream(result.iter_lines())\n ), True\n else:\n return (\n self._interpret_response_line(\n result.content.decode(\"utf-8\"),\n result.status_code,\n result.headers,\n stream=False,\n ),\n False,\n )\n\n async def _interpret_async_response(\n self, result: aiohttp.ClientResponse, stream: bool\n ) -> Tuple[Union[OpenAIResponse, AsyncGenerator[OpenAIResponse, None]], bool]:\n \"\"\"Returns the response(s) and a bool indicating whether it is a stream.\"\"\"\n if stream and \"text/event-stream\" in result.headers.get(\"Content-Type\", \"\"):\n return (\n self._interpret_response_line(\n line, result.status, result.headers, stream=True\n )\n async for line in parse_stream_async(result.content)\n ), True\n else:\n try:\n await result.read()\n except aiohttp.ClientError as e:\n util.log_warn(e, body=result.content)\n return (\n self._interpret_response_line(\n (await result.read()).decode(\"utf-8\"),\n result.status,\n result.headers,\n stream=False,\n ),\n False,\n )\n\n def _interpret_response_line(\n self, rbody: str, rcode: int, rheaders, stream: bool\n ) -> OpenAIResponse:\n # HTTP 204 response code does not have any content in the body.\n if rcode == 204:\n return OpenAIResponse(None, rheaders)\n\n if rcode == 503:\n raise error.ServiceUnavailableError(\n \"The server is overloaded or not ready yet.\",\n rbody,\n rcode,\n headers=rheaders,\n )\n try:\n if 'text/plain' in rheaders.get('Content-Type', ''):\n data = rbody\n else:\n data = json.loads(rbody)\n except (JSONDecodeError, UnicodeDecodeError) as e:\n raise error.APIError(\n f\"HTTP code {rcode} from API ({rbody})\", rbody, rcode, headers=rheaders\n ) from e\n resp = OpenAIResponse(data, rheaders)\n # In the future, we might add a \"status\" parameter to errors\n # to better handle the \"error while streaming\" case.\n stream_error = stream and \"error\" in resp.data\n if stream_error or not 200 <= rcode < 300:\n raise self.handle_error_response(\n rbody, rcode, resp.data, rheaders, stream_error=stream_error\n )\n return resp\n\n\n@asynccontextmanager\nasync def aiohttp_session() -> AsyncIterator[aiohttp.ClientSession]:\n user_set_session = openai.aiosession.get()\n if user_set_session:\n yield user_set_session\n else:\n async with aiohttp.ClientSession() as session:\n yield session\n",
"path": "openai/api_requestor.py"
}
] | 2_1 | python | import sys
import unittest
import inspect
import textwrap
class TestAPIRequestorExceptions(unittest.TestCase):
def test_exceptions_in_api_requestor_code(self):
from openai.api_requestor import APIRequestor
source_code = inspect.getsource(APIRequestor._interpret_async_response)
source_code = textwrap.dedent(source_code)
exception_types_to_check = ["ServerTimeoutError", "TimeoutError"]
for exception in exception_types_to_check:
self.assertIn(exception, source_code, f"{exception} not found in APIRequestor._interpret_async_response")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestAPIRequestorExceptions))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
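For context, the test for record 2_1 only greps the source of `_interpret_async_response` for the names ServerTimeoutError and TimeoutError. A minimal sketch of stream handling that would satisfy such a check, illustrative only and not necessarily the recorded solution, reuses the timeout translation already present in `arequest_raw`:

async def _guarded_lines(content: aiohttp.StreamReader):
    # Sketch: surface aiohttp/asyncio timeouts raised mid-stream as the
    # library's error.Timeout, mirroring the except clause in arequest_raw.
    try:
        async for line in parse_stream_async(content):
            yield line
    except (aiohttp.ServerTimeoutError, asyncio.TimeoutError) as e:
        raise error.Timeout("Request timed out") from e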
https://github.com/teamqurrent/openai-python | Remove unnecessary sorting by index in `openai/embeddings_utils.py` for both `get_embeddings` and `aget_embeddings` functions. | c975bce | requests >= 2.20
aiohttp
numpy
asyncio
matplotlib
plotly
pandas
scipy
scikit-learn
tenacity
typing-extensions
| python3.9 | f24d193 | diff --git a/openai/embeddings_utils.py b/openai/embeddings_utils.py
--- a/openai/embeddings_utils.py
+++ b/openai/embeddings_utils.py
@@ -46,7 +46,6 @@ def get_embeddings(
list_of_text = [text.replace("\n", " ") for text in list_of_text]
data = openai.Embedding.create(input=list_of_text, engine=engine, **kwargs).data
- data = sorted(data, key=lambda x: x["index"]) # maintain the same order as input.
return [d["embedding"] for d in data]
@@ -60,7 +59,6 @@ async def aget_embeddings(
list_of_text = [text.replace("\n", " ") for text in list_of_text]
data = (await openai.Embedding.acreate(input=list_of_text, engine=engine, **kwargs)).data
- data = sorted(data, key=lambda x: x["index"]) # maintain the same order as input.
return [d["embedding"] for d in data]
| [
{
"content": "import textwrap as tr\nfrom typing import List, Optional\n\nimport matplotlib.pyplot as plt\nimport plotly.express as px\nfrom scipy import spatial\nfrom sklearn.decomposition import PCA\nfrom sklearn.manifold import TSNE\nfrom sklearn.metrics import average_precision_score, precision_recall_curve\nfrom tenacity import retry, stop_after_attempt, wait_random_exponential\n\nimport openai\nfrom openai.datalib.numpy_helper import numpy as np\nfrom openai.datalib.pandas_helper import pandas as pd\n\n\n@retry(wait=wait_random_exponential(min=1, max=20), stop=stop_after_attempt(6))\ndef get_embedding(text: str, engine=\"text-similarity-davinci-001\", **kwargs) -> List[float]:\n\n # replace newlines, which can negatively affect performance.\n text = text.replace(\"\\n\", \" \")\n\n return openai.Embedding.create(input=[text], engine=engine, **kwargs)[\"data\"][0][\"embedding\"]\n\n\n@retry(wait=wait_random_exponential(min=1, max=20), stop=stop_after_attempt(6))\nasync def aget_embedding(\n text: str, engine=\"text-similarity-davinci-001\", **kwargs\n) -> List[float]:\n\n # replace newlines, which can negatively affect performance.\n text = text.replace(\"\\n\", \" \")\n\n return (await openai.Embedding.acreate(input=[text], engine=engine, **kwargs))[\"data\"][0][\n \"embedding\"\n ]\n\n\n@retry(wait=wait_random_exponential(min=1, max=20), stop=stop_after_attempt(6))\ndef get_embeddings(\n list_of_text: List[str], engine=\"text-similarity-babbage-001\", **kwargs\n) -> List[List[float]]:\n assert len(list_of_text) <= 2048, \"The batch size should not be larger than 2048.\"\n\n # replace newlines, which can negatively affect performance.\n list_of_text = [text.replace(\"\\n\", \" \") for text in list_of_text]\n\n data = openai.Embedding.create(input=list_of_text, engine=engine, **kwargs).data\n data = sorted(data, key=lambda x: x[\"index\"]) # maintain the same order as input.\n return [d[\"embedding\"] for d in data]\n\n\n@retry(wait=wait_random_exponential(min=1, max=20), stop=stop_after_attempt(6))\nasync def aget_embeddings(\n list_of_text: List[str], engine=\"text-similarity-babbage-001\", **kwargs\n) -> List[List[float]]:\n assert len(list_of_text) <= 2048, \"The batch size should not be larger than 2048.\"\n\n # replace newlines, which can negatively affect performance.\n list_of_text = [text.replace(\"\\n\", \" \") for text in list_of_text]\n\n data = (await openai.Embedding.acreate(input=list_of_text, engine=engine, **kwargs)).data\n data = sorted(data, key=lambda x: x[\"index\"]) # maintain the same order as input.\n return [d[\"embedding\"] for d in data]\n\n\ndef cosine_similarity(a, b):\n return np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))\n\n\ndef plot_multiclass_precision_recall(\n y_score, y_true_untransformed, class_list, classifier_name\n):\n \"\"\"\n Precision-Recall plotting for a multiclass problem. 
It plots average precision-recall, per class precision recall and reference f1 contours.\n\n Code slightly modified, but heavily based on https://scikit-learn.org/stable/auto_examples/model_selection/plot_precision_recall.html\n \"\"\"\n n_classes = len(class_list)\n y_true = pd.concat(\n [(y_true_untransformed == class_list[i]) for i in range(n_classes)], axis=1\n ).values\n\n # For each class\n precision = dict()\n recall = dict()\n average_precision = dict()\n for i in range(n_classes):\n precision[i], recall[i], _ = precision_recall_curve(y_true[:, i], y_score[:, i])\n average_precision[i] = average_precision_score(y_true[:, i], y_score[:, i])\n\n # A \"micro-average\": quantifying score on all classes jointly\n precision_micro, recall_micro, _ = precision_recall_curve(\n y_true.ravel(), y_score.ravel()\n )\n average_precision_micro = average_precision_score(y_true, y_score, average=\"micro\")\n print(\n str(classifier_name)\n + \" - Average precision score over all classes: {0:0.2f}\".format(\n average_precision_micro\n )\n )\n\n # setup plot details\n plt.figure(figsize=(9, 10))\n f_scores = np.linspace(0.2, 0.8, num=4)\n lines = []\n labels = []\n for f_score in f_scores:\n x = np.linspace(0.01, 1)\n y = f_score * x / (2 * x - f_score)\n (l,) = plt.plot(x[y >= 0], y[y >= 0], color=\"gray\", alpha=0.2)\n plt.annotate(\"f1={0:0.1f}\".format(f_score), xy=(0.9, y[45] + 0.02))\n\n lines.append(l)\n labels.append(\"iso-f1 curves\")\n (l,) = plt.plot(recall_micro, precision_micro, color=\"gold\", lw=2)\n lines.append(l)\n labels.append(\n \"average Precision-recall (auprc = {0:0.2f})\" \"\".format(average_precision_micro)\n )\n\n for i in range(n_classes):\n (l,) = plt.plot(recall[i], precision[i], lw=2)\n lines.append(l)\n labels.append(\n \"Precision-recall for class `{0}` (auprc = {1:0.2f})\"\n \"\".format(class_list[i], average_precision[i])\n )\n\n fig = plt.gcf()\n fig.subplots_adjust(bottom=0.25)\n plt.xlim([0.0, 1.0])\n plt.ylim([0.0, 1.05])\n plt.xlabel(\"Recall\")\n plt.ylabel(\"Precision\")\n plt.title(f\"{classifier_name}: Precision-Recall curve for each class\")\n plt.legend(lines, labels)\n\n\ndef distances_from_embeddings(\n query_embedding: List[float],\n embeddings: List[List[float]],\n distance_metric=\"cosine\",\n) -> List[List]:\n \"\"\"Return the distances between a query embedding and a list of embeddings.\"\"\"\n distance_metrics = {\n \"cosine\": spatial.distance.cosine,\n \"L1\": spatial.distance.cityblock,\n \"L2\": spatial.distance.euclidean,\n \"Linf\": spatial.distance.chebyshev,\n }\n distances = [\n distance_metrics[distance_metric](query_embedding, embedding)\n for embedding in embeddings\n ]\n return distances\n\n\ndef indices_of_nearest_neighbors_from_distances(distances) -> np.ndarray:\n \"\"\"Return a list of indices of nearest neighbors from a list of distances.\"\"\"\n return np.argsort(distances)\n\n\ndef pca_components_from_embeddings(\n embeddings: List[List[float]], n_components=2\n) -> np.ndarray:\n \"\"\"Return the PCA components of a list of embeddings.\"\"\"\n pca = PCA(n_components=n_components)\n array_of_embeddings = np.array(embeddings)\n return pca.fit_transform(array_of_embeddings)\n\n\ndef tsne_components_from_embeddings(\n embeddings: List[List[float]], n_components=2, **kwargs\n) -> np.ndarray:\n \"\"\"Returns t-SNE components of a list of embeddings.\"\"\"\n # use better defaults if not specified\n if \"init\" not in kwargs.keys():\n kwargs[\"init\"] = \"pca\"\n if \"learning_rate\" not in kwargs.keys():\n kwargs[\"learning_rate\"] = 
\"auto\"\n tsne = TSNE(n_components=n_components, **kwargs)\n array_of_embeddings = np.array(embeddings)\n return tsne.fit_transform(array_of_embeddings)\n\n\ndef chart_from_components(\n components: np.ndarray,\n labels: Optional[List[str]] = None,\n strings: Optional[List[str]] = None,\n x_title=\"Component 0\",\n y_title=\"Component 1\",\n mark_size=5,\n **kwargs,\n):\n \"\"\"Return an interactive 2D chart of embedding components.\"\"\"\n empty_list = [\"\" for _ in components]\n data = pd.DataFrame(\n {\n x_title: components[:, 0],\n y_title: components[:, 1],\n \"label\": labels if labels else empty_list,\n \"string\": [\"<br>\".join(tr.wrap(string, width=30)) for string in strings]\n if strings\n else empty_list,\n }\n )\n chart = px.scatter(\n data,\n x=x_title,\n y=y_title,\n color=\"label\" if labels else None,\n symbol=\"label\" if labels else None,\n hover_data=[\"string\"] if strings else None,\n **kwargs,\n ).update_traces(marker=dict(size=mark_size))\n return chart\n\n\ndef chart_from_components_3D(\n components: np.ndarray,\n labels: Optional[List[str]] = None,\n strings: Optional[List[str]] = None,\n x_title: str = \"Component 0\",\n y_title: str = \"Component 1\",\n z_title: str = \"Compontent 2\",\n mark_size: int = 5,\n **kwargs,\n):\n \"\"\"Return an interactive 3D chart of embedding components.\"\"\"\n empty_list = [\"\" for _ in components]\n data = pd.DataFrame(\n {\n x_title: components[:, 0],\n y_title: components[:, 1],\n z_title: components[:, 2],\n \"label\": labels if labels else empty_list,\n \"string\": [\"<br>\".join(tr.wrap(string, width=30)) for string in strings]\n if strings\n else empty_list,\n }\n )\n chart = px.scatter_3d(\n data,\n x=x_title,\n y=y_title,\n z=z_title,\n color=\"label\" if labels else None,\n symbol=\"label\" if labels else None,\n hover_data=[\"string\"] if strings else None,\n **kwargs,\n ).update_traces(marker=dict(size=mark_size))\n return chart\n",
"path": "openai/embeddings_utils.py"
}
] | 2_2 | python | import sys
import unittest
import ast
import inspect
import textwrap
class TestSortedCallInFunctions(unittest.TestCase):
def test_sorted_call_in_functions(self):
from openai.embeddings_utils import aget_embeddings, get_embeddings
source_a = textwrap.dedent(inspect.getsource(get_embeddings))
source_b = textwrap.dedent(inspect.getsource(aget_embeddings))
def is_sorted_function_called(tree):
class CallVisitor(ast.NodeVisitor):
def __init__(self):
self.sorted_called = False
def visit_Call(self, node):
if isinstance(node.func, ast.Name) and node.func.id == "sorted":
self.sorted_called = True
self.generic_visit(node)
visitor = CallVisitor()
visitor.visit(tree)
return visitor.sorted_called
tree = ast.parse(source_a)
sorted_called = is_sorted_function_called(tree)
self.assertFalse(sorted_called, "data is still being sorted in get_embeddings")
tree = ast.parse(source_b)
sorted_called = is_sorted_function_called(tree)
self.assertFalse(sorted_called, "data is still being sorted in aget_embeddings")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSortedCallInFunctions))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
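For reference, with the sort removed the synchronous helper in record 2_2 reduces to the following sketch (retry decorator and type annotations elided; the patch assumes the API returns embeddings in input order):

import openai

def get_embeddings(list_of_text, engine="text-similarity-babbage-001", **kwargs):
    assert len(list_of_text) <= 2048, "The batch size should not be larger than 2048."
    # Replace newlines, which can negatively affect performance.
    list_of_text = [text.replace("\n", " ") for text in list_of_text]
    # No sorted(..., key=lambda x: x["index"]) pass: items are used as returned.
    data = openai.Embedding.create(input=list_of_text, engine=engine, **kwargs).data
    return [d["embedding"] for d in data]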
https://github.com/teamqurrent/openai-python | Change request logs in `openai/api_requestor.py` from info level to debug level. | 5d1a726 | requests >= 2.20
aiohttp
numpy
asyncio
matplotlib
plotly
pandas
scipy
scikit-learn
tenacity
typing-extensions
| python3.9 | e5b7d1a | diff --git a/openai/api_requestor.py b/openai/api_requestor.py
--- a/openai/api_requestor.py
+++ b/openai/api_requestor.py
@@ -490,7 +490,7 @@ class APIRequestor:
headers = self.request_headers(method, headers, request_id)
- util.log_info("Request to OpenAI API", method=method, path=abs_url)
+ util.log_debug("Request to OpenAI API", method=method, path=abs_url)
util.log_debug("Post details", data=data, api_version=self.api_version)
return abs_url, headers, data
@@ -529,7 +529,7 @@ class APIRequestor:
raise error.APIConnectionError(
"Error communicating with OpenAI: {}".format(e)
) from e
- util.log_info(
+ util.log_debug(
"OpenAI API response",
path=abs_url,
response_code=result.status_code,
| [
{
"content": "import asyncio\nimport json\nimport platform\nimport sys\nimport threading\nimport warnings\nfrom contextlib import asynccontextmanager\nfrom json import JSONDecodeError\nfrom typing import (\n AsyncGenerator,\n AsyncIterator,\n Dict,\n Iterator,\n Optional,\n Tuple,\n Union,\n overload,\n)\nfrom urllib.parse import urlencode, urlsplit, urlunsplit\n\nimport aiohttp\nimport requests\n\nif sys.version_info >= (3, 8):\n from typing import Literal\nelse:\n from typing_extensions import Literal\n\nimport openai\nfrom openai import error, util, version\nfrom openai.openai_response import OpenAIResponse\nfrom openai.util import ApiType\n\nTIMEOUT_SECS = 600\nMAX_CONNECTION_RETRIES = 2\n\n# Has one attribute per thread, 'session'.\n_thread_context = threading.local()\n\n\ndef _build_api_url(url, query):\n scheme, netloc, path, base_query, fragment = urlsplit(url)\n\n if base_query:\n query = \"%s&%s\" % (base_query, query)\n\n return urlunsplit((scheme, netloc, path, query, fragment))\n\n\ndef _requests_proxies_arg(proxy) -> Optional[Dict[str, str]]:\n \"\"\"Returns a value suitable for the 'proxies' argument to 'requests.request.\"\"\"\n if proxy is None:\n return None\n elif isinstance(proxy, str):\n return {\"http\": proxy, \"https\": proxy}\n elif isinstance(proxy, dict):\n return proxy.copy()\n else:\n raise ValueError(\n \"'openai.proxy' must be specified as either a string URL or a dict with string URL under the https and/or http keys.\"\n )\n\n\ndef _aiohttp_proxies_arg(proxy) -> Optional[str]:\n \"\"\"Returns a value suitable for the 'proxies' argument to 'aiohttp.ClientSession.request.\"\"\"\n if proxy is None:\n return None\n elif isinstance(proxy, str):\n return proxy\n elif isinstance(proxy, dict):\n return proxy[\"https\"] if \"https\" in proxy else proxy[\"http\"]\n else:\n raise ValueError(\n \"'openai.proxy' must be specified as either a string URL or a dict with string URL under the https and/or http keys.\"\n )\n\n\ndef _make_session() -> requests.Session:\n if not openai.verify_ssl_certs:\n warnings.warn(\"verify_ssl_certs is ignored; openai always verifies.\")\n s = requests.Session()\n proxies = _requests_proxies_arg(openai.proxy)\n if proxies:\n s.proxies = proxies\n s.mount(\n \"https://\",\n requests.adapters.HTTPAdapter(max_retries=MAX_CONNECTION_RETRIES),\n )\n return s\n\n\ndef parse_stream_helper(line: bytes) -> Optional[str]:\n if line:\n if line.strip() == b\"data: [DONE]\":\n # return here will cause GeneratorExit exception in urllib3\n # and it will close http connection with TCP Reset\n return None\n if line.startswith(b\"data: \"):\n line = line[len(b\"data: \") :]\n return line.decode(\"utf-8\")\n return None\n\n\ndef parse_stream(rbody: Iterator[bytes]) -> Iterator[str]:\n for line in rbody:\n _line = parse_stream_helper(line)\n if _line is not None:\n yield _line\n\n\nasync def parse_stream_async(rbody: aiohttp.StreamReader):\n async for chunk, _ in rbody.iter_chunks():\n # While the `ChunkTupleAsyncStreamIterator` iterator is meant to iterate over chunks (and thus lines) it seems\n # to still sometimes return multiple lines at a time, so let's split the chunk by lines again.\n for line in chunk.splitlines():\n _line = parse_stream_helper(line)\n if _line is not None:\n yield _line\n\n\nclass APIRequestor:\n def __init__(\n self,\n key=None,\n api_base=None,\n api_type=None,\n api_version=None,\n organization=None,\n ):\n self.api_base = api_base or openai.api_base\n self.api_key = key or util.default_api_key()\n self.api_type = (\n 
ApiType.from_str(api_type)\n if api_type\n else ApiType.from_str(openai.api_type)\n )\n self.api_version = api_version or openai.api_version\n self.organization = organization or openai.organization\n\n @classmethod\n def format_app_info(cls, info):\n str = info[\"name\"]\n if info[\"version\"]:\n str += \"/%s\" % (info[\"version\"],)\n if info[\"url\"]:\n str += \" (%s)\" % (info[\"url\"],)\n return str\n\n @overload\n def request(\n self,\n method,\n url,\n params,\n headers,\n files,\n stream: Literal[True],\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[Iterator[OpenAIResponse], bool, str]:\n pass\n\n @overload\n def request(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n *,\n stream: Literal[True],\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[Iterator[OpenAIResponse], bool, str]:\n pass\n\n @overload\n def request(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n stream: Literal[False] = ...,\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[OpenAIResponse, bool, str]:\n pass\n\n @overload\n def request(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n stream: bool = ...,\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], bool, str]:\n pass\n\n def request(\n self,\n method,\n url,\n params=None,\n headers=None,\n files=None,\n stream: bool = False,\n request_id: Optional[str] = None,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = None,\n ) -> Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], bool, str]:\n result = self.request_raw(\n method.lower(),\n url,\n params=params,\n supplied_headers=headers,\n files=files,\n stream=stream,\n request_id=request_id,\n request_timeout=request_timeout,\n )\n resp, got_stream = self._interpret_response(result, stream)\n return resp, got_stream, self.api_key\n\n @overload\n async def arequest(\n self,\n method,\n url,\n params,\n headers,\n files,\n stream: Literal[True],\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[AsyncGenerator[OpenAIResponse, None], bool, str]:\n pass\n\n @overload\n async def arequest(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n *,\n stream: Literal[True],\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[AsyncGenerator[OpenAIResponse, None], bool, str]:\n pass\n\n @overload\n async def arequest(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n stream: Literal[False] = ...,\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[OpenAIResponse, bool, str]:\n pass\n\n @overload\n async def arequest(\n self,\n method,\n url,\n params=...,\n headers=...,\n files=...,\n stream: bool = ...,\n request_id: Optional[str] = ...,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = ...,\n ) -> Tuple[Union[OpenAIResponse, AsyncGenerator[OpenAIResponse, None]], bool, str]:\n pass\n\n async def arequest(\n self,\n method,\n url,\n params=None,\n headers=None,\n files=None,\n stream: bool = False,\n request_id: Optional[str] = None,\n request_timeout: Optional[Union[float, 
Tuple[float, float]]] = None,\n ) -> Tuple[Union[OpenAIResponse, AsyncGenerator[OpenAIResponse, None]], bool, str]:\n ctx = aiohttp_session()\n session = await ctx.__aenter__()\n try:\n result = await self.arequest_raw(\n method.lower(),\n url,\n session,\n params=params,\n supplied_headers=headers,\n files=files,\n request_id=request_id,\n request_timeout=request_timeout,\n )\n resp, got_stream = await self._interpret_async_response(result, stream)\n except Exception:\n await ctx.__aexit__(None, None, None)\n raise\n if got_stream:\n\n async def wrap_resp():\n assert isinstance(resp, AsyncGenerator)\n try:\n async for r in resp:\n yield r\n finally:\n await ctx.__aexit__(None, None, None)\n\n return wrap_resp(), got_stream, self.api_key\n else:\n await ctx.__aexit__(None, None, None)\n return resp, got_stream, self.api_key\n\n def handle_error_response(self, rbody, rcode, resp, rheaders, stream_error=False):\n try:\n error_data = resp[\"error\"]\n except (KeyError, TypeError):\n raise error.APIError(\n \"Invalid response object from API: %r (HTTP response code \"\n \"was %d)\" % (rbody, rcode),\n rbody,\n rcode,\n resp,\n )\n\n if \"internal_message\" in error_data:\n error_data[\"message\"] += \"\\n\\n\" + error_data[\"internal_message\"]\n\n util.log_info(\n \"OpenAI API error received\",\n error_code=error_data.get(\"code\"),\n error_type=error_data.get(\"type\"),\n error_message=error_data.get(\"message\"),\n error_param=error_data.get(\"param\"),\n stream_error=stream_error,\n )\n\n # Rate limits were previously coded as 400's with code 'rate_limit'\n if rcode == 429:\n return error.RateLimitError(\n error_data.get(\"message\"), rbody, rcode, resp, rheaders\n )\n elif rcode in [400, 404, 415]:\n return error.InvalidRequestError(\n error_data.get(\"message\"),\n error_data.get(\"param\"),\n error_data.get(\"code\"),\n rbody,\n rcode,\n resp,\n rheaders,\n )\n elif rcode == 401:\n return error.AuthenticationError(\n error_data.get(\"message\"), rbody, rcode, resp, rheaders\n )\n elif rcode == 403:\n return error.PermissionError(\n error_data.get(\"message\"), rbody, rcode, resp, rheaders\n )\n elif rcode == 409:\n return error.TryAgain(\n error_data.get(\"message\"), rbody, rcode, resp, rheaders\n )\n elif stream_error:\n # TODO: we will soon attach status codes to stream errors\n parts = [error_data.get(\"message\"), \"(Error occurred while streaming.)\"]\n message = \" \".join([p for p in parts if p is not None])\n return error.APIError(message, rbody, rcode, resp, rheaders)\n else:\n return error.APIError(\n f\"{error_data.get('message')} {rbody} {rcode} {resp} {rheaders}\",\n rbody,\n rcode,\n resp,\n rheaders,\n )\n\n def request_headers(\n self, method: str, extra, request_id: Optional[str]\n ) -> Dict[str, str]:\n user_agent = \"OpenAI/v1 PythonBindings/%s\" % (version.VERSION,)\n if openai.app_info:\n user_agent += \" \" + self.format_app_info(openai.app_info)\n\n uname_without_node = \" \".join(\n v for k, v in platform.uname()._asdict().items() if k != \"node\"\n )\n ua = {\n \"bindings_version\": version.VERSION,\n \"httplib\": \"requests\",\n \"lang\": \"python\",\n \"lang_version\": platform.python_version(),\n \"platform\": platform.platform(),\n \"publisher\": \"openai\",\n \"uname\": uname_without_node,\n }\n if openai.app_info:\n ua[\"application\"] = openai.app_info\n\n headers = {\n \"X-OpenAI-Client-User-Agent\": json.dumps(ua),\n \"User-Agent\": user_agent,\n }\n\n headers.update(util.api_key_to_header(self.api_type, self.api_key))\n\n if self.organization:\n 
headers[\"OpenAI-Organization\"] = self.organization\n\n if self.api_version is not None and self.api_type == ApiType.OPEN_AI:\n headers[\"OpenAI-Version\"] = self.api_version\n if request_id is not None:\n headers[\"X-Request-Id\"] = request_id\n if openai.debug:\n headers[\"OpenAI-Debug\"] = \"true\"\n headers.update(extra)\n\n return headers\n\n def _validate_headers(\n self, supplied_headers: Optional[Dict[str, str]]\n ) -> Dict[str, str]:\n headers: Dict[str, str] = {}\n if supplied_headers is None:\n return headers\n\n if not isinstance(supplied_headers, dict):\n raise TypeError(\"Headers must be a dictionary\")\n\n for k, v in supplied_headers.items():\n if not isinstance(k, str):\n raise TypeError(\"Header keys must be strings\")\n if not isinstance(v, str):\n raise TypeError(\"Header values must be strings\")\n headers[k] = v\n\n # NOTE: It is possible to do more validation of the headers, but a request could always\n # be made to the API manually with invalid headers, so we need to handle them server side.\n\n return headers\n\n def _prepare_request_raw(\n self,\n url,\n supplied_headers,\n method,\n params,\n files,\n request_id: Optional[str],\n ) -> Tuple[str, Dict[str, str], Optional[bytes]]:\n abs_url = \"%s%s\" % (self.api_base, url)\n headers = self._validate_headers(supplied_headers)\n\n data = None\n if method == \"get\" or method == \"delete\":\n if params:\n encoded_params = urlencode(\n [(k, v) for k, v in params.items() if v is not None]\n )\n abs_url = _build_api_url(abs_url, encoded_params)\n elif method in {\"post\", \"put\"}:\n if params and files:\n raise ValueError(\"At most one of params and files may be specified.\")\n if params:\n data = json.dumps(params).encode()\n headers[\"Content-Type\"] = \"application/json\"\n else:\n raise error.APIConnectionError(\n \"Unrecognized HTTP method %r. This may indicate a bug in the \"\n \"OpenAI bindings. 
Please contact support@openai.com for \"\n \"assistance.\" % (method,)\n )\n\n headers = self.request_headers(method, headers, request_id)\n\n util.log_info(\"Request to OpenAI API\", method=method, path=abs_url)\n util.log_debug(\"Post details\", data=data, api_version=self.api_version)\n\n return abs_url, headers, data\n\n def request_raw(\n self,\n method,\n url,\n *,\n params=None,\n supplied_headers: Optional[Dict[str, str]] = None,\n files=None,\n stream: bool = False,\n request_id: Optional[str] = None,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = None,\n ) -> requests.Response:\n abs_url, headers, data = self._prepare_request_raw(\n url, supplied_headers, method, params, files, request_id\n )\n\n if not hasattr(_thread_context, \"session\"):\n _thread_context.session = _make_session()\n try:\n result = _thread_context.session.request(\n method,\n abs_url,\n headers=headers,\n data=data,\n files=files,\n stream=stream,\n timeout=request_timeout if request_timeout else TIMEOUT_SECS,\n )\n except requests.exceptions.Timeout as e:\n raise error.Timeout(\"Request timed out: {}\".format(e)) from e\n except requests.exceptions.RequestException as e:\n raise error.APIConnectionError(\n \"Error communicating with OpenAI: {}\".format(e)\n ) from e\n util.log_info(\n \"OpenAI API response\",\n path=abs_url,\n response_code=result.status_code,\n processing_ms=result.headers.get(\"OpenAI-Processing-Ms\"),\n request_id=result.headers.get(\"X-Request-Id\"),\n )\n # Don't read the whole stream for debug logging unless necessary.\n if openai.log == \"debug\":\n util.log_debug(\n \"API response body\", body=result.content, headers=result.headers\n )\n return result\n\n async def arequest_raw(\n self,\n method,\n url,\n session,\n *,\n params=None,\n supplied_headers: Optional[Dict[str, str]] = None,\n files=None,\n request_id: Optional[str] = None,\n request_timeout: Optional[Union[float, Tuple[float, float]]] = None,\n ) -> aiohttp.ClientResponse:\n abs_url, headers, data = self._prepare_request_raw(\n url, supplied_headers, method, params, files, request_id\n )\n\n if isinstance(request_timeout, tuple):\n timeout = aiohttp.ClientTimeout(\n connect=request_timeout[0],\n total=request_timeout[1],\n )\n else:\n timeout = aiohttp.ClientTimeout(\n total=request_timeout if request_timeout else TIMEOUT_SECS\n )\n\n if files:\n # TODO: Use `aiohttp.MultipartWriter` to create the multipart form data here.\n # For now we use the private `requests` method that is known to have worked so far.\n data, content_type = requests.models.RequestEncodingMixin._encode_files( # type: ignore\n files, data\n )\n headers[\"Content-Type\"] = content_type\n request_kwargs = {\n \"method\": method,\n \"url\": abs_url,\n \"headers\": headers,\n \"data\": data,\n \"proxy\": _aiohttp_proxies_arg(openai.proxy),\n \"timeout\": timeout,\n }\n try:\n result = await session.request(**request_kwargs)\n util.log_info(\n \"OpenAI API response\",\n path=abs_url,\n response_code=result.status,\n processing_ms=result.headers.get(\"OpenAI-Processing-Ms\"),\n request_id=result.headers.get(\"X-Request-Id\"),\n )\n # Don't read the whole stream for debug logging unless necessary.\n if openai.log == \"debug\":\n util.log_debug(\n \"API response body\", body=result.content, headers=result.headers\n )\n return result\n except (aiohttp.ServerTimeoutError, asyncio.TimeoutError) as e:\n raise error.Timeout(\"Request timed out\") from e\n except aiohttp.ClientError as e:\n raise error.APIConnectionError(\"Error communicating with 
OpenAI\") from e\n\n def _interpret_response(\n self, result: requests.Response, stream: bool\n ) -> Tuple[Union[OpenAIResponse, Iterator[OpenAIResponse]], bool]:\n \"\"\"Returns the response(s) and a bool indicating whether it is a stream.\"\"\"\n if stream and \"text/event-stream\" in result.headers.get(\"Content-Type\", \"\"):\n return (\n self._interpret_response_line(\n line, result.status_code, result.headers, stream=True\n )\n for line in parse_stream(result.iter_lines())\n ), True\n else:\n return (\n self._interpret_response_line(\n result.content.decode(\"utf-8\"),\n result.status_code,\n result.headers,\n stream=False,\n ),\n False,\n )\n\n async def _interpret_async_response(\n self, result: aiohttp.ClientResponse, stream: bool\n ) -> Tuple[Union[OpenAIResponse, AsyncGenerator[OpenAIResponse, None]], bool]:\n \"\"\"Returns the response(s) and a bool indicating whether it is a stream.\"\"\"\n if stream and \"text/event-stream\" in result.headers.get(\"Content-Type\", \"\"):\n return (\n self._interpret_response_line(\n line, result.status, result.headers, stream=True\n )\n async for line in parse_stream_async(result.content)\n ), True\n else:\n try:\n await result.read()\n except aiohttp.ClientError as e:\n util.log_warn(e, body=result.content)\n return (\n self._interpret_response_line(\n (await result.read()).decode(\"utf-8\"),\n result.status,\n result.headers,\n stream=False,\n ),\n False,\n )\n\n def _interpret_response_line(\n self, rbody: str, rcode: int, rheaders, stream: bool\n ) -> OpenAIResponse:\n # HTTP 204 response code does not have any content in the body.\n if rcode == 204:\n return OpenAIResponse(None, rheaders)\n\n if rcode == 503:\n raise error.ServiceUnavailableError(\n \"The server is overloaded or not ready yet.\",\n rbody,\n rcode,\n headers=rheaders,\n )\n try:\n data = json.loads(rbody)\n except (JSONDecodeError, UnicodeDecodeError) as e:\n raise error.APIError(\n f\"HTTP code {rcode} from API ({rbody})\", rbody, rcode, headers=rheaders\n ) from e\n resp = OpenAIResponse(data, rheaders)\n # In the future, we might add a \"status\" parameter to errors\n # to better handle the \"error while streaming\" case.\n stream_error = stream and \"error\" in resp.data\n if stream_error or not 200 <= rcode < 300:\n raise self.handle_error_response(\n rbody, rcode, resp.data, rheaders, stream_error=stream_error\n )\n return resp\n\n\n@asynccontextmanager\nasync def aiohttp_session() -> AsyncIterator[aiohttp.ClientSession]:\n user_set_session = openai.aiosession.get()\n if user_set_session:\n yield user_set_session\n else:\n async with aiohttp.ClientSession() as session:\n yield session\n",
"path": "openai/api_requestor.py"
}
] | 2_3 | python | import sys
import unittest
import ast
import inspect
import textwrap
class TestLoggingCalls(unittest.TestCase):
def test_logging_calls(self):
from openai.api_requestor import APIRequestor
class FindPrepareRequest(ast.NodeVisitor):
exists: bool = False
debug_count: int = 0
def visit_Call(self, node):
if isinstance(node.func, ast.Attribute) and node.func.attr == "log_info":
self.exists = True
elif isinstance(node.func, ast.Attribute) and node.func.attr == "log_debug":
self.debug_count += 1
self.generic_visit(node)
def check_info_level_debug(func):
source = inspect.getsource(func)
source = textwrap.dedent(source)
tree = ast.parse(source)
visitor = FindPrepareRequest()
visitor.visit(tree)
return visitor.exists, visitor.debug_count
check1, debug_count1 = check_info_level_debug(APIRequestor._prepare_request_raw)
self.assertFalse(check1, "log_info found in _prepare_request_raw")
self.assertEqual(debug_count1, 2, "Incorrect number of log_debug calls in _prepare_request_raw")
check2, debug_count2 = check_info_level_debug(APIRequestor.request_raw)
self.assertFalse(check2, "log_info found in request_raw")
self.assertEqual(debug_count2, 2, "Incorrect number of log_debug calls in request_raw")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestLoggingCalls))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
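Applied to the quoted source, the record 2_3 patch leaves both request-path log calls at debug level, roughly as follows (sketch; surrounding code elided):

# In _prepare_request_raw:
util.log_debug("Request to OpenAI API", method=method, path=abs_url)
util.log_debug("Post details", data=data, api_version=self.api_version)

# In request_raw, once the HTTP call returns:
util.log_debug(
    "OpenAI API response",
    path=abs_url,
    response_code=result.status_code,
    processing_ms=result.headers.get("OpenAI-Processing-Ms"),
    request_id=result.headers.get("X-Request-Id"),
)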
https://github.com/teamqurrent/openai-python | Add an optional api_key parameter to `Moderation.create` inside `openai/api_resources/moderation.py`. Be sure to pass it through when the instance is created, i.e. as a keyword argument to the `cls(...)` call. | e51ae91 | requests >= 2.20
aiohttp
numpy
asyncio
matplotlib
plotly
pandas
scipy
scikit-learn
tenacity
typing-extensions
| python3.9 | 09dc7ef | diff --git a/openai/api_resources/moderation.py b/openai/api_resources/moderation.py
--- a/openai/api_resources/moderation.py
+++ b/openai/api_resources/moderation.py
@@ -11,14 +11,14 @@ class Moderation(OpenAIObject):
return "/moderations"
@classmethod
- def create(cls, input: Union[str, List[str]], model: Optional[str] = None):
+ def create(cls, input: Union[str, List[str]], model: Optional[str] = None, api_key: Optional[str] = None):
if model is not None and model not in cls.VALID_MODEL_NAMES:
raise ValueError(
f"The parameter model should be chosen from {cls.VALID_MODEL_NAMES} "
f"and it is default to be None."
)
- instance = cls()
+ instance = cls(api_key=api_key)
params = {"input": input}
if model is not None:
params["model"] = model
| [
{
"content": "from typing import List, Optional, Union\n\nfrom openai.openai_object import OpenAIObject\n\n\nclass Moderation(OpenAIObject):\n VALID_MODEL_NAMES: List[str] = [\"text-moderation-stable\", \"text-moderation-latest\"]\n\n @classmethod\n def get_url(self):\n return \"/moderations\"\n\n @classmethod\n def create(cls, input: Union[str, List[str]], model: Optional[str] = None):\n if model is not None and model not in cls.VALID_MODEL_NAMES:\n raise ValueError(\n f\"The parameter model should be chosen from {cls.VALID_MODEL_NAMES} \"\n f\"and it is default to be None.\"\n )\n\n instance = cls()\n params = {\"input\": input}\n if model is not None:\n params[\"model\"] = model\n return instance.request(\"post\", cls.get_url(), params)\n",
"path": "openai/api_resources/moderation.py"
}
] | 2_4 | python | import sys
import unittest
import ast
import inspect
import textwrap
from typing import List, Optional
class TestModerationMethod(unittest.TestCase):
def test_moderation_method(self):
from openai.api_resources.moderation import Moderation
class FindCls(ast.NodeVisitor):
args: Optional[List[str]] = None
def visit_Call(self, node):
if isinstance(node.func, ast.Name) and node.func.id == "cls":
self.args = [str(arg.arg) for arg in node.keywords]
self.generic_visit(node)
func = Moderation.create
source = inspect.getsource(func)
source = textwrap.dedent(source)
tree = ast.parse(source)
visitor = FindCls()
visitor.visit(tree)
self.assertIsNotNone(visitor.args, "args is None")
self.assertIn("api_key", visitor.args, "'api_key' not found in args")
if hasattr(Moderation, "create"):
method = getattr(Moderation, "create")
method_signature = inspect.signature(method)
if "api_key" in method_signature.parameters:
param = method_signature.parameters["api_key"]
self.assertIsNone(param.default, "Default value for 'api_key' parameter is not None")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestModerationMethod))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
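After the record 2_4 patch, `Moderation.create` threads the per-call key into the instance it builds; a condensed sketch of the method body, with a usage line whose key string is a placeholder:

@classmethod
def create(cls, input, model=None, api_key=None):
    if model is not None and model not in cls.VALID_MODEL_NAMES:
        raise ValueError(
            f"The parameter model should be chosen from {cls.VALID_MODEL_NAMES} "
            f"and it is default to be None."
        )
    instance = cls(api_key=api_key)  # per-call key overrides the module-level default
    params = {"input": input}
    if model is not None:
        params["model"] = model
    return instance.request("post", cls.get_url(), params)

# Hypothetical usage:
# openai.Moderation.create(input="some text", api_key="sk-<placeholder>")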
https://github.com/teamqurrent/openai-python | Inside `openai/api_resources/engine.py`, add an `embeddings` method. This method should accept arbitrary keyword parameters, POST them to the engine's `/embeddings` endpoint, and return the embeddings from the OpenAI API. | 205d063 | requests >= 2.20
aiohttp
numpy
asyncio
matplotlib
plotly
pandas
scipy
scikit-learn
tenacity
typing-extensions
| python3.9 | 7227906 | diff --git a/openai/api_resources/engine.py b/openai/api_resources/engine.py
--- a/openai/api_resources/engine.py
+++ b/openai/api_resources/engine.py
@@ -30,3 +30,6 @@ class Engine(ListableAPIResource, UpdateableAPIResource):
def search(self, **params):
return self.request("post", self.instance_url() + "/search", params)
+
+ def embeddings(self, **params):
+ return self.request("post", self.instance_url() + "/embeddings", params)
| [
{
"content": "import time\n\nfrom openai import util\nfrom openai.api_resources.abstract import (\n ListableAPIResource,\n UpdateableAPIResource,\n)\nfrom openai.error import TryAgain\n\n\nclass Engine(ListableAPIResource, UpdateableAPIResource):\n OBJECT_NAME = \"engine\"\n\n def generate(self, timeout=None, **params):\n start = time.time()\n while True:\n try:\n return self.request(\n \"post\",\n self.instance_url() + \"/generate\",\n params,\n stream=params.get(\"stream\"),\n plain_old_data=True,\n )\n except TryAgain as e:\n if timeout is not None and time.time() > start + timeout:\n raise\n\n util.log_info(\"Waiting for model to warm up\", error=e)\n\n def search(self, **params):\n return self.request(\"post\", self.instance_url() + \"/search\", params)\n",
"path": "openai/api_resources/engine.py"
}
] | 2_5 | python | import sys
import unittest
import ast
import inspect
class TestEngineEmbeddingsMethod(unittest.TestCase):
def has_method(self, class_obj, method_name):
source_lines = inspect.getsource(class_obj)
tree = ast.parse(source_lines)
for node in ast.walk(tree):
if isinstance(node, ast.FunctionDef) and node.name == method_name:
return True
return False
def test_engine_embeddings(self):
from openai.api_resources.engine import Engine
self.assertTrue(self.has_method(Engine, "embeddings"), "Method 'embeddings' not found in Engine class")
args = inspect.getfullargspec(Engine.embeddings)
self.assertIn("self", args.args, "First argument of 'embeddings' method is not 'self'")
self.assertIsNotNone(args.varkw, "Variable keyword arguments (varkw) is None in 'embeddings' method")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestEngineEmbeddingsMethod))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
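The record 2_5 addition mirrors the existing `search` method; a sketch of the new method plus a hypothetical call (the engine id below is an assumption, not taken from the dataset):

def embeddings(self, **params):
    # POST the supplied params to this engine's /embeddings endpoint.
    return self.request("post", self.instance_url() + "/embeddings", params)

# Hypothetical usage:
# openai.Engine(id="text-similarity-babbage-001").embeddings(input=["hello world"])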
https://github.com/teamqurrent/storage-py | Add file_size_limit (int) and allowed_mime_types (str) attributes to the `BaseBucket` class inside `storage3/types.py` | ae9fc30 | typing-extensions == 4.2.0
httpx
python-dateutil == 2.8.2
toml
| python3.9 | 64e9e02 | diff --git a/storage3/types.py b/storage3/types.py
--- a/storage3/types.py
+++ b/storage3/types.py
@@ -18,6 +18,8 @@ class BaseBucket:
public: bool
created_at: datetime
updated_at: datetime
+ file_size_limit: int
+ allowed_mime_types: str
def __post_init__(self) -> None:
# created_at and updated_at are returned by the API as ISO timestamps
| [
{
"content": "from dataclasses import dataclass\nfrom datetime import datetime\nfrom typing import Optional, Union\n\nimport dateutil.parser\nfrom typing_extensions import Literal, TypedDict\n\nRequestMethod = Literal[\"GET\", \"POST\", \"DELETE\", \"PUT\", \"HEAD\"]\n\n\n@dataclass\nclass BaseBucket:\n \"\"\"Represents a file storage bucket.\"\"\"\n\n id: str\n name: str\n owner: str\n public: bool\n created_at: datetime\n updated_at: datetime\n\n def __post_init__(self) -> None:\n # created_at and updated_at are returned by the API as ISO timestamps\n # so we convert them to datetime objects\n self.created_at = dateutil.parser.isoparse(self.created_at) # type: ignore\n self.updated_at = dateutil.parser.isoparse(self.updated_at) # type: ignore\n\n\n# used in bucket.list method's option parameter\nclass _sortByType(TypedDict):\n column: str\n order: Literal[\"asc\", \"desc\"]\n\n\nclass ListBucketFilesOptions(TypedDict):\n limit: int\n offset: int\n sortBy: _sortByType\n\n\nclass TransformOptions(TypedDict):\n height: Optional[float]\n width: Optional[float]\n resize: Optional[Union[Literal[\"cover\"], Literal[\"contain\"], Literal[\"fill\"]]]\n\n\nclass CreateSignedURLOptions(TypedDict):\n download: Optional[Union[str, bool]]\n transform: Optional[TransformOptions]\n",
"path": "storage3/types.py"
}
] | 3_0 | python | import sys
import unittest
from dataclasses import fields
class TestBaseBucketClassVariables(unittest.TestCase):
def test_class_variables(self):
from storage3.types import BaseBucket
class_variables = [f.name for f in fields(BaseBucket)]
self.assertIn("file_size_limit", class_variables, "'file_size_limit' not found in BaseBucket class variables")
self.assertIn("allowed_mime_types", class_variables, "'allowed_mime_types' not found in BaseBucket class variables")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestBaseBucketClassVariables))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
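With the record 3_0 patch applied, the dataclass carries the two new fields after the timestamps; a sketch (the `__post_init__` timestamp parsing is unchanged from the quoted source):

from dataclasses import dataclass
from datetime import datetime

@dataclass
class BaseBucket:
    """Represents a file storage bucket."""
    id: str
    name: str
    owner: str
    public: bool
    created_at: datetime
    updated_at: datetime
    file_size_limit: int      # new field
    allowed_mime_types: str   # new field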
https://github.com/teamqurrent/storage-py | Modify the way StorageException is raised when an HTTPError is encountered during a request in the following files: `storage3/_async/bucket.py`, `storage3/_async/file_api.py`, `storage3/_sync/bucket.py`, and `storage3/_sync/file_api.py`. Update each file individually. Instead of passing the response JSON directly to the exception, merge all keys from the response JSON via dictionary unpacking and add the response's status code under the key 'statusCode', so that the response fields and the status code are raised together in a single dictionary. | 688cfc7 | typing-extensions == 4.2.0
httpx
python-dateutil == 2.8.2
toml
| python3.9 | 6923975 | diff --git a/storage3/_async/bucket.py b/storage3/_async/bucket.py
--- a/storage3/_async/bucket.py
+++ b/storage3/_async/bucket.py
@@ -31,7 +31,7 @@ class AsyncStorageBucketAPI:
try:
response.raise_for_status()
except HTTPError:
- raise StorageException(response.json())
+ raise StorageException({**response.json(), "statusCode": response.status_code})
return response
diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py
--- a/storage3/_async/file_api.py
+++ b/storage3/_async/file_api.py
@@ -35,7 +35,7 @@ class AsyncBucketActionsMixin:
try:
response.raise_for_status()
except HTTPError:
- raise StorageException(response.json())
+ raise StorageException({**response.json(), "statusCode": response.status_code})
return response
diff --git a/storage3/_sync/bucket.py b/storage3/_sync/bucket.py
--- a/storage3/_sync/bucket.py
+++ b/storage3/_sync/bucket.py
@@ -31,7 +31,7 @@ class SyncStorageBucketAPI:
try:
response.raise_for_status()
except HTTPError:
- raise StorageException(response.json())
+ raise StorageException({**response.json(), "statusCode": response.status_code})
return response
diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py
--- a/storage3/_sync/file_api.py
+++ b/storage3/_sync/file_api.py
@@ -35,7 +35,7 @@ class SyncBucketActionsMixin:
try:
response.raise_for_status()
except HTTPError:
- raise StorageException(response.json())
+ raise StorageException({**response.json(), "statusCode": response.status_code})
return response
| [
{
"content": "from __future__ import annotations\n\nfrom typing import Any, Optional\n\nfrom httpx import HTTPError, Response\n\nfrom ..types import RequestMethod\nfrom ..utils import AsyncClient, StorageException\nfrom .file_api import AsyncBucket\n\n__all__ = [\"AsyncStorageBucketAPI\"]\n\n\nclass AsyncStorageBucketAPI:\n \"\"\"This class abstracts access to the endpoint to the Get, List, Empty, and Delete operations on a bucket\"\"\"\n\n def __init__(self, url: str, headers: dict[str, str], session: AsyncClient) -> None:\n self.url = url\n self.headers = headers\n self._client = session\n\n async def _request(\n self,\n method: RequestMethod,\n url: str,\n json: Optional[dict[Any, Any]] = None,\n ) -> Response:\n response = await self._client.request(\n method, url, headers=self.headers, json=json\n )\n try:\n response.raise_for_status()\n except HTTPError:\n raise StorageException(response.json())\n\n return response\n\n async def list_buckets(self) -> list[AsyncBucket]:\n \"\"\"Retrieves the details of all storage buckets within an existing product.\"\"\"\n # if the request doesn't error, it is assured to return a list\n res = await self._request(\"GET\", f\"{self.url}/bucket\")\n return [\n AsyncBucket(\n **bucket, _url=self.url, _headers=self.headers, _client=self._client\n )\n for bucket in res.json()\n ]\n\n async def get_bucket(self, id: str) -> AsyncBucket:\n \"\"\"Retrieves the details of an existing storage bucket.\n\n Parameters\n ----------\n id\n The unique identifier of the bucket you would like to retrieve.\n \"\"\"\n res = await self._request(\"GET\", f\"{self.url}/bucket/{id}\")\n json = res.json()\n return AsyncBucket(\n **json, _url=self.url, _headers=self.headers, _client=self._client\n )\n\n async def create_bucket(\n self, id: str, name: Optional[str] = None, public: bool = False\n ) -> dict[str, str]:\n \"\"\"Creates a new storage bucket.\n\n Parameters\n ----------\n id\n A unique identifier for the bucket you are creating.\n name\n A name for the bucket you are creating. If not passed, the id is used as the name as well.\n public\n Whether the bucket you are creating should be publicly accessible. Defaults to False.\n \"\"\"\n res = await self._request(\n \"POST\",\n f\"{self.url}/bucket\",\n json={\"id\": id, \"name\": name or id, \"public\": public},\n )\n return res.json()\n\n async def empty_bucket(self, id: str) -> dict[str, str]:\n \"\"\"Removes all objects inside a single bucket.\n\n Parameters\n ----------\n id\n The unique identifier of the bucket you would like to empty.\n \"\"\"\n res = await self._request(\"POST\", f\"{self.url}/bucket/{id}/empty\", json={})\n return res.json()\n\n async def delete_bucket(self, id: str) -> dict[str, str]:\n \"\"\"Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first\n `empty()` the bucket.\n\n Parameters\n ----------\n id\n The unique identifier of the bucket you would like to delete.\n \"\"\"\n res = await self._request(\"DELETE\", f\"{self.url}/bucket/{id}\", json={})\n return res.json()\n",
"path": "storage3/_async/bucket.py"
},
{
"content": "from __future__ import annotations\nfrom dataclasses import dataclass, field\nfrom pathlib import Path\nfrom typing import Any, Optional, Union\n\nfrom httpx import HTTPError, Response\n\nfrom ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS\nfrom ..types import BaseBucket, ListBucketFilesOptions, RequestMethod\nfrom ..utils import AsyncClient, StorageException\n\n__all__ = [\"AsyncBucket\"]\n\n\nclass AsyncBucketActionsMixin:\n \"\"\"Functions needed to access the file API.\"\"\"\n\n id: str\n _url: str\n _headers: dict[str, str]\n _client: AsyncClient\n\n async def _request(\n self,\n method: RequestMethod,\n url: str,\n headers: Optional[dict[str, Any]] = None,\n json: Optional[dict[Any, Any]] = None,\n files: Optional[Any] = None,\n ) -> Response:\n headers = headers or {}\n response = await self._client.request(\n method, url, headers={**self._headers, **headers}, json=json, files=files\n )\n try:\n response.raise_for_status()\n except HTTPError:\n raise StorageException(response.json())\n\n return response\n\n async def create_signed_url(self, path: str, expires_in: int) -> dict[str, str]:\n \"\"\"\n Parameters\n ----------\n path\n file path to be downloaded, including the current file name.\n expires_in\n number of seconds until the signed URL expires.\n \"\"\"\n path = self._get_final_path(path)\n response = await self._request(\n \"POST\",\n f\"{self._url}/object/sign/{path}\",\n json={\"expiresIn\": str(expires_in)},\n )\n data = response.json()\n data[\"signedURL\"] = f\"{self._url}{data['signedURL']}\"\n return data\n\n def get_public_url(self, path: str) -> str:\n \"\"\"\n Parameters\n ----------\n path\n file path, including the path and file name. For example `folder/image.png`.\n \"\"\"\n _path = self._get_final_path(path)\n return f\"{self._url}/object/public/{_path}\"\n\n async def move(self, from_path: str, to_path: str) -> dict[str, str]:\n \"\"\"\n Moves an existing file, optionally renaming it at the same time.\n\n Parameters\n ----------\n from_path\n The original file path, including the current file name. For example `folder/image.png`.\n to_path\n The new file path, including the new file name. For example `folder/image-copy.png`.\n \"\"\"\n res = await self._request(\n \"POST\",\n f\"{self._url}/object/move\",\n json={\n \"bucketId\": self.id,\n \"sourceKey\": from_path,\n \"destinationKey\": to_path,\n },\n )\n return res.json()\n\n async def remove(self, paths: list) -> dict[str, str]:\n \"\"\"\n Deletes files within the same bucket\n\n Parameters\n ----------\n paths\n An array or list of files to be deletes, including the path and file name. 
For example [`folder/image.png`].\n \"\"\"\n response = await self._request(\n \"DELETE\",\n f\"{self._url}/object/{self.id}\",\n json={\"prefixes\": paths},\n )\n return response.json()\n\n async def list(\n self,\n path: Optional[str] = None,\n options: Optional[ListBucketFilesOptions] = None,\n ) -> list[dict[str, str]]:\n \"\"\"\n Lists all the files within a bucket.\n\n Parameters\n ----------\n path\n The folder path.\n options\n Search options, including `limit`, `offset`, and `sortBy`.\n \"\"\"\n extra_options = options or {}\n body = dict(DEFAULT_SEARCH_OPTIONS, **extra_options)\n extra_headers = {\"Content-Type\": \"application/json\"}\n body[\"prefix\"] = path or \"\"\n response = await self._request(\n \"POST\",\n f\"{self._url}/object/list/{self.id}\",\n json=body,\n headers=extra_headers,\n )\n return response.json()\n\n async def download(self, path: str) -> bytes:\n \"\"\"\n Downloads a file.\n\n Parameters\n ----------\n path\n The file path to be downloaded, including the path and file name. For example `folder/image.png`.\n \"\"\"\n _path = self._get_final_path(path)\n response = await self._request(\n \"GET\", f\"{self._url}/object/{_path}\", headers=self._headers\n )\n return response.content\n\n async def upload(\n self, path: str, file: Union[str, Path], file_options: Optional[dict] = None\n ) -> Response:\n \"\"\"\n Uploads a file to an existing bucket.\n\n Parameters\n ----------\n path\n The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`.\n The bucket must already exist before attempting to upload.\n file\n The File object to be stored in the bucket. or a async generator of chunks\n file_options\n HTTP headers. For example `cacheControl`\n \"\"\"\n if file_options is None:\n file_options = {}\n headers = dict(self._headers, **DEFAULT_FILE_OPTIONS, **file_options)\n filename = path.rsplit(\"/\", maxsplit=1)[-1]\n files = {\"file\": (filename, open(file, \"rb\"), headers[\"contentType\"])}\n _path = self._get_final_path(path)\n\n return await self._request(\n \"POST\",\n f\"{self._url}/object/{_path}\",\n files=files,\n headers=headers,\n )\n\n def _get_final_path(self, path: str) -> str:\n return f\"{self.id}/{path}\"\n\n\n# this class is returned by methods that fetch buckets, for example StorageBucketAPI.get_bucket\n# adding this mixin on the BaseBucket means that those bucket objects can also be used to\n# run methods like `upload` and `download`\n@dataclass(repr=False)\nclass AsyncBucket(BaseBucket, AsyncBucketActionsMixin):\n _url: str = field(repr=False)\n _headers: dict[str, str] = field(repr=False)\n _client: AsyncClient = field(repr=False)\n\n\n@dataclass\nclass AsyncBucketProxy(AsyncBucketActionsMixin):\n # contains the minimum required fields needed to query the file API endpoints\n # this object is returned by the `StorageClient.from_`` method\n id: str\n _url: str\n _headers: dict[str, str]\n _client: AsyncClient\n",
"path": "storage3/_async/file_api.py"
},
{
"content": "from __future__ import annotations\n\nfrom typing import Any, Optional\n\nfrom httpx import HTTPError, Response\n\nfrom ..types import RequestMethod\nfrom ..utils import SyncClient, StorageException\nfrom .file_api import SyncBucket\n\n__all__ = [\"SyncStorageBucketAPI\"]\n\n\nclass SyncStorageBucketAPI:\n \"\"\"This class abstracts access to the endpoint to the Get, List, Empty, and Delete operations on a bucket\"\"\"\n\n def __init__(self, url: str, headers: dict[str, str], session: SyncClient) -> None:\n self.url = url\n self.headers = headers\n self._client = session\n\n def _request(\n self,\n method: RequestMethod,\n url: str,\n json: Optional[dict[Any, Any]] = None,\n ) -> Response:\n response = self._client.request(\n method, url, headers=self.headers, json=json\n )\n try:\n response.raise_for_status()\n except HTTPError:\n raise StorageException(response.json())\n\n return response\n\n def list_buckets(self) -> list[SyncBucket]:\n \"\"\"Retrieves the details of all storage buckets within an existing product.\"\"\"\n # if the request doesn't error, it is assured to return a list\n res = self._request(\"GET\", f\"{self.url}/bucket\")\n return [\n SyncBucket(\n **bucket, _url=self.url, _headers=self.headers, _client=self._client\n )\n for bucket in res.json()\n ]\n\n def get_bucket(self, id: str) -> SyncBucket:\n \"\"\"Retrieves the details of an existing storage bucket.\n\n Parameters\n ----------\n id\n The unique identifier of the bucket you would like to retrieve.\n \"\"\"\n res = self._request(\"GET\", f\"{self.url}/bucket/{id}\")\n json = res.json()\n return SyncBucket(\n **json, _url=self.url, _headers=self.headers, _client=self._client\n )\n\n def create_bucket(\n self, id: str, name: Optional[str] = None, public: bool = False\n ) -> dict[str, str]:\n \"\"\"Creates a new storage bucket.\n\n Parameters\n ----------\n id\n A unique identifier for the bucket you are creating.\n name\n A name for the bucket you are creating. If not passed, the id is used as the name as well.\n public\n Whether the bucket you are creating should be publicly accessible. Defaults to False.\n \"\"\"\n res = self._request(\n \"POST\",\n f\"{self.url}/bucket\",\n json={\"id\": id, \"name\": name or id, \"public\": public},\n )\n return res.json()\n\n def empty_bucket(self, id: str) -> dict[str, str]:\n \"\"\"Removes all objects inside a single bucket.\n\n Parameters\n ----------\n id\n The unique identifier of the bucket you would like to empty.\n \"\"\"\n res = self._request(\"POST\", f\"{self.url}/bucket/{id}/empty\", json={})\n return res.json()\n\n def delete_bucket(self, id: str) -> dict[str, str]:\n \"\"\"Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first\n `empty()` the bucket.\n\n Parameters\n ----------\n id\n The unique identifier of the bucket you would like to delete.\n \"\"\"\n res = self._request(\"DELETE\", f\"{self.url}/bucket/{id}\", json={})\n return res.json()\n",
"path": "storage3/_sync/bucket.py"
},
{
"content": "from __future__ import annotations\nfrom dataclasses import dataclass, field\nfrom pathlib import Path\nfrom typing import Any, Optional, Union\n\nfrom httpx import HTTPError, Response\n\nfrom ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS\nfrom ..types import BaseBucket, ListBucketFilesOptions, RequestMethod\nfrom ..utils import SyncClient, StorageException\n\n__all__ = [\"SyncBucket\"]\n\n\nclass SyncBucketActionsMixin:\n \"\"\"Functions needed to access the file API.\"\"\"\n\n id: str\n _url: str\n _headers: dict[str, str]\n _client: SyncClient\n\n def _request(\n self,\n method: RequestMethod,\n url: str,\n headers: Optional[dict[str, Any]] = None,\n json: Optional[dict[Any, Any]] = None,\n files: Optional[Any] = None,\n ) -> Response:\n headers = headers or {}\n response = self._client.request(\n method, url, headers={**self._headers, **headers}, json=json, files=files\n )\n try:\n response.raise_for_status()\n except HTTPError:\n raise StorageException(response.json())\n\n return response\n\n def create_signed_url(self, path: str, expires_in: int) -> dict[str, str]:\n \"\"\"\n Parameters\n ----------\n path\n file path to be downloaded, including the current file name.\n expires_in\n number of seconds until the signed URL expires.\n \"\"\"\n path = self._get_final_path(path)\n response = self._request(\n \"POST\",\n f\"{self._url}/object/sign/{path}\",\n json={\"expiresIn\": str(expires_in)},\n )\n data = response.json()\n data[\"signedURL\"] = f\"{self._url}{data['signedURL']}\"\n return data\n\n def get_public_url(self, path: str) -> str:\n \"\"\"\n Parameters\n ----------\n path\n file path, including the path and file name. For example `folder/image.png`.\n \"\"\"\n _path = self._get_final_path(path)\n return f\"{self._url}/object/public/{_path}\"\n\n def move(self, from_path: str, to_path: str) -> dict[str, str]:\n \"\"\"\n Moves an existing file, optionally renaming it at the same time.\n\n Parameters\n ----------\n from_path\n The original file path, including the current file name. For example `folder/image.png`.\n to_path\n The new file path, including the new file name. For example `folder/image-copy.png`.\n \"\"\"\n res = self._request(\n \"POST\",\n f\"{self._url}/object/move\",\n json={\n \"bucketId\": self.id,\n \"sourceKey\": from_path,\n \"destinationKey\": to_path,\n },\n )\n return res.json()\n\n def remove(self, paths: list) -> dict[str, str]:\n \"\"\"\n Deletes files within the same bucket\n\n Parameters\n ----------\n paths\n An array or list of files to be deletes, including the path and file name. 
For example [`folder/image.png`].\n \"\"\"\n response = self._request(\n \"DELETE\",\n f\"{self._url}/object/{self.id}\",\n json={\"prefixes\": paths},\n )\n return response.json()\n\n def list(\n self,\n path: Optional[str] = None,\n options: Optional[ListBucketFilesOptions] = None,\n ) -> list[dict[str, str]]:\n \"\"\"\n Lists all the files within a bucket.\n\n Parameters\n ----------\n path\n The folder path.\n options\n Search options, including `limit`, `offset`, and `sortBy`.\n \"\"\"\n extra_options = options or {}\n body = dict(DEFAULT_SEARCH_OPTIONS, **extra_options)\n extra_headers = {\"Content-Type\": \"application/json\"}\n body[\"prefix\"] = path or \"\"\n response = self._request(\n \"POST\",\n f\"{self._url}/object/list/{self.id}\",\n json=body,\n headers=extra_headers,\n )\n return response.json()\n\n def download(self, path: str) -> bytes:\n \"\"\"\n Downloads a file.\n\n Parameters\n ----------\n path\n The file path to be downloaded, including the path and file name. For example `folder/image.png`.\n \"\"\"\n _path = self._get_final_path(path)\n response = self._request(\n \"GET\", f\"{self._url}/object/{_path}\", headers=self._headers\n )\n return response.content\n\n def upload(\n self, path: str, file: Union[str, Path], file_options: Optional[dict] = None\n ) -> Response:\n \"\"\"\n Uploads a file to an existing bucket.\n\n Parameters\n ----------\n path\n The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`.\n The bucket must already exist before attempting to upload.\n file\n The File object to be stored in the bucket. or a async generator of chunks\n file_options\n HTTP headers. For example `cacheControl`\n \"\"\"\n if file_options is None:\n file_options = {}\n headers = dict(self._headers, **DEFAULT_FILE_OPTIONS, **file_options)\n filename = path.rsplit(\"/\", maxsplit=1)[-1]\n files = {\"file\": (filename, open(file, \"rb\"), headers[\"contentType\"])}\n _path = self._get_final_path(path)\n\n return self._request(\n \"POST\",\n f\"{self._url}/object/{_path}\",\n files=files,\n headers=headers,\n )\n\n def _get_final_path(self, path: str) -> str:\n return f\"{self.id}/{path}\"\n\n\n# this class is returned by methods that fetch buckets, for example StorageBucketAPI.get_bucket\n# adding this mixin on the BaseBucket means that those bucket objects can also be used to\n# run methods like `upload` and `download`\n@dataclass(repr=False)\nclass SyncBucket(BaseBucket, SyncBucketActionsMixin):\n _url: str = field(repr=False)\n _headers: dict[str, str] = field(repr=False)\n _client: SyncClient = field(repr=False)\n\n\n@dataclass\nclass SyncBucketProxy(SyncBucketActionsMixin):\n # contains the minimum required fields needed to query the file API endpoints\n # this object is returned by the `StorageClient.from_`` method\n id: str\n _url: str\n _headers: dict[str, str]\n _client: SyncClient\n",
"path": "storage3/_sync/file_api.py"
}
] | 3_1 | python | import sys
import unittest
import ast
import inspect
import textwrap
class TestStorageExceptionArgs(unittest.TestCase):
def check_method(self, method):
source = inspect.getsource(method)
source = textwrap.dedent(source)
# Check if "**" is present in the source
self.assertIn("**", source, "Kwargs '**' not found in method source")
tree = ast.parse(source)
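        # Define an AST visitor that validates every raise statement found in the method body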
class StorageExceptionVisitor(ast.NodeVisitor):
def __init__(self, outer) -> None:
self.outer = outer
super().__init__()
def visit_Raise(self, node):
if isinstance(node.exc, ast.Call):
if isinstance(node.exc.func, ast.Name):
if node.exc.func.id != "StorageException":
self.outer.fail("Raised exception is not StorageException")
                    if node.exc.args and isinstance(node.exc.args[0], ast.Dict):
d = node.exc.args[0]
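                        # In ast.Dict, a ** expansion appears as a None key entry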
keys = []
for key in d.keys:
if isinstance(key, ast.Constant):
keys.append(key.value)
else:
keys.append(None)
if 'statusCode' not in keys:
self.outer.fail("statusCode key not found in StorageException")
if len(keys) < 2: # At least statusCode and one other key from response JSON
self.outer.fail("Insufficient keys in StorageException")
visitor = StorageExceptionVisitor(self)
visitor.visit(tree)
def test_storage_exception_args(self):
from storage3._async.bucket import AsyncStorageBucketAPI
from storage3._async.file_api import AsyncBucketActionsMixin
from storage3._sync.bucket import SyncStorageBucketAPI
from storage3._sync.file_api import SyncBucketActionsMixin
# Check all relevant methods in all specified files
methods_to_check = [
AsyncStorageBucketAPI._request,
AsyncBucketActionsMixin._request,
SyncStorageBucketAPI._request,
SyncBucketActionsMixin._request
]
for method in methods_to_check:
self.check_method(method)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestStorageExceptionArgs))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main() |
https://github.com/teamqurrent/storage-py | We need to rename storage to storage3. Follow the steps provided to achieve this. Change occurrences of 'storage' to 'storage3' in both the `pyproject.toml`'s name and version_files attributes. Update the imports inside `storage/storage_client.py` to import from the storage3 module instead of storage. Rename the directory storage to storage3. | d41b573 | typing-extensions == 4.2.0
httpx
python-dateutil == 2.8.2
toml
| python3.9 | c62151c | diff --git a/pyproject.toml b/pyproject.toml
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,5 @@
[tool.poetry]
-name = "storage"
+name = "storage3"
version = "0.1.0"
description = "Supabase Storage client for Python."
authors = ["Joel Lee <joel@joellee.org>", "Leon Fedden <leonfedden@gmail.com>", "Daniel Reinón García <danielreinon@outlook.com>", "Leynier Gutiérrez González <leynier41@gmail.com>"]
@@ -31,7 +31,7 @@ commitizen = "^2.20.3"
name = "cz_conventional_commits"
version = "0.1.0"
version_files = [
- "storage/__init__.py",
+ "storage3/__init__.py",
"pyproject.toml:version"
]
tag_format = "v$version"
diff --git a/storage/__init__.py b/storage3/__init__.py
similarity index 100%
rename from storage/__init__.py
rename to storage3/__init__.py
diff --git a/storage/lib/__init__.py b/storage3/lib/__init__.py
similarity index 100%
rename from storage/lib/__init__.py
rename to storage3/lib/__init__.py
diff --git a/storage/lib/storage_bucket_api.py b/storage3/lib/storage_bucket_api.py
similarity index 100%
rename from storage/lib/storage_bucket_api.py
rename to storage3/lib/storage_bucket_api.py
diff --git a/storage/lib/storage_file_api.py b/storage3/lib/storage_file_api.py
similarity index 100%
rename from storage/lib/storage_file_api.py
rename to storage3/lib/storage_file_api.py
diff --git a/storage/storage_client.py b/storage3/storage_client.py
similarity index 86%
rename from storage/storage_client.py
rename to storage3/storage_client.py
--- a/storage/storage_client.py
+++ b/storage3/storage_client.py
@@ -1,7 +1,7 @@
from typing import Dict
-from storage.lib.storage_bucket_api import StorageBucketAPI
-from storage.lib.storage_file_api import StorageFileAPI
+from storage3.lib.storage_bucket_api import StorageBucketAPI
+from storage3.lib.storage_file_api import StorageFileAPI
class SupabaseStorageClient(StorageBucketAPI):
| [
{
"content": "[tool.poetry]\nname = \"storage\"\nversion = \"0.1.0\"\ndescription = \"Supabase Storage client for Python.\"\nauthors = [\"Joel Lee <joel@joellee.org>\", \"Leon Fedden <leonfedden@gmail.com>\", \"Daniel Reinón García <danielreinon@outlook.com>\", \"Leynier Gutiérrez González <leynier41@gmail.com>\"]\nhomepage = \"https://github.com/supabase-community/storage-py\"\nrepository = \"https://github.com/supabase-community/storage-py\"\ndocumentation = \"https://github.com/supabase-community/storage-py\"\nreadme = \"README.md\"\nlicense = \"MIT\"\nclassifiers = [\n \"Programming Language :: Python :: 3\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\"\n]\n\n[tool.poetry.dependencies]\npython = \"^3.7\"\nhttpx = \">=0.19,<0.22\"\n\n[tool.poetry.dev-dependencies]\npre-commit = \"^2.16.0\"\nblack = \"^21.11b1\"\npytest = \"^6.2.5\"\nflake8 = \"^4.0.1\"\nisort = \"^5.9.3\"\npytest-cov = \"^3.0.0\"\ncommitizen = \"^2.20.3\"\n\n[tool.commitizen]\nname = \"cz_conventional_commits\"\nversion = \"0.1.0\"\nversion_files = [\n \"storage/__init__.py\",\n \"pyproject.toml:version\"\n]\ntag_format = \"v$version\"\n\n[build-system]\nrequires = [\"poetry-core>=1.0.0\"]\nbuild-backend = \"poetry.core.masonry.api\"\n",
"path": "pyproject.toml"
},
{
"content": "from typing import Dict\n\nfrom storage.lib.storage_bucket_api import StorageBucketAPI\nfrom storage.lib.storage_file_api import StorageFileAPI\n\n\nclass SupabaseStorageClient(StorageBucketAPI):\n \"\"\"\n Manage the storage bucket and files\n Examples\n --------\n >>> url = storage_file.create_signed_url(\"something/test2.txt\", 80) # signed url\n >>> loop.run_until_complete(storage_file.download(\"something/test2.txt\")) # upload or download\n >>> loop.run_until_complete(storage_file.upload(\"something/test2.txt\",\"path_file_upload\"))\n >>> list_buckets = storage.list_buckets()\n >>> list_files = storage_file.list(\"something\")\n \"\"\"\n\n def __init__(self, url: str, headers: Dict[str, str]):\n super().__init__(url, headers)\n\n def StorageFileAPI(self, id_: str) -> StorageFileAPI:\n return StorageFileAPI(self.url, self.headers, id_)\n",
"path": "storage/storage_client.py"
}
] | 3_2 | python | import sys
import unittest
import ast
import os
import toml
class TestSupabasePyRequirements(unittest.TestCase):
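    # Helper: parse a file and collect the dotted names of everything it imports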
def get_imports(self, file_path):
with open(file_path, "r") as file:
tree = ast.parse(file.read())
imports = []
for node in ast.walk(tree):
if isinstance(node, ast.Import):
for name in node.names:
imports.append(name.name)
elif isinstance(node, ast.ImportFrom):
module = node.module
for name in node.names:
import_name = module + "." + name.name if module else name.name
imports.append(import_name)
return imports
def test_requirements_and_imports(self):
parsed_toml = toml.load("pyproject.toml")
self.assertEqual(parsed_toml["tool"]["poetry"]["name"], "storage3", "Project name in pyproject.toml is not 'storage3'")
self.assertEqual(len(parsed_toml["tool"]["commitizen"]["version_files"]), 2, "Number of version files in pyproject.toml is not 2")
self.assertIn("storage3/__init__.py", parsed_toml["tool"]["commitizen"]["version_files"], "storage3/__init__.py not in version files of pyproject.toml")
if not os.path.exists("storage") and os.path.exists("storage3"):
self.assertIn("storage3.lib.storage_bucket_api.StorageBucketAPI", self.get_imports("storage3/storage_client.py"), "Import 'storage3.lib.storage_bucket_api.StorageBucketAPI' not found")
self.assertIn("storage3.lib.storage_file_api.StorageFileAPI", self.get_imports("storage3/storage_client.py"), "Import 'storage3.lib.storage_file_api.StorageFileAPI' not found")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSupabasePyRequirements))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
https://github.com/teamqurrent/jquery | In the `.gitattributes` file, make sure that mjs and cjs files use UNIX (LF) line endings as well | 2b6b5e0 | python3.9 | 198b41c8 | diff --git a/.gitattributes b/.gitattributes
--- a/.gitattributes
+++ b/.gitattributes
@@ -2,4 +2,7 @@
* text=auto
# JS files must always use LF for tools to work
+# JS files may have mjs or cjs extensions now as well
*.js eol=lf
+*.cjs eol=lf
+*.mjs eol=lf
| [
{
"content": "# Auto detect text files and perform LF normalization\n* text=auto\n\n# JS files must always use LF for tools to work\n*.js eol=lf\n",
"path": ".gitattributes"
}
] | 4_0 | javascript | import sys
import unittest
class TestGitAttributes(unittest.TestCase):
def test_gitattributes_contents(self):
with open("./.gitattributes", "r") as f:
lines = f.readlines()
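        # Accept both the bare "eol=lf" form and the "text eol=lf" form for each extension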
self.assertTrue(
("*.cjs eol=lf\n" in lines or "*.cjs text eol=lf\n" in lines) and
("*.mjs eol=lf\n" in lines or "*.mjs text eol=lf\n" in lines) and
("*.js eol=lf\n" in lines or "*.js text eol=lf\n" in lines) and
"* text=auto\n" in lines,
"Required lines not found in .gitattributes"
)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestGitAttributes))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
https://github.com/teamqurrent/jquery | Modify the deprecated `.hover()` method in `src/deprecated/event.js` so that it does not rely on other deprecated methods: `.mouseenter()` & `.mouseleave()`. Use `.on()` instead. | 6616acf | python3.9 | fd6ffc5e | diff --git a/src/deprecated/event.js b/src/deprecated/event.js
--- a/src/deprecated/event.js
+++ b/src/deprecated/event.js
@@ -24,7 +24,9 @@ jQuery.fn.extend( {
},
hover: function( fnOver, fnOut ) {
- return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver );
+ return this
+ .on( "mouseenter", fnOver )
+ .on( "mouseleave", fnOut || fnOver );
}
} );
| [
{
"content": "import jQuery from \"../core.js\";\n\nimport \"../event.js\";\nimport \"../event/trigger.js\";\n\njQuery.fn.extend( {\n\n\tbind: function( types, data, fn ) {\n\t\treturn this.on( types, null, data, fn );\n\t},\n\tunbind: function( types, fn ) {\n\t\treturn this.off( types, null, fn );\n\t},\n\n\tdelegate: function( selector, types, data, fn ) {\n\t\treturn this.on( types, selector, data, fn );\n\t},\n\tundelegate: function( selector, types, fn ) {\n\n\t\t// ( namespace ) or ( selector, types [, fn] )\n\t\treturn arguments.length === 1 ?\n\t\t\tthis.off( selector, \"**\" ) :\n\t\t\tthis.off( types, selector || \"**\", fn );\n\t},\n\n\thover: function( fnOver, fnOut ) {\n\t\treturn this.mouseenter( fnOver ).mouseleave( fnOut || fnOver );\n\t}\n} );\n\njQuery.each(\n\t( \"blur focus focusin focusout resize scroll click dblclick \" +\n\t\"mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave \" +\n\t\"change select submit keydown keypress keyup contextmenu\" ).split( \" \" ),\n\tfunction( _i, name ) {\n\n\t\t// Handle event binding\n\t\tjQuery.fn[ name ] = function( data, fn ) {\n\t\t\treturn arguments.length > 0 ?\n\t\t\t\tthis.on( name, null, data, fn ) :\n\t\t\t\tthis.trigger( name );\n\t\t};\n\t}\n);\n",
"path": "src/deprecated/event.js"
}
] | 4_1 | javascript | import sys
import unittest
class TestEventJSContents(unittest.TestCase):
def test_event_js_contents(self):
with open("./src/deprecated/event.js", "r") as f:
content = f.read()
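        # Locate the body of the deprecated hover() method by scanning for its braces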
hover_start = content.find("hover: function(")
self.assertNotEqual(hover_start, -1, "hover: function( not found in content")
hover_content_start = content.find("{", hover_start) + 1
hover_content_end = content.find("}", hover_content_start)
hover_content = content[hover_content_start:hover_content_end].strip()
self.assertIn(".on(", hover_content, ".on( not found in hover_content")
self.assertNotIn(".mouseenter(", hover_content, ".mouseenter( found in hover_content")
self.assertNotIn(".mouseleave(", hover_content, ".mouseleave( found in hover_content")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestEventJSContents))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
https://github.com/teamqurrent/jquery | The `root` argument of `jQuery.fn.init` was needed to support `jQuery.sub`, but now this parameter is no longer needed. Remove it from the function arguments and only use rootjQuery instead. | 8cf39b7 | python3.9 | d2436df3 | diff --git a/src/core/init.js b/src/core/init.js
--- a/src/core/init.js
+++ b/src/core/init.js
@@ -15,7 +15,7 @@ var rootjQuery,
// Shortcut simple #id case for speed
rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/,
- init = jQuery.fn.init = function( selector, context, root ) {
+ init = jQuery.fn.init = function( selector, context ) {
var match, elem;
// HANDLE: $(""), $(null), $(undefined), $(false)
@@ -23,10 +23,6 @@ var rootjQuery,
return this;
}
- // Method init() accepts an alternate rootjQuery
- // so migrate can support jQuery.sub (gh-2101)
- root = root || rootjQuery;
-
// HANDLE: $(DOMElement)
if ( selector.nodeType ) {
this[ 0 ] = selector;
@@ -36,8 +32,8 @@ var rootjQuery,
// HANDLE: $(function)
// Shortcut for document ready
} else if ( typeof selector === "function" ) {
- return root.ready !== undefined ?
- root.ready( selector ) :
+ return rootjQuery.ready !== undefined ?
+ rootjQuery.ready( selector ) :
// Execute immediately if ready is not present
selector( jQuery );
@@ -108,7 +104,7 @@ var rootjQuery,
// HANDLE: $(expr) & $(expr, $(...))
} else if ( !context || context.jquery ) {
- return ( context || root ).find( selector );
+ return ( context || rootjQuery ).find( selector );
// HANDLE: $(expr, context)
// (which is just equivalent to: $(context).find(expr)
| [
{
"content": "// Initialize a jQuery object\nimport jQuery from \"../core.js\";\nimport document from \"../var/document.js\";\nimport rsingleTag from \"./var/rsingleTag.js\";\nimport isObviousHtml from \"./isObviousHtml.js\";\n\nimport \"../traversing/findFilter.js\";\n\n// A central reference to the root jQuery(document)\nvar rootjQuery,\n\n\t// A simple way to check for HTML strings\n\t// Prioritize #id over <tag> to avoid XSS via location.hash (trac-9521)\n\t// Strict HTML recognition (trac-11290: must start with <)\n\t// Shortcut simple #id case for speed\n\trquickExpr = /^(?:\\s*(<[\\w\\W]+>)[^>]*|#([\\w-]+))$/,\n\n\tinit = jQuery.fn.init = function( selector, context, root ) {\n\t\tvar match, elem;\n\n\t\t// HANDLE: $(\"\"), $(null), $(undefined), $(false)\n\t\tif ( !selector ) {\n\t\t\treturn this;\n\t\t}\n\n\t\t// Method init() accepts an alternate rootjQuery\n\t\t// so migrate can support jQuery.sub (gh-2101)\n\t\troot = root || rootjQuery;\n\n\t\t// HANDLE: $(DOMElement)\n\t\tif ( selector.nodeType ) {\n\t\t\tthis[ 0 ] = selector;\n\t\t\tthis.length = 1;\n\t\t\treturn this;\n\n\t\t// HANDLE: $(function)\n\t\t// Shortcut for document ready\n\t\t} else if ( typeof selector === \"function\" ) {\n\t\t\treturn root.ready !== undefined ?\n\t\t\t\troot.ready( selector ) :\n\n\t\t\t\t// Execute immediately if ready is not present\n\t\t\t\tselector( jQuery );\n\n\t\t} else {\n\n\t\t\t// Handle obvious HTML strings\n\t\t\tmatch = selector + \"\";\n\t\t\tif ( isObviousHtml( match ) ) {\n\n\t\t\t\t// Assume that strings that start and end with <> are HTML and skip\n\t\t\t\t// the regex check. This also handles browser-supported HTML wrappers\n\t\t\t\t// like TrustedHTML.\n\t\t\t\tmatch = [ null, selector, null ];\n\n\t\t\t// Handle HTML strings or selectors\n\t\t\t} else if ( typeof selector === \"string\" ) {\n\t\t\t\tmatch = rquickExpr.exec( selector );\n\t\t\t} else {\n\t\t\t\treturn jQuery.makeArray( selector, this );\n\t\t\t}\n\n\t\t\t// Match html or make sure no context is specified for #id\n\t\t\t// Note: match[1] may be a string or a TrustedHTML wrapper\n\t\t\tif ( match && ( match[ 1 ] || !context ) ) {\n\n\t\t\t\t// HANDLE: $(html) -> $(array)\n\t\t\t\tif ( match[ 1 ] ) {\n\t\t\t\t\tcontext = context instanceof jQuery ? context[ 0 ] : context;\n\n\t\t\t\t\t// Option to run scripts is true for back-compat\n\t\t\t\t\t// Intentionally let the error be thrown if parseHTML is not present\n\t\t\t\t\tjQuery.merge( this, jQuery.parseHTML(\n\t\t\t\t\t\tmatch[ 1 ],\n\t\t\t\t\t\tcontext && context.nodeType ? 
context.ownerDocument || context : document,\n\t\t\t\t\t\ttrue\n\t\t\t\t\t) );\n\n\t\t\t\t\t// HANDLE: $(html, props)\n\t\t\t\t\tif ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) {\n\t\t\t\t\t\tfor ( match in context ) {\n\n\t\t\t\t\t\t\t// Properties of context are called as methods if possible\n\t\t\t\t\t\t\tif ( typeof this[ match ] === \"function\" ) {\n\t\t\t\t\t\t\t\tthis[ match ]( context[ match ] );\n\n\t\t\t\t\t\t\t// ...and otherwise set as attributes\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tthis.attr( match, context[ match ] );\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\treturn this;\n\n\t\t\t\t// HANDLE: $(#id)\n\t\t\t\t} else {\n\t\t\t\t\telem = document.getElementById( match[ 2 ] );\n\n\t\t\t\t\tif ( elem ) {\n\n\t\t\t\t\t\t// Inject the element directly into the jQuery object\n\t\t\t\t\t\tthis[ 0 ] = elem;\n\t\t\t\t\t\tthis.length = 1;\n\t\t\t\t\t}\n\t\t\t\t\treturn this;\n\t\t\t\t}\n\n\t\t\t// HANDLE: $(expr) & $(expr, $(...))\n\t\t\t} else if ( !context || context.jquery ) {\n\t\t\t\treturn ( context || root ).find( selector );\n\n\t\t\t// HANDLE: $(expr, context)\n\t\t\t// (which is just equivalent to: $(context).find(expr)\n\t\t\t} else {\n\t\t\t\treturn this.constructor( context ).find( selector );\n\t\t\t}\n\t\t}\n\n\t};\n\n// Give the init function the jQuery prototype for later instantiation\ninit.prototype = jQuery.fn;\n\n// Initialize central reference\nrootjQuery = jQuery( document );\n",
"path": "src/core/init.js"
}
] | 4_2 | javascript | import sys
import unittest
import re
class TestInitJSContents(unittest.TestCase):
def test_init_js_contents(self):
with open("./src/core/init.js", "r") as f:
content = f.read()
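        # Extract the parameter list of jQuery.fn.init with a regex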
pattern = r"jQuery\.fn\.init\s*=\s*function\s*\(([^)]+)\)"
match = re.search(pattern, content)
if match:
args = [arg.strip() for arg in match.group(1).split(",")]
else:
args = []
self.assertEqual(len(args), 2, "Number of arguments is not 2")
self.assertIn("selector", args, "'selector' argument not found")
self.assertIn("context", args, "'context' argument not found")
self.assertNotIn("root = root || rootJQuery;", content, "Unexpected content found: 'root = root || rootJQuery;'")
self.assertNotIn("root.ready", content, "Unexpected content found: 'root.ready'")
self.assertNotIn("context || root )", content, "Unexpected content found: 'context || root )'")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestInitJSContents))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
https://github.com/teamqurrent/lodash | The `opt-cli` pre-push functionality was removed from lodash just a few days after it was added, but the documentation encouraging contributors to use it still remains. Remove the tips from the `CONTRIBUTING.md` file to avoid confusion for new contributors. | e002948 | python3.9 | 2f900b62f | diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -71,14 +71,3 @@ Guidelines are enforced using [ESLint](https://www.npmjs.com/package/eslint):
```bash
$ npm run style
```
-
-## Tips
-
-You can opt-in to a pre-push git hook by adding an `.opt-in` file to the root of
-the project containing:
-```txt
-pre-push
-```
-
-With that, when you `git push`, the pre-push git hook will trigger and execute
-`npm run validate`.
| [
{
"content": "# :construction: Notice :construction:\n\nPardon the mess. The `master` branch is in flux while we work on Lodash v5. This\nmeans things like npm scripts, which we encourage using for contributions, may\nnot be working. Thank you for your patience.\n\n# Contributing to Lodash\n\nContributions are always welcome. Before contributing please read the\n[code of conduct](https://code-of-conduct.openjsf.org) &\n[search the issue tracker](https://github.com/lodash/lodash/issues); your issue\nmay have already been discussed or fixed in `master`. To contribute,\n[fork](https://help.github.com/articles/fork-a-repo/) Lodash, commit your changes,\n& [send a pull request](https://help.github.com/articles/using-pull-requests/).\n\n## Feature Requests\n\nFeature requests should be submitted in the\n[issue tracker](https://github.com/lodash/lodash/issues), with a description of\nthe expected behavior & use case, where they’ll remain closed until sufficient interest,\n[e.g. :+1: reactions](https://help.github.com/articles/about-discussions-in-issues-and-pull-requests/),\nhas been [shown by the community](https://github.com/lodash/lodash/issues?q=label%3A%22votes+needed%22+sort%3Areactions-%2B1-desc).\nBefore submitting a request, please search for similar ones in the\n[closed issues](https://github.com/lodash/lodash/issues?q=is%3Aissue+is%3Aclosed+label%3Aenhancement).\n\n## Pull Requests\n\nFor additions or bug fixes you should only need to modify `lodash.js`. Include\nupdated unit tests in the `test` directory as part of your pull request. Don’t\nworry about regenerating the `dist/` or `doc/` files.\n\nBefore running the unit tests you’ll need to install, `npm i`,\n[development dependencies](https://docs.npmjs.com/files/package.json#devdependencies).\nRun unit tests from the command-line via `npm test`, or open `test/index.html` &\n`test/fp.html` in a web browser. The [Backbone](http://backbonejs.org/) &\n[Underscore](http://underscorejs.org/) test suites are included as well.\n\n## Contributor License Agreement\n\nLodash is a member of the [JS Foundation](https://openjsf.org/).\nAs such, we request that all contributors sign the JS Foundation\n[contributor license agreement (CLA)](https://cla.js.foundation/lodash/lodash).\n\nFor more information about CLAs, please check out Alex Russell’s excellent post,\n[“Why Do I Need to Sign This?”](https://infrequently.org/2008/06/why-do-i-need-to-sign-this/).\n\n## Coding Guidelines\n\nIn addition to the following guidelines, please follow the conventions already\nestablished in the code.\n\n- **Spacing**:<br>\n Use two spaces for indentation. 
No tabs.\n\n- **Naming**:<br>\n Keep variable & method names concise & descriptive.<br>\n Variable names `index`, `array`, & `iteratee` are preferable to\n `i`, `arr`, & `fn`.\n\n- **Quotes**:<br>\n Single-quoted strings are preferred to double-quoted strings; however,\n please use a double-quoted string if the value contains a single-quote\n character to avoid unnecessary escaping.\n\n- **Comments**:<br>\n Please use single-line comments to annotate significant additions, &\n [JSDoc-style](http://www.2ality.com/2011/08/jsdoc-intro.html) comments for\n functions.\n\nGuidelines are enforced using [ESLint](https://www.npmjs.com/package/eslint):\n```bash\n$ npm run style\n```\n\n## Tips\n\nYou can opt-in to a pre-push git hook by adding an `.opt-in` file to the root of\nthe project containing:\n```txt\npre-push\n```\n\nWith that, when you `git push`, the pre-push git hook will trigger and execute\n`npm run validate`.\n",
"path": ".github/CONTRIBUTING.md"
}
] | 5_0 | javascript | import sys
import unittest
class TestContributingMDContents(unittest.TestCase):
def test_contributing_md_contents(self):
with open("./.github/CONTRIBUTING.md", "r") as f:
content = f.read()
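        # The whole Tips section, including its pre-push instructions, must be gone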
self.assertNotIn("## Tips", content, "'## Tips' found in CONTRIBUTING.md")
self.assertNotIn("You can opt-in to a pre-push git hook by adding an `.opt-in` file to the root of the project containing:", content, "Opt-in instructions found in CONTRIBUTING.md")
self.assertNotIn("With that, when you `git push`, the pre-push git hook will trigger and execute `npm run validate`.", content, "Pre-push git hook instructions found in CONTRIBUTING.md")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestContributingMDContents))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
https://github.com/teamqurrent/lodash | `nativeKeys.js` and `nativeKeysIn.js` each define a single function: nativeKeys and nativeKeysIn, respectively. Remove the files from the project. Search the repo and see if the functions are imported anywhere else. If they are, remove the imports and inline each function's body at its call sites. | f3e0cbe | python3.9 | f7a6cddc9 | diff --git a/.internal/baseKeys.js b/.internal/baseKeys.js
--- a/.internal/baseKeys.js
+++ b/.internal/baseKeys.js
@@ -1,5 +1,4 @@
import isPrototype from './isPrototype.js'
-import nativeKeys from './nativeKeys.js'
/** Used to check objects for own properties. */
const hasOwnProperty = Object.prototype.hasOwnProperty
@@ -13,7 +12,7 @@ const hasOwnProperty = Object.prototype.hasOwnProperty
*/
function baseKeys(object) {
if (!isPrototype(object)) {
- return nativeKeys(object)
+ return Object.keys(Object(object))
}
const result = []
for (const key in Object(object)) {
diff --git a/.internal/baseKeysIn.js b/.internal/baseKeysIn.js
--- a/.internal/baseKeysIn.js
+++ b/.internal/baseKeysIn.js
@@ -1,6 +1,5 @@
import isObject from '../isObject.js'
import isPrototype from './isPrototype.js'
-import nativeKeysIn from './nativeKeysIn.js'
/** Used to check objects for own properties. */
const hasOwnProperty = Object.prototype.hasOwnProperty
@@ -13,12 +12,17 @@ const hasOwnProperty = Object.prototype.hasOwnProperty
* @returns {Array} Returns the array of property names.
*/
function baseKeysIn(object) {
+ const result = []
+ if (object == null) {
+ return result
+ }
if (!isObject(object)) {
- return nativeKeysIn(object)
+ for (const key in Object(object)) {
+ result.push(key)
+ }
+ return result
}
const isProto = isPrototype(object)
- const result = []
-
for (const key in object) {
if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) {
result.push(key)
diff --git a/.internal/nativeKeys.js b/.internal/nativeKeys.js
deleted file mode 100644
--- a/.internal/nativeKeys.js
+++ /dev/null
@@ -1,14 +0,0 @@
-/**
- * This function is a thin wrapper around
- * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys)
- * which ensures non-object values are coerced to objects beforehand.
- *
- * @private
- * @param {Object} object The object to query.
- * @returns {Array} Returns the array of property names.
- */
-function nativeKeys(object) {
- return Object.keys(Object(object))
-}
-
-export default nativeKeys
diff --git a/.internal/nativeKeysIn.js b/.internal/nativeKeysIn.js
deleted file mode 100644
--- a/.internal/nativeKeysIn.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/**
- * This function is like
- * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys)
- * except that it includes inherited enumerable properties.
- *
- * @private
- * @param {Object} object The object to query.
- * @returns {Array} Returns the array of property names.
- */
-function nativeKeysIn(object) {
- const result = []
- if (object != null) {
- for (const key in Object(object)) {
- result.push(key)
- }
- }
- return result
-}
-
-export default nativeKeysIn
| [
{
"content": "import isPrototype from './isPrototype.js'\nimport nativeKeys from './nativeKeys.js'\n\n/** Used to check objects for own properties. */\nconst hasOwnProperty = Object.prototype.hasOwnProperty\n\n/**\n * The base implementation of `keys` which doesn't treat sparse arrays as dense.\n *\n * @private\n * @param {Object} object The object to query.\n * @returns {Array} Returns the array of property names.\n */\nfunction baseKeys(object) {\n if (!isPrototype(object)) {\n return nativeKeys(object)\n }\n const result = []\n for (const key in Object(object)) {\n if (hasOwnProperty.call(object, key) && key != 'constructor') {\n result.push(key)\n }\n }\n return result\n}\n\nexport default baseKeys\n",
"path": ".internal/baseKeys.js"
},
{
"content": "import isObject from '../isObject.js'\nimport isPrototype from './isPrototype.js'\nimport nativeKeysIn from './nativeKeysIn.js'\n\n/** Used to check objects for own properties. */\nconst hasOwnProperty = Object.prototype.hasOwnProperty\n\n/**\n * The base implementation of `keysIn` which doesn't treat sparse arrays as dense.\n *\n * @private\n * @param {Object} object The object to query.\n * @returns {Array} Returns the array of property names.\n */\nfunction baseKeysIn(object) {\n if (!isObject(object)) {\n return nativeKeysIn(object)\n }\n const isProto = isPrototype(object)\n const result = []\n\n for (const key in object) {\n if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) {\n result.push(key)\n }\n }\n return result\n}\n\nexport default baseKeysIn\n",
"path": ".internal/baseKeysIn.js"
},
{
"content": "/**\n * This function is a thin wrapper around\n * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys)\n * which ensures non-object values are coerced to objects beforehand.\n *\n * @private\n * @param {Object} object The object to query.\n * @returns {Array} Returns the array of property names.\n */\nfunction nativeKeys(object) {\n return Object.keys(Object(object))\n}\n\nexport default nativeKeys\n",
"path": ".internal/nativeKeys.js"
},
{
"content": "/**\n * This function is like\n * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys)\n * except that it includes inherited enumerable properties.\n *\n * @private\n * @param {Object} object The object to query.\n * @returns {Array} Returns the array of property names.\n */\nfunction nativeKeysIn(object) {\n const result = []\n if (object != null) {\n for (const key in Object(object)) {\n result.push(key)\n }\n }\n return result\n}\n\nexport default nativeKeysIn\n",
"path": ".internal/nativeKeysIn.js"
}
] | 5_1 | javascript | import sys
import unittest
import os
class TestInternalJSFiles(unittest.TestCase):
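    # Verify the native key helper files were deleted and their logic inlined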
def test_internal_js_files(self):
# Assert the absence of specific files
self.assertTrue(not os.path.isfile("./.internal/nativeKeys.js"), "File ./.internal/nativeKeys.js should not exist")
self.assertTrue(not os.path.isfile("./.internal/nativeKeysIn.js"), "File ./.internal/nativeKeysIn.js should not exist")
# Assert the presence of specific content in baseKeys.js
with open("./.internal/baseKeys.js", "r") as f:
base_keys_content = f.read()
self.assertTrue("Object.keys(Object(object))" in base_keys_content, "String 'Object.keys(Object(object))' not found in baseKeys.js")
# Assert the presence of specific content in baseKeysIn.js
with open("./.internal/baseKeysIn.js", "r") as f:
base_keys_in_content = f.read()
expected_content = """for (const key in Object(object)) {
result.push(key)
}"""
self.assertTrue(expected_content in base_keys_in_content, "Expected block not found in baseKeysIn.js")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestInternalJSFiles))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
https://github.com/teamqurrent/Python | Add a simple_moving_average.py calculation to the `financial` directory. The function should be named simple_moving_average and the args should be: data: Sequence[float], window_size: int. The function should return a list of SMA values as floats; positions without a full window may be None or omitted. | 417b7ed | python3.10 | d051db1f | diff --git a/financial/simple_moving_average.py b/financial/simple_moving_average.py
new file mode 100644
--- /dev/null
+++ b/financial/simple_moving_average.py
@@ -0,0 +1,68 @@
+"""
+The Simple Moving Average (SMA) is a statistical calculation used to analyze data points
+by creating a constantly updated average price over a specific time period.
+In finance, SMA is often used in time series analysis to smooth out price data
+and identify trends.
+
+Reference: https://en.wikipedia.org/wiki/Moving_average
+"""
+from collections.abc import Sequence
+
+
+def simple_moving_average(
+ data: Sequence[float], window_size: int
+) -> list[float | None]:
+ """
+ Calculate the simple moving average (SMA) for some given time series data.
+
+ :param data: A list of numerical data points.
+ :param window_size: An integer representing the size of the SMA window.
+ :return: A list of SMA values with the same length as the input data.
+
+ Examples:
+ >>> sma = simple_moving_average([10, 12, 15, 13, 14, 16, 18, 17, 19, 21], 3)
+ >>> [round(value, 2) if value is not None else None for value in sma]
+ [None, None, 12.33, 13.33, 14.0, 14.33, 16.0, 17.0, 18.0, 19.0]
+ >>> simple_moving_average([10, 12, 15], 5)
+ [None, None, None]
+ >>> simple_moving_average([10, 12, 15, 13, 14, 16, 18, 17, 19, 21], 0)
+ Traceback (most recent call last):
+ ...
+ ValueError: Window size must be a positive integer
+ """
+ if window_size < 1:
+ raise ValueError("Window size must be a positive integer")
+
+ sma: list[float | None] = []
+
+ for i in range(len(data)):
+ if i < window_size - 1:
+ sma.append(None) # SMA not available for early data points
+ else:
+ window = data[i - window_size + 1 : i + 1]
+ sma_value = sum(window) / window_size
+ sma.append(sma_value)
+ return sma
+
+
+if __name__ == "__main__":
+ import doctest
+
+ doctest.testmod()
+
+ # Example data (replace with your own time series data)
+ data = [10, 12, 15, 13, 14, 16, 18, 17, 19, 21]
+
+ # Specify the window size for the SMA
+ window_size = 3
+
+ # Calculate the Simple Moving Average
+ sma_values = simple_moving_average(data, window_size)
+
+ # Print the SMA values
+ print("Simple Moving Average (SMA) Values:")
+ for i, value in enumerate(sma_values):
+ if value is not None:
+ print(f"Day {i + 1}: {value:.2f}")
+ else:
+ print(f"Day {i + 1}: Not enough data for SMA")
| [] | 6_0 | python | import sys
import unittest
import os
import importlib.util
def run_function(file_path: str, function_name: str, function_args: list):
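    # Load the module dynamically from the given file path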
spec = importlib.util.spec_from_file_location("module.name", file_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
# Get the function
func = getattr(module, function_name)
# Run the function
return func(*function_args)
def assert_list_almost_equal(test_case, list1, list2, places=7):
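    # None entries must align exactly; numeric entries are compared to the given precision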
test_case.assertEqual(len(list1), len(list2), "Lists are of different lengths")
for item1, item2 in zip(list1, list2):
if item1 is None or item2 is None:
test_case.assertIsNone(item1)
test_case.assertIsNone(item2)
else:
test_case.assertAlmostEqual(item1, item2, places=places)
class TestSimpleMovingAverage(unittest.TestCase):
def test_simple_moving_average(self):
if not os.path.exists("financial/simple_moving_average.py"):
self.fail('file "financial/simple_moving_average.py" does not exist')
# Run simple_moving_average function from financial/simple_moving_average.py
function_result = run_function(
"financial/simple_moving_average.py",
"simple_moving_average",
[[1, 2, 3, 4, 5], 3],
)
expected_result_1 = [None, None, 2.0, 3.0, 4.0]
expected_result_2 = [2.0, 3.0, 4.0]
# Adjust the assertion to check for either valid result
try:
assert_list_almost_equal(self, function_result, expected_result_1)
except AssertionError:
assert_list_almost_equal(self, function_result, expected_result_2)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSimpleMovingAverage))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
https://github.com/teamqurrent/Python | Modify the `searches/binary_search.py` file and add a function called exponential_search. The function's args should be a list of ints forming an ascending sorted collection with comparable items, and an int which is the item value to search for. The function should return the index of the found item or -1 if not found. | 06edc0e | python3.9 | b814cf37 | diff --git a/searches/binary_search.py b/searches/binary_search.py
--- a/searches/binary_search.py
+++ b/searches/binary_search.py
@@ -1,9 +1,9 @@
#!/usr/bin/env python3
"""
-This is pure Python implementation of binary search algorithms
+Pure Python implementations of binary search algorithms
-For doctests run following command:
+For doctests run the following command:
python3 -m doctest -v binary_search.py
For manual testing run:
@@ -34,16 +34,12 @@ def bisect_left(
Examples:
>>> bisect_left([0, 5, 7, 10, 15], 0)
0
-
>>> bisect_left([0, 5, 7, 10, 15], 6)
2
-
>>> bisect_left([0, 5, 7, 10, 15], 20)
5
-
>>> bisect_left([0, 5, 7, 10, 15], 15, 1, 3)
3
-
>>> bisect_left([0, 5, 7, 10, 15], 6, 2)
2
"""
@@ -79,16 +75,12 @@ def bisect_right(
Examples:
>>> bisect_right([0, 5, 7, 10, 15], 0)
1
-
>>> bisect_right([0, 5, 7, 10, 15], 15)
5
-
>>> bisect_right([0, 5, 7, 10, 15], 6)
2
-
>>> bisect_right([0, 5, 7, 10, 15], 15, 1, 3)
3
-
>>> bisect_right([0, 5, 7, 10, 15], 6, 2)
2
"""
@@ -124,7 +116,6 @@ def insort_left(
>>> insort_left(sorted_collection, 6)
>>> sorted_collection
[0, 5, 6, 7, 10, 15]
-
>>> sorted_collection = [(0, 0), (5, 5), (7, 7), (10, 10), (15, 15)]
>>> item = (5, 5)
>>> insort_left(sorted_collection, item)
@@ -134,12 +125,10 @@ def insort_left(
True
>>> item is sorted_collection[2]
False
-
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_left(sorted_collection, 20)
>>> sorted_collection
[0, 5, 7, 10, 15, 20]
-
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_left(sorted_collection, 15, 1, 3)
>>> sorted_collection
@@ -167,7 +156,6 @@ def insort_right(
>>> insort_right(sorted_collection, 6)
>>> sorted_collection
[0, 5, 6, 7, 10, 15]
-
>>> sorted_collection = [(0, 0), (5, 5), (7, 7), (10, 10), (15, 15)]
>>> item = (5, 5)
>>> insort_right(sorted_collection, item)
@@ -177,12 +165,10 @@ def insort_right(
False
>>> item is sorted_collection[2]
True
-
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_right(sorted_collection, 20)
>>> sorted_collection
[0, 5, 7, 10, 15, 20]
-
>>> sorted_collection = [0, 5, 7, 10, 15]
>>> insort_right(sorted_collection, 15, 1, 3)
>>> sorted_collection
@@ -191,29 +177,28 @@ def insort_right(
sorted_collection.insert(bisect_right(sorted_collection, item, lo, hi), item)
-def binary_search(sorted_collection: list[int], item: int) -> int | None:
- """Pure implementation of binary search algorithm in Python
+def binary_search(sorted_collection: list[int], item: int) -> int:
+ """Pure implementation of a binary search algorithm in Python
- Be careful collection must be ascending sorted, otherwise result will be
+    Be careful, collection must be ascending sorted, otherwise the result will be
unpredictable
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item value to search
- :return: index of found item or None if item is not found
+ :return: index of the found item or -1 if the item is not found
Examples:
>>> binary_search([0, 5, 7, 10, 15], 0)
0
-
>>> binary_search([0, 5, 7, 10, 15], 15)
4
-
>>> binary_search([0, 5, 7, 10, 15], 5)
1
-
>>> binary_search([0, 5, 7, 10, 15], 6)
-
+ -1
"""
+ if list(sorted_collection) != sorted(sorted_collection):
+ raise ValueError("sorted_collection must be sorted in ascending order")
left = 0
right = len(sorted_collection) - 1
@@ -226,66 +211,66 @@ def binary_search(sorted_collection: list[int], item: int) -> int | None:
right = midpoint - 1
else:
left = midpoint + 1
- return None
+ return -1
-def binary_search_std_lib(sorted_collection: list[int], item: int) -> int | None:
- """Pure implementation of binary search algorithm in Python using stdlib
+def binary_search_std_lib(sorted_collection: list[int], item: int) -> int:
+ """Pure implementation of a binary search algorithm in Python using stdlib
- Be careful collection must be ascending sorted, otherwise result will be
+    Be careful, collection must be ascending sorted, otherwise the result will be
unpredictable
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item value to search
- :return: index of found item or None if item is not found
+ :return: index of the found item or -1 if the item is not found
Examples:
>>> binary_search_std_lib([0, 5, 7, 10, 15], 0)
0
-
>>> binary_search_std_lib([0, 5, 7, 10, 15], 15)
4
-
>>> binary_search_std_lib([0, 5, 7, 10, 15], 5)
1
-
>>> binary_search_std_lib([0, 5, 7, 10, 15], 6)
-
+ -1
"""
+ if list(sorted_collection) != sorted(sorted_collection):
+ raise ValueError("sorted_collection must be sorted in ascending order")
index = bisect.bisect_left(sorted_collection, item)
if index != len(sorted_collection) and sorted_collection[index] == item:
return index
- return None
+ return -1
def binary_search_by_recursion(
- sorted_collection: list[int], item: int, left: int, right: int
-) -> int | None:
- """Pure implementation of binary search algorithm in Python by recursion
+ sorted_collection: list[int], item: int, left: int = 0, right: int = -1
+) -> int:
+ """Pure implementation of a binary search algorithm in Python by recursion
- Be careful collection must be ascending sorted, otherwise result will be
+    Be careful, collection must be ascending sorted, otherwise the result will be
unpredictable
First recursion should be started with left=0 and right=(len(sorted_collection)-1)
:param sorted_collection: some ascending sorted collection with comparable items
:param item: item value to search
- :return: index of found item or None if item is not found
+ :return: index of the found item or -1 if the item is not found
Examples:
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 0, 0, 4)
0
-
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 15, 0, 4)
4
-
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 5, 0, 4)
1
-
>>> binary_search_by_recursion([0, 5, 7, 10, 15], 6, 0, 4)
-
+ -1
"""
+ if right < 0:
+ right = len(sorted_collection) - 1
+ if list(sorted_collection) != sorted(sorted_collection):
+ raise ValueError("sorted_collection must be sorted in ascending order")
if right < left:
- return None
+ return -1
midpoint = left + (right - left) // 2
@@ -297,12 +282,78 @@ def binary_search_by_recursion(
return binary_search_by_recursion(sorted_collection, item, midpoint + 1, right)
+def exponential_search(sorted_collection: list[int], item: int) -> int:
+ """Pure implementation of an exponential search algorithm in Python
+ Resources used:
+ https://en.wikipedia.org/wiki/Exponential_search
+
+ Be careful: collection must be ascending sorted, otherwise the result will be
+ unpredictable
+
+ :param sorted_collection: some ascending sorted collection with comparable items
+ :param item: item value to search
+ :return: index of the found item or -1 if the item is not found
+
+ The order of this algorithm is O(lg I), where I is the index position of the item if it exists.
+
+ Examples:
+ >>> exponential_search([0, 5, 7, 10, 15], 0)
+ 0
+ >>> exponential_search([0, 5, 7, 10, 15], 15)
+ 4
+ >>> exponential_search([0, 5, 7, 10, 15], 5)
+ 1
+ >>> exponential_search([0, 5, 7, 10, 15], 6)
+ -1
+ """
+ if list(sorted_collection) != sorted(sorted_collection):
+ raise ValueError("sorted_collection must be sorted in ascending order")
+ bound = 1
+ while bound < len(sorted_collection) and sorted_collection[bound] < item:
+ bound *= 2
+ left = bound // 2
+ right = min(bound, len(sorted_collection) - 1)
+ last_result = binary_search_by_recursion(
+ sorted_collection=sorted_collection, item=item, left=left, right=right
+ )
+ if last_result is None:
+ return -1
+ return last_result
+
+
+searches = ( # Fastest to slowest...
+ binary_search_std_lib,
+ binary_search,
+ exponential_search,
+ binary_search_by_recursion,
+)
+
+
if __name__ == "__main__":
- user_input = input("Enter numbers separated by comma:\n").strip()
+ import doctest
+ import timeit
+
+ doctest.testmod()
+ for search in searches:
+ name = f"{search.__name__:>26}"
+ print(f"{name}: {search([0, 5, 7, 10, 15], 10) = }") # type: ignore[operator]
+
+ print("\nBenchmarks...")
+ setup = "collection = range(1000)"
+ for search in searches:
+ name = search.__name__
+ print(
+ f"{name:>26}:",
+ timeit.timeit(
+ f"{name}(collection, 500)", setup=setup, number=5_000, globals=globals()
+ ),
+ )
+
+ user_input = input("\nEnter numbers separated by comma: ").strip()
collection = sorted(int(item) for item in user_input.split(","))
- target = int(input("Enter a single number to be found in the list:\n"))
- result = binary_search(collection, target)
- if result is None:
+ target = int(input("Enter a single number to be found in the list: "))
+ result = binary_search(sorted_collection=collection, item=target)
+ if result == -1:
print(f"{target} was not found in {collection}.")
else:
- print(f"{target} was found at position {result} in {collection}.")
+ print(f"{target} was found at position {result} of {collection}.")
| [
{
"content": "#!/usr/bin/env python3\n\n\"\"\"\nThis is pure Python implementation of binary search algorithms\n\nFor doctests run following command:\npython3 -m doctest -v binary_search.py\n\nFor manual testing run:\npython3 binary_search.py\n\"\"\"\nfrom __future__ import annotations\n\nimport bisect\n\n\ndef bisect_left(\n sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1\n) -> int:\n \"\"\"\n Locates the first element in a sorted array that is larger or equal to a given\n value.\n\n It has the same interface as\n https://docs.python.org/3/library/bisect.html#bisect.bisect_left .\n\n :param sorted_collection: some ascending sorted collection with comparable items\n :param item: item to bisect\n :param lo: lowest index to consider (as in sorted_collection[lo:hi])\n :param hi: past the highest index to consider (as in sorted_collection[lo:hi])\n :return: index i such that all values in sorted_collection[lo:i] are < item and all\n values in sorted_collection[i:hi] are >= item.\n\n Examples:\n >>> bisect_left([0, 5, 7, 10, 15], 0)\n 0\n\n >>> bisect_left([0, 5, 7, 10, 15], 6)\n 2\n\n >>> bisect_left([0, 5, 7, 10, 15], 20)\n 5\n\n >>> bisect_left([0, 5, 7, 10, 15], 15, 1, 3)\n 3\n\n >>> bisect_left([0, 5, 7, 10, 15], 6, 2)\n 2\n \"\"\"\n if hi < 0:\n hi = len(sorted_collection)\n\n while lo < hi:\n mid = lo + (hi - lo) // 2\n if sorted_collection[mid] < item:\n lo = mid + 1\n else:\n hi = mid\n\n return lo\n\n\ndef bisect_right(\n sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1\n) -> int:\n \"\"\"\n Locates the first element in a sorted array that is larger than a given value.\n\n It has the same interface as\n https://docs.python.org/3/library/bisect.html#bisect.bisect_right .\n\n :param sorted_collection: some ascending sorted collection with comparable items\n :param item: item to bisect\n :param lo: lowest index to consider (as in sorted_collection[lo:hi])\n :param hi: past the highest index to consider (as in sorted_collection[lo:hi])\n :return: index i such that all values in sorted_collection[lo:i] are <= item and\n all values in sorted_collection[i:hi] are > item.\n\n Examples:\n >>> bisect_right([0, 5, 7, 10, 15], 0)\n 1\n\n >>> bisect_right([0, 5, 7, 10, 15], 15)\n 5\n\n >>> bisect_right([0, 5, 7, 10, 15], 6)\n 2\n\n >>> bisect_right([0, 5, 7, 10, 15], 15, 1, 3)\n 3\n\n >>> bisect_right([0, 5, 7, 10, 15], 6, 2)\n 2\n \"\"\"\n if hi < 0:\n hi = len(sorted_collection)\n\n while lo < hi:\n mid = lo + (hi - lo) // 2\n if sorted_collection[mid] <= item:\n lo = mid + 1\n else:\n hi = mid\n\n return lo\n\n\ndef insort_left(\n sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1\n) -> None:\n \"\"\"\n Inserts a given value into a sorted array before other values with the same value.\n\n It has the same interface as\n https://docs.python.org/3/library/bisect.html#bisect.insort_left .\n\n :param sorted_collection: some ascending sorted collection with comparable items\n :param item: item to insert\n :param lo: lowest index to consider (as in sorted_collection[lo:hi])\n :param hi: past the highest index to consider (as in sorted_collection[lo:hi])\n\n Examples:\n >>> sorted_collection = [0, 5, 7, 10, 15]\n >>> insort_left(sorted_collection, 6)\n >>> sorted_collection\n [0, 5, 6, 7, 10, 15]\n\n >>> sorted_collection = [(0, 0), (5, 5), (7, 7), (10, 10), (15, 15)]\n >>> item = (5, 5)\n >>> insort_left(sorted_collection, item)\n >>> sorted_collection\n [(0, 0), (5, 5), (5, 5), (7, 7), (10, 10), (15, 15)]\n >>> item is sorted_collection[1]\n True\n 
>>> item is sorted_collection[2]\n False\n\n >>> sorted_collection = [0, 5, 7, 10, 15]\n >>> insort_left(sorted_collection, 20)\n >>> sorted_collection\n [0, 5, 7, 10, 15, 20]\n\n >>> sorted_collection = [0, 5, 7, 10, 15]\n >>> insort_left(sorted_collection, 15, 1, 3)\n >>> sorted_collection\n [0, 5, 7, 15, 10, 15]\n \"\"\"\n sorted_collection.insert(bisect_left(sorted_collection, item, lo, hi), item)\n\n\ndef insort_right(\n sorted_collection: list[int], item: int, lo: int = 0, hi: int = -1\n) -> None:\n \"\"\"\n Inserts a given value into a sorted array after other values with the same value.\n\n It has the same interface as\n https://docs.python.org/3/library/bisect.html#bisect.insort_right .\n\n :param sorted_collection: some ascending sorted collection with comparable items\n :param item: item to insert\n :param lo: lowest index to consider (as in sorted_collection[lo:hi])\n :param hi: past the highest index to consider (as in sorted_collection[lo:hi])\n\n Examples:\n >>> sorted_collection = [0, 5, 7, 10, 15]\n >>> insort_right(sorted_collection, 6)\n >>> sorted_collection\n [0, 5, 6, 7, 10, 15]\n\n >>> sorted_collection = [(0, 0), (5, 5), (7, 7), (10, 10), (15, 15)]\n >>> item = (5, 5)\n >>> insort_right(sorted_collection, item)\n >>> sorted_collection\n [(0, 0), (5, 5), (5, 5), (7, 7), (10, 10), (15, 15)]\n >>> item is sorted_collection[1]\n False\n >>> item is sorted_collection[2]\n True\n\n >>> sorted_collection = [0, 5, 7, 10, 15]\n >>> insort_right(sorted_collection, 20)\n >>> sorted_collection\n [0, 5, 7, 10, 15, 20]\n\n >>> sorted_collection = [0, 5, 7, 10, 15]\n >>> insort_right(sorted_collection, 15, 1, 3)\n >>> sorted_collection\n [0, 5, 7, 15, 10, 15]\n \"\"\"\n sorted_collection.insert(bisect_right(sorted_collection, item, lo, hi), item)\n\n\ndef binary_search(sorted_collection: list[int], item: int) -> int | None:\n \"\"\"Pure implementation of binary search algorithm in Python\n\n Be careful collection must be ascending sorted, otherwise result will be\n unpredictable\n\n :param sorted_collection: some ascending sorted collection with comparable items\n :param item: item value to search\n :return: index of found item or None if item is not found\n\n Examples:\n >>> binary_search([0, 5, 7, 10, 15], 0)\n 0\n\n >>> binary_search([0, 5, 7, 10, 15], 15)\n 4\n\n >>> binary_search([0, 5, 7, 10, 15], 5)\n 1\n\n >>> binary_search([0, 5, 7, 10, 15], 6)\n\n \"\"\"\n left = 0\n right = len(sorted_collection) - 1\n\n while left <= right:\n midpoint = left + (right - left) // 2\n current_item = sorted_collection[midpoint]\n if current_item == item:\n return midpoint\n elif item < current_item:\n right = midpoint - 1\n else:\n left = midpoint + 1\n return None\n\n\ndef binary_search_std_lib(sorted_collection: list[int], item: int) -> int | None:\n \"\"\"Pure implementation of binary search algorithm in Python using stdlib\n\n Be careful collection must be ascending sorted, otherwise result will be\n unpredictable\n\n :param sorted_collection: some ascending sorted collection with comparable items\n :param item: item value to search\n :return: index of found item or None if item is not found\n\n Examples:\n >>> binary_search_std_lib([0, 5, 7, 10, 15], 0)\n 0\n\n >>> binary_search_std_lib([0, 5, 7, 10, 15], 15)\n 4\n\n >>> binary_search_std_lib([0, 5, 7, 10, 15], 5)\n 1\n\n >>> binary_search_std_lib([0, 5, 7, 10, 15], 6)\n\n \"\"\"\n index = bisect.bisect_left(sorted_collection, item)\n if index != len(sorted_collection) and sorted_collection[index] == item:\n return index\n return 
None\n\n\ndef binary_search_by_recursion(\n sorted_collection: list[int], item: int, left: int, right: int\n) -> int | None:\n \"\"\"Pure implementation of binary search algorithm in Python by recursion\n\n Be careful collection must be ascending sorted, otherwise result will be\n unpredictable\n First recursion should be started with left=0 and right=(len(sorted_collection)-1)\n\n :param sorted_collection: some ascending sorted collection with comparable items\n :param item: item value to search\n :return: index of found item or None if item is not found\n\n Examples:\n >>> binary_search_by_recursion([0, 5, 7, 10, 15], 0, 0, 4)\n 0\n\n >>> binary_search_by_recursion([0, 5, 7, 10, 15], 15, 0, 4)\n 4\n\n >>> binary_search_by_recursion([0, 5, 7, 10, 15], 5, 0, 4)\n 1\n\n >>> binary_search_by_recursion([0, 5, 7, 10, 15], 6, 0, 4)\n\n \"\"\"\n if right < left:\n return None\n\n midpoint = left + (right - left) // 2\n\n if sorted_collection[midpoint] == item:\n return midpoint\n elif sorted_collection[midpoint] > item:\n return binary_search_by_recursion(sorted_collection, item, left, midpoint - 1)\n else:\n return binary_search_by_recursion(sorted_collection, item, midpoint + 1, right)\n\n\nif __name__ == \"__main__\":\n user_input = input(\"Enter numbers separated by comma:\\n\").strip()\n collection = sorted(int(item) for item in user_input.split(\",\"))\n target = int(input(\"Enter a single number to be found in the list:\\n\"))\n result = binary_search(collection, target)\n if result is None:\n print(f\"{target} was not found in {collection}.\")\n else:\n print(f\"{target} was found at position {result} in {collection}.\")\n",
"path": "searches/binary_search.py"
}
] | 6_1 | python | import sys
import unittest
import os
import importlib.util
def run_function(file_path: str, function_name: str, function_args: list):
spec = importlib.util.spec_from_file_location("module.name", file_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
# Get the function
func = getattr(module, function_name)
# Run the function
return func(*function_args)
class TestBinarySearch(unittest.TestCase):
def test_exponential_search(self):
if not os.path.exists("searches/binary_search.py"):
self.fail('file "searches/binary_search.py" does not exist')
# Run exponential_search function from searches/binary_search.py
function_result = run_function(
"searches/binary_search.py",
"exponential_search",
[[1, 2, 3, 4, 5], 3],
)
self.assertEqual(function_result, 2, "Exponential search did not return 2 when expected")
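# An item missing from the collection should now return the -1 sentinel instead of None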
function_result = run_function(
"searches/binary_search.py",
"exponential_search",
[[1, 2, 3, 4, 5], 6],
)
self.assertEqual(function_result, -1, "Exponential search did not return -1 when expected")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestBinarySearch))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
https://github.com/teamqurrent/Python | Create a vernam_cipher.py file in the `ciphers` directory with vernam_encrypt and vernam_decrypt functions. Each function should take two string arguments: the message to encrypt/decrypt and the key. Each function should return the encrypted/decrypted message. | be94690 | python3.9 | 34f48b68 | diff --git a/ciphers/vernam_cipher.py b/ciphers/vernam_cipher.py
new file mode 100644
--- /dev/null
+++ b/ciphers/vernam_cipher.py
@@ -0,0 +1,42 @@
+def vernam_encrypt(plaintext: str, key: str) -> str:
+ """
+ >>> vernam_encrypt("HELLO","KEY")
+ 'RIJVS'
+ """
+ ciphertext = ""
+ for i in range(len(plaintext)):
+ ct = ord(key[i % len(key)]) - 65 + ord(plaintext[i]) - 65
+ while ct > 25:
+ ct = ct - 26
+ ciphertext += chr(65 + ct)
+ return ciphertext
+
+
+def vernam_decrypt(ciphertext: str, key: str) -> str:
+ """
+ >>> vernam_decrypt("RIJVS","KEY")
+ 'HELLO'
+ """
+ decrypted_text = ""
+ for i in range(len(ciphertext)):
+ ct = ord(ciphertext[i]) - ord(key[i % len(key)])
+ while ct < 0:
+ ct = 26 + ct
+ decrypted_text += chr(65 + ct)
+ return decrypted_text
+
+
+if __name__ == "__main__":
+ from doctest import testmod
+
+ testmod()
+
+ # Example usage
+ plaintext = "HELLO"
+ key = "KEY"
+ encrypted_text = vernam_encrypt(plaintext, key)
+ decrypted_text = vernam_decrypt(encrypted_text, key)
+ print("\n\n")
+ print("Plaintext:", plaintext)
+ print("Encrypted:", encrypted_text)
+ print("Decrypted:", decrypted_text)
| [] | 6_2 | python | import sys
import unittest
import os
import importlib.util
def run_function(file_path: str, function_name: str, function_args: list):
spec = importlib.util.spec_from_file_location("module.name", file_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
# Get the function
func = getattr(module, function_name)
# Run the function
return func(*function_args)
class TestVernamCipher(unittest.TestCase):
def test_vernam_cipher(self):
if not os.path.exists("ciphers/vernam_cipher.py"):
self.fail('file "ciphers/vernam_cipher.py" does not exist')
encrypted_text = run_function(
"ciphers/vernam_cipher.py", "vernam_encrypt", ["HELLO", "KEY"]
)
self.assertEqual(encrypted_text, "RIJVS", "Vernam encryption did not return expected result")
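# Decrypting the ciphertext with the same key should round-trip back to the original plaintext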
decrypted_text = run_function(
"ciphers/vernam_cipher.py",
"vernam_decrypt",
[encrypted_text, "KEY"],
)
self.assertEqual(decrypted_text, "HELLO", "Vernam decryption did not return expected result")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestVernamCipher))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
https://github.com/teamqurrent/Python | Modify the `maths/volume.py` file and add a function for finding the volume of an icosahedron. Call the function vol_icosahedron; it should take a single float argument, the length of a side of the icosahedron, and return the volume of the icosahedron. | 1a26d76 | python3.9 | cc0405d0 | diff --git a/maths/volume.py b/maths/volume.py
--- a/maths/volume.py
+++ b/maths/volume.py
@@ -469,6 +469,35 @@ def vol_torus(torus_radius: float, tube_radius: float) -> float:
return 2 * pow(pi, 2) * torus_radius * pow(tube_radius, 2)
+def vol_icosahedron(tri_side: float) -> float:
+ """Calculate the Volume of an Icosahedron.
+ Wikipedia reference: https://en.wikipedia.org/wiki/Regular_icosahedron
+
+ >>> from math import isclose
+ >>> isclose(vol_icosahedron(2.5), 34.088984228514256)
+ True
+ >>> isclose(vol_icosahedron(10), 2181.694990624912374)
+ True
+ >>> isclose(vol_icosahedron(5), 272.711873828114047)
+ True
+ >>> isclose(vol_icosahedron(3.49), 92.740688412033628)
+ True
+ >>> vol_icosahedron(0)
+ 0.0
+ >>> vol_icosahedron(-1)
+ Traceback (most recent call last):
+ ...
+ ValueError: vol_icosahedron() only accepts non-negative values
+ >>> vol_icosahedron(-0.2)
+ Traceback (most recent call last):
+ ...
+ ValueError: vol_icosahedron() only accepts non-negative values
+ """
+ if tri_side < 0:
+ raise ValueError("vol_icosahedron() only accepts non-negative values")
+ return tri_side**3 * (3 + 5**0.5) * 5 / 12
+
+
def main():
"""Print the Results of Various Volume Calculations."""
print("Volumes:")
@@ -489,6 +518,7 @@ def main():
print(
f"Hollow Circular Cylinder: {vol_hollow_circular_cylinder(1, 2, 3) = }"
) # ~= 28.3
+ print(f"Icosahedron: {vol_icosahedron(2.5) = }") # ~=34.09
if __name__ == "__main__":
| [
{
"content": "\"\"\"\nFind the volume of various shapes.\n* https://en.wikipedia.org/wiki/Volume\n* https://en.wikipedia.org/wiki/Spherical_cap\n\"\"\"\nfrom __future__ import annotations\n\nfrom math import pi, pow\n\n\ndef vol_cube(side_length: float) -> float:\n \"\"\"\n Calculate the Volume of a Cube.\n >>> vol_cube(1)\n 1.0\n >>> vol_cube(3)\n 27.0\n >>> vol_cube(0)\n 0.0\n >>> vol_cube(1.6)\n 4.096000000000001\n >>> vol_cube(-1)\n Traceback (most recent call last):\n ...\n ValueError: vol_cube() only accepts non-negative values\n \"\"\"\n if side_length < 0:\n raise ValueError(\"vol_cube() only accepts non-negative values\")\n return pow(side_length, 3)\n\n\ndef vol_spherical_cap(height: float, radius: float) -> float:\n \"\"\"\n Calculate the volume of the spherical cap.\n >>> vol_spherical_cap(1, 2)\n 5.235987755982988\n >>> vol_spherical_cap(1.6, 2.6)\n 16.621119532592402\n >>> vol_spherical_cap(0, 0)\n 0.0\n >>> vol_spherical_cap(-1, 2)\n Traceback (most recent call last):\n ...\n ValueError: vol_spherical_cap() only accepts non-negative values\n >>> vol_spherical_cap(1, -2)\n Traceback (most recent call last):\n ...\n ValueError: vol_spherical_cap() only accepts non-negative values\n \"\"\"\n if height < 0 or radius < 0:\n raise ValueError(\"vol_spherical_cap() only accepts non-negative values\")\n # Volume is 1/3 pi * height squared * (3 * radius - height)\n return 1 / 3 * pi * pow(height, 2) * (3 * radius - height)\n\n\ndef vol_spheres_intersect(\n radius_1: float, radius_2: float, centers_distance: float\n) -> float:\n \"\"\"\n Calculate the volume of the intersection of two spheres.\n The intersection is composed by two spherical caps and therefore its volume is the\n sum of the volumes of the spherical caps. First, it calculates the heights (h1, h2)\n of the spherical caps, then the two volumes and it returns the sum.\n The height formulas are\n h1 = (radius_1 - radius_2 + centers_distance)\n * (radius_1 + radius_2 - centers_distance)\n / (2 * centers_distance)\n h2 = (radius_2 - radius_1 + centers_distance)\n * (radius_2 + radius_1 - centers_distance)\n / (2 * centers_distance)\n if centers_distance is 0 then it returns the volume of the smallers sphere\n :return vol_spherical_cap(h1, radius_2) + vol_spherical_cap(h2, radius_1)\n >>> vol_spheres_intersect(2, 2, 1)\n 21.205750411731103\n >>> vol_spheres_intersect(2.6, 2.6, 1.6)\n 40.71504079052372\n >>> vol_spheres_intersect(0, 0, 0)\n 0.0\n >>> vol_spheres_intersect(-2, 2, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_spheres_intersect() only accepts non-negative values\n >>> vol_spheres_intersect(2, -2, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_spheres_intersect() only accepts non-negative values\n >>> vol_spheres_intersect(2, 2, -1)\n Traceback (most recent call last):\n ...\n ValueError: vol_spheres_intersect() only accepts non-negative values\n \"\"\"\n if radius_1 < 0 or radius_2 < 0 or centers_distance < 0:\n raise ValueError(\"vol_spheres_intersect() only accepts non-negative values\")\n if centers_distance == 0:\n return vol_sphere(min(radius_1, radius_2))\n\n h1 = (\n (radius_1 - radius_2 + centers_distance)\n * (radius_1 + radius_2 - centers_distance)\n / (2 * centers_distance)\n )\n h2 = (\n (radius_2 - radius_1 + centers_distance)\n * (radius_2 + radius_1 - centers_distance)\n / (2 * centers_distance)\n )\n\n return vol_spherical_cap(h1, radius_2) + vol_spherical_cap(h2, radius_1)\n\n\ndef vol_spheres_union(\n radius_1: float, radius_2: float, centers_distance: float\n) -> 
float:\n \"\"\"\n Calculate the volume of the union of two spheres that possibly intersect.\n It is the sum of sphere A and sphere B minus their intersection.\n First, it calculates the volumes (v1, v2) of the spheres,\n then the volume of the intersection (i) and it returns the sum v1+v2-i.\n If centers_distance is 0 then it returns the volume of the larger sphere\n :return vol_sphere(radius_1) + vol_sphere(radius_2)\n - vol_spheres_intersect(radius_1, radius_2, centers_distance)\n\n >>> vol_spheres_union(2, 2, 1)\n 45.814892864851146\n >>> vol_spheres_union(1.56, 2.2, 1.4)\n 48.77802773671288\n >>> vol_spheres_union(0, 2, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_spheres_union() only accepts non-negative values, non-zero radius\n >>> vol_spheres_union('1.56', '2.2', '1.4')\n Traceback (most recent call last):\n ...\n TypeError: '<=' not supported between instances of 'str' and 'int'\n >>> vol_spheres_union(1, None, 1)\n Traceback (most recent call last):\n ...\n TypeError: '<=' not supported between instances of 'NoneType' and 'int'\n \"\"\"\n\n if radius_1 <= 0 or radius_2 <= 0 or centers_distance < 0:\n raise ValueError(\n \"vol_spheres_union() only accepts non-negative values, non-zero radius\"\n )\n\n if centers_distance == 0:\n return vol_sphere(max(radius_1, radius_2))\n\n return (\n vol_sphere(radius_1)\n + vol_sphere(radius_2)\n - vol_spheres_intersect(radius_1, radius_2, centers_distance)\n )\n\n\ndef vol_cuboid(width: float, height: float, length: float) -> float:\n \"\"\"\n Calculate the Volume of a Cuboid.\n :return multiple of width, length and height\n >>> vol_cuboid(1, 1, 1)\n 1.0\n >>> vol_cuboid(1, 2, 3)\n 6.0\n >>> vol_cuboid(1.6, 2.6, 3.6)\n 14.976\n >>> vol_cuboid(0, 0, 0)\n 0.0\n >>> vol_cuboid(-1, 2, 3)\n Traceback (most recent call last):\n ...\n ValueError: vol_cuboid() only accepts non-negative values\n >>> vol_cuboid(1, -2, 3)\n Traceback (most recent call last):\n ...\n ValueError: vol_cuboid() only accepts non-negative values\n >>> vol_cuboid(1, 2, -3)\n Traceback (most recent call last):\n ...\n ValueError: vol_cuboid() only accepts non-negative values\n \"\"\"\n if width < 0 or height < 0 or length < 0:\n raise ValueError(\"vol_cuboid() only accepts non-negative values\")\n return float(width * height * length)\n\n\ndef vol_cone(area_of_base: float, height: float) -> float:\n \"\"\"\n Calculate the Volume of a Cone.\n Wikipedia reference: https://en.wikipedia.org/wiki/Cone\n :return (1/3) * area_of_base * height\n >>> vol_cone(10, 3)\n 10.0\n >>> vol_cone(1, 1)\n 0.3333333333333333\n >>> vol_cone(1.6, 1.6)\n 0.8533333333333335\n >>> vol_cone(0, 0)\n 0.0\n >>> vol_cone(-1, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_cone() only accepts non-negative values\n >>> vol_cone(1, -1)\n Traceback (most recent call last):\n ...\n ValueError: vol_cone() only accepts non-negative values\n \"\"\"\n if height < 0 or area_of_base < 0:\n raise ValueError(\"vol_cone() only accepts non-negative values\")\n return area_of_base * height / 3.0\n\n\ndef vol_right_circ_cone(radius: float, height: float) -> float:\n \"\"\"\n Calculate the Volume of a Right Circular Cone.\n Wikipedia reference: https://en.wikipedia.org/wiki/Cone\n :return (1/3) * pi * radius^2 * height\n >>> vol_right_circ_cone(2, 3)\n 12.566370614359172\n >>> vol_right_circ_cone(0, 0)\n 0.0\n >>> vol_right_circ_cone(1.6, 1.6)\n 4.289321169701265\n >>> vol_right_circ_cone(-1, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_right_circ_cone() only accepts 
non-negative values\n >>> vol_right_circ_cone(1, -1)\n Traceback (most recent call last):\n ...\n ValueError: vol_right_circ_cone() only accepts non-negative values\n \"\"\"\n if height < 0 or radius < 0:\n raise ValueError(\"vol_right_circ_cone() only accepts non-negative values\")\n return pi * pow(radius, 2) * height / 3.0\n\n\ndef vol_prism(area_of_base: float, height: float) -> float:\n \"\"\"\n Calculate the Volume of a Prism.\n Wikipedia reference: https://en.wikipedia.org/wiki/Prism_(geometry)\n :return V = Bh\n >>> vol_prism(10, 2)\n 20.0\n >>> vol_prism(11, 1)\n 11.0\n >>> vol_prism(1.6, 1.6)\n 2.5600000000000005\n >>> vol_prism(0, 0)\n 0.0\n >>> vol_prism(-1, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_prism() only accepts non-negative values\n >>> vol_prism(1, -1)\n Traceback (most recent call last):\n ...\n ValueError: vol_prism() only accepts non-negative values\n \"\"\"\n if height < 0 or area_of_base < 0:\n raise ValueError(\"vol_prism() only accepts non-negative values\")\n return float(area_of_base * height)\n\n\ndef vol_pyramid(area_of_base: float, height: float) -> float:\n \"\"\"\n Calculate the Volume of a Pyramid.\n Wikipedia reference: https://en.wikipedia.org/wiki/Pyramid_(geometry)\n :return (1/3) * Bh\n >>> vol_pyramid(10, 3)\n 10.0\n >>> vol_pyramid(1.5, 3)\n 1.5\n >>> vol_pyramid(1.6, 1.6)\n 0.8533333333333335\n >>> vol_pyramid(0, 0)\n 0.0\n >>> vol_pyramid(-1, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_pyramid() only accepts non-negative values\n >>> vol_pyramid(1, -1)\n Traceback (most recent call last):\n ...\n ValueError: vol_pyramid() only accepts non-negative values\n \"\"\"\n if height < 0 or area_of_base < 0:\n raise ValueError(\"vol_pyramid() only accepts non-negative values\")\n return area_of_base * height / 3.0\n\n\ndef vol_sphere(radius: float) -> float:\n \"\"\"\n Calculate the Volume of a Sphere.\n Wikipedia reference: https://en.wikipedia.org/wiki/Sphere\n :return (4/3) * pi * r^3\n >>> vol_sphere(5)\n 523.5987755982989\n >>> vol_sphere(1)\n 4.1887902047863905\n >>> vol_sphere(1.6)\n 17.15728467880506\n >>> vol_sphere(0)\n 0.0\n >>> vol_sphere(-1)\n Traceback (most recent call last):\n ...\n ValueError: vol_sphere() only accepts non-negative values\n \"\"\"\n if radius < 0:\n raise ValueError(\"vol_sphere() only accepts non-negative values\")\n # Volume is 4/3 * pi * radius cubed\n return 4 / 3 * pi * pow(radius, 3)\n\n\ndef vol_hemisphere(radius: float) -> float:\n \"\"\"Calculate the volume of a hemisphere\n Wikipedia reference: https://en.wikipedia.org/wiki/Hemisphere\n Other references: https://www.cuemath.com/geometry/hemisphere\n :return 2/3 * pi * radius^3\n >>> vol_hemisphere(1)\n 2.0943951023931953\n >>> vol_hemisphere(7)\n 718.377520120866\n >>> vol_hemisphere(1.6)\n 8.57864233940253\n >>> vol_hemisphere(0)\n 0.0\n >>> vol_hemisphere(-1)\n Traceback (most recent call last):\n ...\n ValueError: vol_hemisphere() only accepts non-negative values\n \"\"\"\n if radius < 0:\n raise ValueError(\"vol_hemisphere() only accepts non-negative values\")\n # Volume is radius cubed * pi * 2/3\n return pow(radius, 3) * pi * 2 / 3\n\n\ndef vol_circular_cylinder(radius: float, height: float) -> float:\n \"\"\"Calculate the Volume of a Circular Cylinder.\n Wikipedia reference: https://en.wikipedia.org/wiki/Cylinder\n :return pi * radius^2 * height\n >>> vol_circular_cylinder(1, 1)\n 3.141592653589793\n >>> vol_circular_cylinder(4, 3)\n 150.79644737231007\n >>> vol_circular_cylinder(1.6, 1.6)\n 12.867963509103795\n 
>>> vol_circular_cylinder(0, 0)\n 0.0\n >>> vol_circular_cylinder(-1, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_circular_cylinder() only accepts non-negative values\n >>> vol_circular_cylinder(1, -1)\n Traceback (most recent call last):\n ...\n ValueError: vol_circular_cylinder() only accepts non-negative values\n \"\"\"\n if height < 0 or radius < 0:\n raise ValueError(\"vol_circular_cylinder() only accepts non-negative values\")\n # Volume is radius squared * height * pi\n return pow(radius, 2) * height * pi\n\n\ndef vol_hollow_circular_cylinder(\n inner_radius: float, outer_radius: float, height: float\n) -> float:\n \"\"\"Calculate the Volume of a Hollow Circular Cylinder.\n >>> vol_hollow_circular_cylinder(1, 2, 3)\n 28.274333882308138\n >>> vol_hollow_circular_cylinder(1.6, 2.6, 3.6)\n 47.50088092227767\n >>> vol_hollow_circular_cylinder(-1, 2, 3)\n Traceback (most recent call last):\n ...\n ValueError: vol_hollow_circular_cylinder() only accepts non-negative values\n >>> vol_hollow_circular_cylinder(1, -2, 3)\n Traceback (most recent call last):\n ...\n ValueError: vol_hollow_circular_cylinder() only accepts non-negative values\n >>> vol_hollow_circular_cylinder(1, 2, -3)\n Traceback (most recent call last):\n ...\n ValueError: vol_hollow_circular_cylinder() only accepts non-negative values\n >>> vol_hollow_circular_cylinder(2, 1, 3)\n Traceback (most recent call last):\n ...\n ValueError: outer_radius must be greater than inner_radius\n >>> vol_hollow_circular_cylinder(0, 0, 0)\n Traceback (most recent call last):\n ...\n ValueError: outer_radius must be greater than inner_radius\n \"\"\"\n # Volume - (outer_radius squared - inner_radius squared) * pi * height\n if inner_radius < 0 or outer_radius < 0 or height < 0:\n raise ValueError(\n \"vol_hollow_circular_cylinder() only accepts non-negative values\"\n )\n if outer_radius <= inner_radius:\n raise ValueError(\"outer_radius must be greater than inner_radius\")\n return pi * (pow(outer_radius, 2) - pow(inner_radius, 2)) * height\n\n\ndef vol_conical_frustum(height: float, radius_1: float, radius_2: float) -> float:\n \"\"\"Calculate the Volume of a Conical Frustum.\n Wikipedia reference: https://en.wikipedia.org/wiki/Frustum\n\n >>> vol_conical_frustum(45, 7, 28)\n 48490.482608158454\n >>> vol_conical_frustum(1, 1, 2)\n 7.330382858376184\n >>> vol_conical_frustum(1.6, 2.6, 3.6)\n 48.7240076620753\n >>> vol_conical_frustum(0, 0, 0)\n 0.0\n >>> vol_conical_frustum(-2, 2, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_conical_frustum() only accepts non-negative values\n >>> vol_conical_frustum(2, -2, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_conical_frustum() only accepts non-negative values\n >>> vol_conical_frustum(2, 2, -1)\n Traceback (most recent call last):\n ...\n ValueError: vol_conical_frustum() only accepts non-negative values\n \"\"\"\n # Volume is 1/3 * pi * height *\n # (radius_1 squared + radius_2 squared + radius_1 * radius_2)\n if radius_1 < 0 or radius_2 < 0 or height < 0:\n raise ValueError(\"vol_conical_frustum() only accepts non-negative values\")\n return (\n 1\n / 3\n * pi\n * height\n * (pow(radius_1, 2) + pow(radius_2, 2) + radius_1 * radius_2)\n )\n\n\ndef vol_torus(torus_radius: float, tube_radius: float) -> float:\n \"\"\"Calculate the Volume of a Torus.\n Wikipedia reference: https://en.wikipedia.org/wiki/Torus\n :return 2pi^2 * torus_radius * tube_radius^2\n >>> vol_torus(1, 1)\n 19.739208802178716\n >>> vol_torus(4, 3)\n 710.6115168784338\n >>> 
vol_torus(3, 4)\n 947.4820225045784\n >>> vol_torus(1.6, 1.6)\n 80.85179925372404\n >>> vol_torus(0, 0)\n 0.0\n >>> vol_torus(-1, 1)\n Traceback (most recent call last):\n ...\n ValueError: vol_torus() only accepts non-negative values\n >>> vol_torus(1, -1)\n Traceback (most recent call last):\n ...\n ValueError: vol_torus() only accepts non-negative values\n \"\"\"\n if torus_radius < 0 or tube_radius < 0:\n raise ValueError(\"vol_torus() only accepts non-negative values\")\n return 2 * pow(pi, 2) * torus_radius * pow(tube_radius, 2)\n\n\ndef main():\n \"\"\"Print the Results of Various Volume Calculations.\"\"\"\n print(\"Volumes:\")\n print(f\"Cube: {vol_cube(2) = }\") # = 8\n print(f\"Cuboid: {vol_cuboid(2, 2, 2) = }\") # = 8\n print(f\"Cone: {vol_cone(2, 2) = }\") # ~= 1.33\n print(f\"Right Circular Cone: {vol_right_circ_cone(2, 2) = }\") # ~= 8.38\n print(f\"Prism: {vol_prism(2, 2) = }\") # = 4\n print(f\"Pyramid: {vol_pyramid(2, 2) = }\") # ~= 1.33\n print(f\"Sphere: {vol_sphere(2) = }\") # ~= 33.5\n print(f\"Hemisphere: {vol_hemisphere(2) = }\") # ~= 16.75\n print(f\"Circular Cylinder: {vol_circular_cylinder(2, 2) = }\") # ~= 25.1\n print(f\"Torus: {vol_torus(2, 2) = }\") # ~= 157.9\n print(f\"Conical Frustum: {vol_conical_frustum(2, 2, 4) = }\") # ~= 58.6\n print(f\"Spherical cap: {vol_spherical_cap(1, 2) = }\") # ~= 5.24\n print(f\"Spheres intersetion: {vol_spheres_intersect(2, 2, 1) = }\") # ~= 21.21\n print(f\"Spheres union: {vol_spheres_union(2, 2, 1) = }\") # ~= 45.81\n print(\n f\"Hollow Circular Cylinder: {vol_hollow_circular_cylinder(1, 2, 3) = }\"\n ) # ~= 28.3\n\n\nif __name__ == \"__main__\":\n main()\n",
"path": "maths/volume.py"
}
] | 6_3 | python | import sys
import unittest
import importlib.util
def run_function(file_path: str, function_name: str, function_args: list):
spec = importlib.util.spec_from_file_location("module.name", file_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
# Get the function
func = getattr(module, function_name)
# Run the function
return func(*function_args)
class TestVolumeIcosahedron(unittest.TestCase):
def test_vol_icosahedron(self):
volume = run_function("maths/volume.py", "vol_icosahedron", [5])
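# A regular icosahedron with side a has volume a**3 * 5 * (3 + 5**0.5) / 12; for a = 5 that is ~272.71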
self.assertAlmostEqual(volume, 272.71, 2, "Volume of icosahedron did not match expected result")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestVolumeIcosahedron))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
|
https://github.com/teamqurrent/Python | Add a new file to the `machine_learning/loss_functions` directory called huber_loss.py. The file should contain a function called huber_loss which takes three arguments: y_true, y_pred, and delta. y_true and y_pred should be numpy arrays. The function should return the mean Huber loss. | 583a614 | numpy == 1.26.1 | python3.9 | 53d78b9c | diff --git a/machine_learning/loss_functions/huber_loss.py b/machine_learning/loss_functions/huber_loss.py
new file mode 100644
--- /dev/null
+++ b/machine_learning/loss_functions/huber_loss.py
@@ -0,0 +1,52 @@
+"""
+Huber Loss Function
+
+Description:
+Huber loss function describes the penalty incurred by an estimation procedure.
+It serves as a measure of the model's accuracy in regression tasks.
+
+Formula:
+Huber Loss = if |y_true - y_pred| <= delta then 0.5 * (y_true - y_pred)^2
+ else delta * |y_true - y_pred| - 0.5 * delta^2
+
+Source:
+[Wikipedia - Huber Loss](https://en.wikipedia.org/wiki/Huber_loss)
+"""
+
+import numpy as np
+
+
+def huber_loss(y_true: np.ndarray, y_pred: np.ndarray, delta: float) -> float:
+ """
+ Calculate the mean of Huber Loss.
+
+ Parameters:
+ - y_true: The true values (ground truth).
+ - y_pred: The predicted values.
+
+ Returns:
+ - huber_loss: The mean of Huber Loss between y_true and y_pred.
+
+ Example usage:
+ >>> true_values = np.array([0.9, 10.0, 2.0, 1.0, 5.2])
+ >>> predicted_values = np.array([0.8, 2.1, 2.9, 4.2, 5.2])
+ >>> np.isclose(huber_loss(true_values, predicted_values, 1.0), 2.102)
+ True
+ >>> true_labels = np.array([11.0, 21.0, 3.32, 4.0, 5.0])
+ >>> predicted_probs = np.array([8.3, 20.8, 2.9, 11.2, 5.0])
+ >>> np.isclose(huber_loss(true_labels, predicted_probs, 1.0), 1.80164)
+ True
+ """
+
+ if len(y_true) != len(y_pred):
+ raise ValueError("Input arrays must have the same length.")
+
+ huber_mse = 0.5 * (y_true - y_pred) ** 2
+ huber_mae = delta * (np.abs(y_true - y_pred) - 0.5 * delta)
+ return np.where(np.abs(y_true - y_pred) <= delta, huber_mse, huber_mae).mean()
+
+
+if __name__ == "__main__":
+ import doctest
+
+ doctest.testmod()
| [] | 6_4 | python | import sys
import unittest
import numpy as np
import importlib.util
def run_function(file_path: str, function_name: str, function_args: list):
spec = importlib.util.spec_from_file_location("module.name", file_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
# Get the function
func = getattr(module, function_name)
# Run the function
return func(*function_args)
class TestHuberLoss(unittest.TestCase):
def test_huber_loss(self):
true_values = np.array([0.9, 10.0, 2.0, 1.0, 5.2])
predicted_values = np.array([0.8, 2.1, 2.9, 4.2, 5.2])
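# With delta = 1, the absolute errors [0.1, 7.9, 0.9, 3.2, 0.0] mix squared and linear penalties; their mean is ~2.102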
result = run_function(
"machine_learning/loss_functions/huber_loss.py",
"huber_loss",
[true_values, predicted_values, 1],
)
self.assertTrue(np.isclose(result, 2.102), "Result from huber_loss is not close to 2.102")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestHuberLoss))
runner = unittest.TextTestRunner()
if not runner.run(suite).wasSuccessful():
sys.exit(1)
else:
sys.exit(0)
if __name__ == "__main__":
main() |
https://github.com/teamqurrent/BitcoinPaperTrader | Add a method to the `Wallet` class in `trading_system.py` called profitFactor(). This method traverses the transactions list and calculates the profit factor: the total profit made by all profitable trades divided by the total loss incurred by all unprofitable trades. | 388f8ce | numpy
datetime
pytz | python3.9 | dc3f4a0 | diff --git a/trading_system.py b/trading_system.py
--- a/trading_system.py
+++ b/trading_system.py
@@ -32,6 +32,70 @@ class Wallet:
self.short_stock = 0
self.transactions = []
+ def totalProfits(self, initial_wallet, final_wallet):
+ net = final_wallet - initial_wallet
+ percentage = net * 100/initial_wallet
+ return net, percentage
+
+ def totalClosedTrades(self):
+ total_trades = 0
+
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+
+ if current_transaction['type'] in ['sell', 'close_short']:
+ total_trades += 1
+
+ return total_trades
+
+ def percentProfitable(self):
+ profitable_trades = 0
+ total_trades = 0
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+ previous_transaction = self.transactions[i - 1]
+
+ if current_transaction['type'] in ['sell'] and previous_transaction['type'] in ['buy']:
+ # Determine if trade was profitable
+ if current_transaction['price'] * current_transaction['number'] > previous_transaction['price'] * previous_transaction['number']:
+ profitable_trades += 1
+
+ total_trades += 1
+ else:
+ if current_transaction['type'] in ['close_short'] and previous_transaction['type'] in ['short']:
+ if current_transaction['price'] * current_transaction['number'] < previous_transaction['price'] * previous_transaction['number']:
+ profitable_trades += 1
+
+ total_trades += 1
+
+ return 100 * profitable_trades/total_trades
+
+ def profitFactor(self):
+ total_profit = 0
+ total_loss = 0
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+ previous_transaction = self.transactions[i - 1]
+
+ if current_transaction['type'] in ['sell'] and previous_transaction['type'] in ['buy']:
+ profit = (current_transaction['price'] * current_transaction['number']) - (previous_transaction['price'] * previous_transaction['number'])
+ if profit > 0:
+ total_profit += profit
+ else:
+ total_loss -= profit
+
+ elif current_transaction['type'] in ['close_short'] and previous_transaction['type'] in ['short']:
+ profit = (previous_transaction['price'] * previous_transaction['number']) - (current_transaction['price'] * current_transaction['number'])
+ if profit > 0:
+ total_profit += profit
+ else:
+ total_loss -= profit
+
+ if total_loss == 0:
+ return float('inf')
+ else:
+ return total_profit / total_loss
+
def buy(self, timestamp, price, number):
if np.isnan(price) or price <= 0:
print(f"Price: {price} is invalid")
@@ -284,10 +348,17 @@ if __name__ == "__main__":
print("Invalid input received. Please try again.")
userWallet = Wallet(wallet)
-
+ initialCash = userWallet.cash
# Calculate short term simple moving average strategy
sma_strategy = SimpleMovingAverageStrategy(userWallet, adjusted_historical_data, 5)
sma_strategy.execute()
+ net, percentage = userWallet.totalProfits(initialCash, userWallet.cash)
print("Final wallet cash: ", userWallet.cash)
+ print("Net Profit: ", net)
+ print("Profit Percentage: ", percentage)
print("Final owed shorts: ", userWallet.short_stock)
+ print("Percentage profitable: ", userWallet.percentProfitable())
+ print("Total number of closed trades: ", userWallet.totalClosedTrades())
+ print("Profit factor: ", userWallet.profitFactor())
+
| [
{
"content": "import numpy as np\nfrom datetime import datetime\nfrom datetime import timedelta\nimport pytz\n\nclass TechnicalIndicators:\n\n # Method to create heiken ashi price candles\n @staticmethod\n def heikin_ashi(data_point, previous_candle):\n open_ = data_point['open']\n high = data_point['high']\n low = data_point['low']\n close = data_point['close']\n\n if previous_candle is None:\n return { 'open': open_, 'high': high, 'low': low, 'close': close }\n\n new_candle = {}\n new_candle['close'] = (open_ + high + low + close) / 4\n new_candle['open'] = (previous_candle['open'] + previous_candle['close']) / 2\n new_candle['high'] = max(high, new_candle['open'], new_candle['close'])\n new_candle['low'] = min(low, new_candle['open'], new_candle['close'])\n\n return new_candle\n\n\nclass Wallet:\n def __init__(self, initial_cash):\n self.cash = initial_cash\n self.stock = 0\n self.short_stock = 0\n self.transactions = []\n\n def buy(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n \n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n \n if price * number > self.cash:\n print(f\"Not enough cash to buy that number\")\n return False \n\n if number <= 0:\n print(f\"Cannot buy 0 or negative amount\")\n return False \n\n self.cash -= price * number\n self.stock += number\n transaction = {\n 'type': 'buy',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def sell(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n\n if number > self.stock:\n print(f\"Not enough stock to sell\")\n return False \n\n if number <= 0:\n print(f\"Cannot sell 0 or negative amount\")\n return False \n\n self.stock -= number\n self.cash += price * number\n transaction = {\n 'type': 'sell',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n\n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot short back in time\")\n return False\n\n if price * number > self.cash:\n print(f\"Not enough cash to short that number\")\n return False\n\n if number <= 0:\n print(f\"Cannot short 0 or negative amount\")\n return False\n\n self.cash += price * number\n self.short_stock += number\n transaction = {\n 'type': 'short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def close_short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot close short back in time\")\n return False \n\n if number > self.short_stock:\n print(f\"Not enough shorted Bitcoin to close\")\n return False \n\n if number <= 0:\n print(f\"Cannot close 0 or negative amount\")\n return False \n\n self.short_stock -= number\n self.cash -= price * number\n transaction = {\n 'type': 
'close_short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\nclass SimpleMovingAverageStrategy:\n def __init__(self, wallet, data, window_size):\n self.wallet = wallet\n self.data = data\n self.window_size = window_size\n\n def calculate_moving_average(self, data):\n return sum(data) / len(data)\n\n def execute(self):\n bitcoin_prices = []\n\n for data_point in self.data:\n bitcoin_prices.append(data_point['close'])\n if len(bitcoin_prices) > self.window_size:\n bitcoin_prices.pop(0)\n moving_average = self.calculate_moving_average(bitcoin_prices)\n\n if data_point['close'] < moving_average and self.wallet.stock == 0:\n self.wallet.close_short(data_point['timestamp'], data_point['close'], self.wallet.short_stock)\n self.wallet.buy(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])\n elif data_point['close'] > moving_average and self.wallet.stock > 0:\n self.wallet.sell(data_point['timestamp'], data_point['close'], self.wallet.stock)\n self.wallet.short(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])\n\n # Closing remaining positions\n last_data = self.data[-1]\n if self.wallet.stock > 0:\n self.wallet.sell(last_data['timestamp'], last_data['close'], self.wallet.stock)\n if self.wallet.short_stock > 0:\n self.wallet.close_short(last_data['timestamp'], last_data['close'], self.wallet.short_stock)\n \ndef load_historical_data(file_path):\n try:\n print(\"Loading Sim Data...\")\n with open(file_path) as f:\n lines = f.read().split(\"\\n\")\n lines = [line.split(\",\") for line in lines if line] \n data_points = [[int(line[0]), float(line[1])] for line in lines]\n\n print(f\"{len(data_points)} minutes of data loaded\")\n return data_points\n\n except FileNotFoundError:\n print(f\"Error: The file {file_path} does not exist.\")\n return []\n except Exception as e:\n print(f\"An error occurred: {e}\")\n return []\n\n\ndef minute_to_ohlc(data, interval):\n ohlc_data = []\n\n for i in range(0, len(data), interval):\n time_period = data[i:i + interval]\n timestamps, prices = zip(*time_period)\n \n ohlc = {}\n ohlc['timestamp'] = timestamps[0]\n ohlc['open'] = prices[0]\n ohlc['high'] = max(prices)\n ohlc['low'] = min(prices)\n ohlc['close'] = prices[-1]\n\n ohlc_data.append(ohlc)\n \n return ohlc_data\n\n\ndef get_user_input():\n wallet = input(\"Please enter the starting size (in dollars) of the wallet you would like to simulate: \")\n start_date = input(\"Enter the start date (mm-dd-yyyy): \")\n end_date = input(\"Enter the end date (mm-dd-yyyy): \")\n data_time_interval = input(\"Enter the time interval in minutes: \")\n return wallet, start_date, end_date, data_time_interval\n\n\ndef user_settings(wallet, start_date, end_date, data_time_interval, historical_data):\n first_timestamp = historical_data[0][0]\n last_timestamp = historical_data[-1][0]\n\n try:\n wallet = float(wallet)\n if wallet <= 0:\n raise ValueError(\"Wallet size must be positive.\")\n\n utc_tz = pytz.utc\n start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')\n end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')\n start_date_obj = utc_tz.localize(start_date_obj)\n end_date_obj = utc_tz.localize(end_date_obj)\n start_timestamp = int(start_date_obj.timestamp()) + 60\n end_timestamp = int(end_date_obj.timestamp()) + 60\n\n data_time_interval = int(data_time_interval)\n\n if data_time_interval < 1:\n raise ValueError(\"Time interval must be at least 1 
minute.\")\n\n if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):\n raise ValueError(\"Start date and end date must be within the range of historical data.\")\n\n if start_timestamp > end_timestamp:\n raise ValueError(\"End date must be after the start date.\")\n\n except ValueError as e:\n print(f\"Invalid input: {e}\")\n return None, None\n\n filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]\n adjusted_data = minute_to_ohlc(filtered_data, data_time_interval)\n\n return wallet, adjusted_data\n\n\n\nif __name__ == \"__main__\":\n file_path = \"HistoricalBTCdata.txt\"\n historical_data = load_historical_data(file_path)\n\n while True:\n wallet, start_date, end_date, data_time_interval = get_user_input()\n\n wallet, adjusted_historical_data = user_settings(wallet, start_date, end_date, data_time_interval, historical_data)\n\n if wallet is not None and adjusted_historical_data is not None:\n break\n else:\n print(\"Invalid input received. Please try again.\")\n\n userWallet = Wallet(wallet)\n\n # Calculate short term simple moving average strategy\n sma_strategy = SimpleMovingAverageStrategy(userWallet, adjusted_historical_data, 5)\n sma_strategy.execute()\n\n print(\"Final wallet cash: \", userWallet.cash)\n print(\"Final owed shorts: \", userWallet.short_stock)\n",
"path": "trading_system.py"
}
] | 7_0 | python | import sys
import unittest
class TestSummary(unittest.TestCase):
def setUp(self):
from trading_system import Wallet
self.wallet = Wallet(50)
def test_profit_factor(self):
transactions = [
{'type': 'buy', 'price': 10, 'number': 5, 'timestamp': 1},
{'type': 'sell', 'price': 15, 'number': 5, 'timestamp': 2}, # profit = 25
{'type': 'buy', 'price': 20, 'number': 2, 'timestamp': 3},
{'type': 'sell', 'price': 10, 'number': 2, 'timestamp': 4}, # profit = -20
{'type': 'short', 'price': 10, 'number': 5, 'timestamp': 5},
{'type': 'close_short', 'price': 5, 'number': 5, 'timestamp': 6}, # profit = 25
{'type': 'short', 'price': 10, 'number': 2, 'timestamp': 7},
{'type': 'close_short', 'price': 15, 'number': 2, 'timestamp': 8}, # profit = -10
]
self.wallet.transactions = transactions
expected_profit_factor = 1.6666666666666 # 50/30
self.assertAlmostEqual(self.wallet.profitFactor(), expected_profit_factor)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSummary))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
https://github.com/teamqurrent/BitcoinPaperTrader | Implement a function in `trading_system.py` called user_settings(wallet, start_date, end_date, data_time_interval, historical_data) that changes the historical-data backtesting conditions and adjusts the data according to user inputs: start date, end date, and time interval. Perform input validation to check that the wallet (a float) is greater than zero, that the time interval is at least 1 minute, that the start and end dates are within the range of the historical data, and that the end date is after the start date. The function should accept dates in 'MM-DD-YYYY' format. | 5e4d95a | numpy
datetime
pytz | python3.9 | b21e78b | diff --git a/trading_system.py b/trading_system.py
--- a/trading_system.py
+++ b/trading_system.py
@@ -1,14 +1,10 @@
import numpy as np
from datetime import datetime
+from datetime import timedelta
+import pytz
def load_historical_data(file_path):
- """
- Load and parse historical trading data from a file.
-
- :param file_path: Path to the historical data file.
- :return: List of parsed data points.
- """
try:
print("Loading Sim Data...")
with open(file_path) as f:
@@ -30,6 +26,73 @@ def test_data_loading(data):
print(f"{len(data)} number of entries loaded")
print(data[0])
-file_path = "HistoricalBTCdata.txt"
-historical_data = load_historical_data(file_path)
+def get_user_input():
+ wallet = input("Please enter the starting size (in dollars) of the wallet you would like to simulate: ")
+ start_date = input("Enter the start date (mm-dd-yyyy): ")
+ end_date = input("Enter the end date (mm-dd-yyyy): ")
+ data_time_interval = input("Enter the time interval in seconds (minimum 60): ")
+ return wallet, start_date, end_date, data_time_interval
+
+
+def user_settings(wallet, start_date, end_date, data_time_interval, historical_data):
+ first_timestamp = historical_data[0][0]
+ last_timestamp = historical_data[-1][0]
+
+ # Convert inputs and validate them
+ try:
+ wallet = float(wallet)
+ if wallet <= 0:
+ raise ValueError("Wallet size must be positive.")
+
+ # Time conversion stuff
+ utc_tz = pytz.utc
+ start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')
+ end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')
+ start_date_obj = utc_tz.localize(start_date_obj)
+ end_date_obj = utc_tz.localize(end_date_obj)
+ start_timestamp = int(start_date_obj.timestamp()) + 60
+ end_timestamp = int(end_date_obj.timestamp()) + 60
+
+ data_time_interval = int(data_time_interval)
+ if data_time_interval < 60:
+ raise ValueError("Time interval must be at least 60 seconds.")
+
+ if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):
+ raise ValueError("Start date and end date must be within the range of historical data.")
+
+ if start_timestamp > end_timestamp:
+ raise ValueError("End date must be after the start date.")
+
+ except ValueError as e:
+ print(f"Invalid input: {e}")
+ return None, None
+
+ # Filter data between time range then adjust to data interval
+ filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]
+
+ adjusted_data = []
+ current_time = start_timestamp
+
+ for i in range(0, len(filtered_data), data_time_interval // 60):
+ if filtered_data[i][0] >= current_time:
+ adjusted_data.append(filtered_data[i])
+ current_time += data_time_interval
+
+ return wallet, adjusted_data
+
+if __name__ == "__main__":
+ file_path = "HistoricalBTCdata.txt"
+ historical_data = load_historical_data(file_path)
+
+ while True:
+ wallet, start_date, end_date, data_time_interval = get_user_input()
+
+ wallet, adjusted_historical_data = user_settings(wallet, start_date, end_date, data_time_interval, historical_data)
+
+ if wallet is not None and adjusted_historical_data is not None:
+ break
+ else:
+ print("Invalid input received. Please try again.")
+ print(f"wallet: {wallet}")
+ print(adjusted_historical_data)
\ No newline at end of file
diff --git a/user_input_trade_settings/revised_instructions.txt b/user_input_trade_settings/revised_instructions.txt
new file mode 100644
| [
{
"content": "import numpy as np\nfrom datetime import datetime\n\n\ndef load_historical_data(file_path):\n \"\"\"\n Load and parse historical trading data from a file.\n\n :param file_path: Path to the historical data file.\n :return: List of parsed data points.\n \"\"\"\n try:\n print(\"Loading Sim Data...\")\n with open(file_path) as f:\n lines = f.read().split(\"\\n\")\n lines = [line.split(\",\") for line in lines if line] # Avoid empty strings\n data_points = [[int(line[0]), float(line[1])] for line in lines]\n\n print(f\"{len(data_points)} minutes of data loaded\")\n return data_points\n\n except FileNotFoundError:\n print(f\"Error: The file {file_path} does not exist.\")\n return []\n except Exception as e:\n print(f\"An error occurred: {e}\")\n return []\n\ndef test_data_loading(data):\n print(f\"{len(data)} number of entries loaded\")\n print(data[0])\n\nfile_path = \"HistoricalBTCdata.txt\"\nhistorical_data = load_historical_data(file_path)\n\n",
"path": "trading_system.py"
}
] | 7_1 | python | import sys
import unittest
import pytz
from datetime import datetime
class TestUserSettings(unittest.TestCase):
@classmethod
def setUpClass(cls):
from trading_system import load_historical_data
# Load data once before running all test cases
cls.historical_data = load_historical_data('HistoricalBTCdata.txt')
#test input validation for wallet
def test_invalid_wallet(self):
from trading_system import user_settings
wallet, adjusted_data = user_settings(-1000, '01-02-2017', '01-03-2017', 60, self.historical_data)
self.assertIsNone(wallet)
self.assertIsNone(adjusted_data, "Negative wallet value accepted")
# Test input validation for dates
def test_invalid_date(self):
from trading_system import user_settings
wallet, adjusted_data = user_settings(1000, '12-03-2017', '01-01-2017', 60, self.historical_data)
self.assertIsNone(wallet)
self.assertIsNone(adjusted_data, "Invalid date range accepted")
# Test input validation for time interval
def test_invalid_time_interval(self):
from trading_system import user_settings
wallet, adjusted_data = user_settings(1000, '01-02-2017', '01-03-2017', 59, self.historical_data)
self.assertIsNone(wallet)
self.assertIsNone(adjusted_data, "Invalid time interval accepted")
# Test that adjusted data is between start and end input dates
def test_data_filtering(self):
from trading_system import user_settings
wallet, adjusted_data = user_settings(1000, '01-02-2017', '01-03-2017', 60, self.historical_data)
utc_tz = pytz.utc
start_date_obj = datetime.strptime('01-02-2017', '%m-%d-%Y')
end_date_obj = datetime.strptime('01-03-2017', '%m-%d-%Y')
start_date_obj = utc_tz.localize(start_date_obj)
end_date_obj = utc_tz.localize(end_date_obj)
start_timestamp = int(start_date_obj.timestamp()) + 60
end_timestamp = int(end_date_obj.timestamp()) + 60
self.assertTrue(all(start_timestamp <= data_point[0] <= end_timestamp for data_point in adjusted_data), "Incorrect data filtering")
    # Check data is correctly adjusted according to the time interval
def test_data_adjustment(self):
from trading_system import user_settings
wallet, adjusted_data = user_settings(1000, '01-02-2017', '01-03-2017', 120, self.historical_data)
times = [data_point[0] for data_point in adjusted_data]
self.assertTrue(all((times[i + 1] - times[i]) == 120 for i in range(len(times) - 1)), "Incorrect data adjustment")
# Test decimal wallet inputs
    def test_valid_wallet(self):
from trading_system import user_settings
wallet, adjusted_data = user_settings(1000.5, '01-02-2017', '01-03-2017', 60, self.historical_data)
self.assertEqual(wallet, 1000.5, "Wallet not properly initialized")
# Test function correctly accepts valid dates
def test_valid_date(self):
from trading_system import user_settings
wallet, adjusted_data = user_settings(1000, '01-02-2017', '01-03-2017', 60, self.historical_data)
self.assertEqual(wallet, 1000)
def main():
suite = unittest.TestSuite()
test_cases = unittest.TestLoader().loadTestsFromTestCase(TestUserSettings)
suite.addTests(test_cases)
result = unittest.TextTestRunner().run(suite)
if result.wasSuccessful():
sys.exit(0) # All tests passed, exit with code 0
else:
sys.exit(1) # Some tests failed, exit with code 1
if __name__ == "__main__":
main() |
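A minimal, self-contained sketch (helper name hypothetical) of the filtering and interval-adjustment step recorded in the patch above, assuming the minute-spaced [timestamp, price] format that load_historical_data produces:

def adjust_to_interval(data, start_ts, end_ts, interval_seconds):
    # Keep only the points inside the requested window.
    filtered = [p for p in data if start_ts <= p[0] <= end_ts]
    adjusted, current = [], start_ts
    # Step through the minute data in chunks of interval_seconds // 60 entries,
    # keeping one point per interval, mirroring user_settings above.
    for i in range(0, len(filtered), interval_seconds // 60):
        if filtered[i][0] >= current:
            adjusted.append(filtered[i])
            current += interval_seconds
    return adjusted

minute_data = [[t, float(t)] for t in range(0, 600, 60)]
print(adjust_to_interval(minute_data, 0, 599, 120))  # one point every 2 minutes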
https://github.com/teamqurrent/BitcoinPaperTrader | Create a `Wallet` class inside `trading_system.py` that tracks balances of stock and cash, and has a list of the transactions made in the wallet. The init method should take in the initial wallet balance, then set the wallet's balance, initial stock, and transaction list to be referenced later. Then add buy (buy(self, timestamp, price, number)) and sell (sell(self, timestamp, price, number)) methods that correctly simulate buy and sell transactions and update the wallet's variables accordingly. Make sure to have input validation for the buy and sell methods that checks for an invalid price, transactions going back in time, buying more than the wallet can afford, or buying 0 or negative amounts. | 3ae437b | numpy
datetime
pytz | python3.9 | 95bba0b | diff --git a/trading_system.py b/trading_system.py
--- a/trading_system.py
+++ b/trading_system.py
@@ -3,6 +3,80 @@ from datetime import datetime
from datetime import timedelta
import pytz
+class Wallet:
+ def __init__(self, initial_cash):
+ self.cash = initial_cash
+ self.stock = 0
+ self.transactions = []
+
+ def buy(self, timestamp, price, number):
+
+ # Input validation
+ if np.isnan(price) or price <= 0:
+ print(f"Price: {price} is invalid")
+ return False # Invalid price input
+
+ if self.transactions and timestamp <= self.transactions[-1]['timestamp']:
+ print(f"Cannot go back in time")
+ return False # Cannot buy before previous transaction.
+
+ if price * number > self.cash:
+ print(f"Not enough cash to buy that number")
+ return False # Not enough cash to buy
+
+ if number <= 0:
+ print(f"Cannot buy 0 or negative amount")
+ return False # Invalid number input
+
+ # Update Wallet
+ self.cash -= price * number
+ self.stock += number
+ transaction = {
+ 'type': 'buy',
+ 'price': price,
+ 'number': number,
+ 'timestamp': timestamp
+ }
+ self.transactions.append(transaction)
+
+ # Logging
+ print(transaction)
+ return True
+
+ def sell(self, timestamp, price, number):
+
+ # Input Validation
+ if np.isnan(price) or price <= 0:
+ print(f"Price: {price} is invalid")
+ return False # Invalid price input
+
+ if self.transactions and timestamp <= self.transactions[-1]['timestamp']:
+ print(f"Cannot go back in time")
+ return False # Cannot buy before previous transaction.
+
+ if number > self.stock:
+ print(f"Not enough stock to sell")
+ return False # Not enough stock to sell
+
+ if number <= 0:
+ print(f"Cannot buy 0 or negative amount")
+ return False # Invalid number input
+
+ # Update Wallet
+ self.stock -= number
+ self.cash += price * number
+ transaction = {
+ 'type': 'sell',
+ 'price': price,
+ 'number': number,
+ 'timestamp': timestamp
+ }
+ self.transactions.append(transaction)
+
+ # Logging
+ print(transaction)
+ return True
+
def load_historical_data(file_path):
try:
@@ -94,5 +168,5 @@ if __name__ == "__main__":
else:
print("Invalid input received. Please try again.")
- print(f"wallet: {wallet}")
- print(adjusted_historical_data)
\ No newline at end of file
+
+ userWallet = Wallet(wallet)
\ No newline at end of file
| [
{
"content": "import numpy as np\nfrom datetime import datetime\nfrom datetime import timedelta\nimport pytz\n\n\ndef load_historical_data(file_path):\n try:\n print(\"Loading Sim Data...\")\n with open(file_path) as f:\n lines = f.read().split(\"\\n\")\n lines = [line.split(\",\") for line in lines if line] # Avoid empty strings\n data_points = [[int(line[0]), float(line[1])] for line in lines]\n\n print(f\"{len(data_points)} minutes of data loaded\")\n return data_points\n\n except FileNotFoundError:\n print(f\"Error: The file {file_path} does not exist.\")\n return []\n except Exception as e:\n print(f\"An error occurred: {e}\")\n return []\n\ndef test_data_loading(data):\n print(f\"{len(data)} number of entries loaded\")\n print(data[0])\n\ndef get_user_input():\n wallet = input(\"Please enter the starting size (in dollars) of the wallet you would like to simulate: \")\n start_date = input(\"Enter the start date (mm-dd-yyyy): \")\n end_date = input(\"Enter the end date (mm-dd-yyyy): \")\n data_time_interval = input(\"Enter the time interval in seconds (minimum 60): \")\n return wallet, start_date, end_date, data_time_interval\n\n\ndef user_settings(wallet, start_date, end_date, data_time_interval, historical_data):\n first_timestamp = historical_data[0][0]\n last_timestamp = historical_data[-1][0]\n\n # Convert inputs and validate them\n try:\n wallet = float(wallet)\n if wallet <= 0:\n raise ValueError(\"Wallet size must be positive.\")\n\n # Time conversion stuff\n utc_tz = pytz.utc\n start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')\n end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')\n start_date_obj = utc_tz.localize(start_date_obj)\n end_date_obj = utc_tz.localize(end_date_obj)\n start_timestamp = int(start_date_obj.timestamp()) + 60\n end_timestamp = int(end_date_obj.timestamp()) + 60\n\n data_time_interval = int(data_time_interval)\n if data_time_interval < 60:\n raise ValueError(\"Time interval must be at least 60 seconds.\")\n\n if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):\n raise ValueError(\"Start date and end date must be within the range of historical data.\")\n\n if start_timestamp > end_timestamp:\n raise ValueError(\"End date must be after the start date.\")\n\n except ValueError as e:\n print(f\"Invalid input: {e}\")\n return None, None\n\n # Filter data between time range then adjust to data interval\n filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]\n\n adjusted_data = []\n current_time = start_timestamp\n\n for i in range(0, len(filtered_data), data_time_interval // 60):\n if filtered_data[i][0] >= current_time:\n adjusted_data.append(filtered_data[i])\n current_time += data_time_interval\n\n return wallet, adjusted_data\n\nif __name__ == \"__main__\":\n file_path = \"HistoricalBTCdata.txt\"\n historical_data = load_historical_data(file_path)\n\n while True:\n wallet, start_date, end_date, data_time_interval = get_user_input()\n\n wallet, adjusted_historical_data = user_settings(wallet, start_date, end_date, data_time_interval, historical_data)\n\n if wallet is not None and adjusted_historical_data is not None:\n break\n else:\n print(\"Invalid input received. Please try again.\")\n\n print(f\"wallet: {wallet}\")\n print(adjusted_historical_data)",
"path": "trading_system.py"
}
] | 7_2 | python | import sys
import unittest
import math
class TestWallet(unittest.TestCase):
def setUp(self):
from trading_system import Wallet
self.wallet = Wallet(10000)
def test_initial_wallet_state(self):
self.assertEqual(self.wallet.cash, 10000)
self.assertEqual(self.wallet.stock, 0)
self.assertEqual(self.wallet.transactions, [])
def test_buy_operation(self):
self.assertEqual(self.wallet.cash, 10000)
self.assertTrue(self.wallet.buy(1001, 10, 1))
self.assertEqual(self.wallet.cash, 9990)
self.assertEqual(self.wallet.stock, 1)
# Test Logging
self.assertEqual(self.wallet.transactions[-1], {
'type': 'buy', 'price': 10, 'number': 1, 'timestamp': 1001})
def test_sell_operation(self):
self.assertTrue(self.wallet.buy(1000, 100, 1))
self.assertTrue(self.wallet.sell(1100, 200, 0.5))
self.assertEqual(self.wallet.cash, 10000)
self.assertEqual(self.wallet.stock, 0.5)
# Test Logging
self.assertEqual(self.wallet.transactions[-1], {
'type': 'sell', 'price': 200, 'number': 0.5, 'timestamp': 1100})
def test_buy_invalid_timestamp(self):
self.assertTrue(self.wallet.buy(1000, 100, 1))
self.assertFalse(self.wallet.buy(900, 50, 0.5)) # Cannot buy before previous transaction.
def test_sell_invalid_timestamp(self):
self.assertTrue(self.wallet.buy(1000, 100, 1))
self.assertFalse(self.wallet.sell(900, 150, 0.5)) # Cannot sell before previous transaction.
def test_invalid_price(self):
self.assertFalse(self.wallet.buy(1200, math.nan, 0.5)) # Invalid price
self.assertFalse(self.wallet.buy(1200, -10, 0.5))
def test_not_enough_cash(self):
self.assertFalse(self.wallet.buy(1200, 20000, 1)) # Not enough cash to buy that number
def test_invalid_number(self):
self.assertFalse(self.wallet.buy(1200, 10, -1)) # Cannot buy 0 or negative amount
def test_multiple_buys(self):
self.assertTrue(self.wallet.buy(1000, 5000, 1))
self.assertTrue(self.wallet.buy(1100, 4000, 1))
self.assertEqual(self.wallet.stock, 2)
self.assertEqual(self.wallet.cash, 1000)
self.assertFalse(self.wallet.buy(1200, 3000, 1)) # Cannot buy more than amount of cash left
def test_multiple_sells(self):
self.assertTrue(self.wallet.buy(1000, 5000, 1))
self.assertTrue(self.wallet.sell(1100, 10000, 0.5))
self.assertTrue(self.wallet.sell(1200, 20000, 0.5))
self.assertEqual(self.wallet.stock, 0)
self.assertEqual(self.wallet.cash, 20000)
self.assertFalse(self.wallet.sell(1300, 30000, 0.1)) # Cannot sell more than stock number in wallet
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestWallet))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main() |
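A short usage sketch for the Wallet class, assuming trading_system.py matches the patch above; it mirrors the sell test: buying one unit at 100 and selling half at 200 leaves the cash balance unchanged:

from trading_system import Wallet

w = Wallet(10000)
w.buy(1000, 100, 1)     # spend 100 for 1 unit  -> cash 9900, stock 1
w.sell(1100, 200, 0.5)  # receive 100 for 0.5   -> cash 10000, stock 0.5
assert w.cash == 10000 and w.stock == 0.5
print(w.transactions)   # both transactions are logged in order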
https://github.com/teamqurrent/BitcoinPaperTrader | Add a technical indicators class called TechnicalIndicators in `trading_system.py` with a Heikin-Ashi method called heikin_ashi(data_point, previous_candle) that takes the previous and current OHLC candles (dicts with keys 'timestamp', 'open', 'high', 'low', 'close') and calculates the corresponding Heikin-Ashi candle from the two | 95bba0b | numpy
datetime
pytz | python3.9 | aa0dece | diff --git a/trading_system.py b/trading_system.py
--- a/trading_system.py
+++ b/trading_system.py
@@ -3,6 +3,28 @@ from datetime import datetime
from datetime import timedelta
import pytz
+class TechnicalIndicators:
+
+ # Method to create heiken ashi price candles
+ @staticmethod
+ def heikin_ashi(data_point, previous_candle):
+ open_ = data_point['open']
+ high = data_point['high']
+ low = data_point['low']
+ close = data_point['close']
+
+ if previous_candle is None:
+ return { 'open': open_, 'high': high, 'low': low, 'close': close }
+
+ new_candle = {}
+ new_candle['close'] = (open_ + high + low + close) / 4
+ new_candle['open'] = (previous_candle['open'] + previous_candle['close']) / 2
+ new_candle['high'] = max(high, new_candle['open'], new_candle['close'])
+ new_candle['low'] = min(low, new_candle['open'], new_candle['close'])
+
+ return new_candle
+
+
class Wallet:
def __init__(self, initial_cash):
self.cash = initial_cash
@@ -10,70 +32,62 @@ class Wallet:
self.transactions = []
def buy(self, timestamp, price, number):
-
- # Input validation
if np.isnan(price) or price <= 0:
print(f"Price: {price} is invalid")
- return False # Invalid price input
+ return False
if self.transactions and timestamp <= self.transactions[-1]['timestamp']:
print(f"Cannot go back in time")
- return False # Cannot buy before previous transaction.
+ return False
if price * number > self.cash:
print(f"Not enough cash to buy that number")
- return False # Not enough cash to buy
+ return False
if number <= 0:
print(f"Cannot buy 0 or negative amount")
- return False # Invalid number input
-
- # Update Wallet
+ return False
+
self.cash -= price * number
self.stock += number
transaction = {
- 'type': 'buy',
- 'price': price,
- 'number': number,
+ 'type': 'buy',
+ 'price': price,
+ 'number': number,
'timestamp': timestamp
}
self.transactions.append(transaction)
- # Logging
print(transaction)
return True
def sell(self, timestamp, price, number):
-
- # Input Validation
if np.isnan(price) or price <= 0:
print(f"Price: {price} is invalid")
- return False # Invalid price input
-
+ return False
+
if self.transactions and timestamp <= self.transactions[-1]['timestamp']:
print(f"Cannot go back in time")
- return False # Cannot buy before previous transaction.
-
+ return False
+
if number > self.stock:
print(f"Not enough stock to sell")
- return False # Not enough stock to sell
-
+ return False
+
if number <= 0:
- print(f"Cannot buy 0 or negative amount")
- return False # Invalid number input
+ print(f"Cannot sell 0 or negative amount")
+ return False
- # Update Wallet
self.stock -= number
self.cash += price * number
transaction = {
- 'type': 'sell',
- 'price': price,
- 'number': number,
+ 'type': 'sell',
+ 'price': price,
+ 'number': number,
'timestamp': timestamp
}
self.transactions.append(transaction)
- # Logging
print(transaction)
return True
@@ -83,7 +97,7 @@ def load_historical_data(file_path):
print("Loading Sim Data...")
with open(file_path) as f:
lines = f.read().split("\n")
- lines = [line.split(",") for line in lines if line] # Avoid empty strings
+ lines = [line.split(",") for line in lines if line]
data_points = [[int(line[0]), float(line[1])] for line in lines]
print(f"{len(data_points)} minutes of data loaded")
@@ -96,15 +110,31 @@ def load_historical_data(file_path):
print(f"An error occurred: {e}")
return []
-def test_data_loading(data):
- print(f"{len(data)} number of entries loaded")
- print(data[0])
+
+def minute_to_ohlc(data, interval):
+ ohlc_data = []
+
+ for i in range(0, len(data), interval):
+ time_period = data[i:i + interval]
+ timestamps, prices = zip(*time_period)
+
+ ohlc = {}
+ ohlc['timestamp'] = timestamps[0]
+ ohlc['open'] = prices[0]
+ ohlc['high'] = max(prices)
+ ohlc['low'] = min(prices)
+ ohlc['close'] = prices[-1]
+
+ ohlc_data.append(ohlc)
+
+ return ohlc_data
+
def get_user_input():
wallet = input("Please enter the starting size (in dollars) of the wallet you would like to simulate: ")
start_date = input("Enter the start date (mm-dd-yyyy): ")
end_date = input("Enter the end date (mm-dd-yyyy): ")
- data_time_interval = input("Enter the time interval in seconds (minimum 60): ")
+ data_time_interval = input("Enter the time interval in minutes: ")
return wallet, start_date, end_date, data_time_interval
@@ -112,13 +142,11 @@ def user_settings(wallet, start_date, end_date, data_time_interval, historical_d
first_timestamp = historical_data[0][0]
last_timestamp = historical_data[-1][0]
- # Convert inputs and validate them
try:
wallet = float(wallet)
if wallet <= 0:
raise ValueError("Wallet size must be positive.")
- # Time conversion stuff
utc_tz = pytz.utc
start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')
end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')
@@ -128,8 +156,9 @@ def user_settings(wallet, start_date, end_date, data_time_interval, historical_d
end_timestamp = int(end_date_obj.timestamp()) + 60
data_time_interval = int(data_time_interval)
- if data_time_interval < 60:
- raise ValueError("Time interval must be at least 60 seconds.")
+
+ if data_time_interval < 1:
+ raise ValueError("Time interval must be at least 1 minute.")
if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):
raise ValueError("Start date and end date must be within the range of historical data.")
@@ -141,19 +170,12 @@ def user_settings(wallet, start_date, end_date, data_time_interval, historical_d
print(f"Invalid input: {e}")
return None, None
- # Filter data between time range then adjust to data interval
filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]
-
- adjusted_data = []
- current_time = start_timestamp
-
- for i in range(0, len(filtered_data), data_time_interval // 60):
- if filtered_data[i][0] >= current_time:
- adjusted_data.append(filtered_data[i])
- current_time += data_time_interval
+ adjusted_data = minute_to_ohlc(filtered_data, data_time_interval)
return wallet, adjusted_data
+
if __name__ == "__main__":
file_path = "HistoricalBTCdata.txt"
historical_data = load_historical_data(file_path)
@@ -168,5 +190,4 @@ if __name__ == "__main__":
else:
print("Invalid input received. Please try again.")
-
userWallet = Wallet(wallet)
\ No newline at end of file
| [
{
"content": "import numpy as np\nfrom datetime import datetime\nfrom datetime import timedelta\nimport pytz\n\nclass Wallet:\n def __init__(self, initial_cash):\n self.cash = initial_cash\n self.stock = 0\n self.transactions = []\n\n def buy(self, timestamp, price, number):\n\n # Input validation\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False # Invalid price input\n \n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False # Cannot buy before previous transaction.\n \n if price * number > self.cash:\n print(f\"Not enough cash to buy that number\")\n return False # Not enough cash to buy\n\n if number <= 0:\n print(f\"Cannot buy 0 or negative amount\")\n return False # Invalid number input\n \n # Update Wallet\n self.cash -= price * number\n self.stock += number\n transaction = {\n 'type': 'buy', \n 'price': price, \n 'number': number, \n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n # Logging\n print(transaction)\n return True\n\n def sell(self, timestamp, price, number):\n\n # Input Validation\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False # Invalid price input\n \n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False # Cannot buy before previous transaction.\n \n if number > self.stock:\n print(f\"Not enough stock to sell\")\n return False # Not enough stock to sell\n \n if number <= 0:\n print(f\"Cannot buy 0 or negative amount\")\n return False # Invalid number input\n\n # Update Wallet\n self.stock -= number\n self.cash += price * number\n transaction = {\n 'type': 'sell', \n 'price': price, \n 'number': number, \n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n # Logging\n print(transaction)\n return True\n\n\ndef load_historical_data(file_path):\n try:\n print(\"Loading Sim Data...\")\n with open(file_path) as f:\n lines = f.read().split(\"\\n\")\n lines = [line.split(\",\") for line in lines if line] # Avoid empty strings\n data_points = [[int(line[0]), float(line[1])] for line in lines]\n\n print(f\"{len(data_points)} minutes of data loaded\")\n return data_points\n\n except FileNotFoundError:\n print(f\"Error: The file {file_path} does not exist.\")\n return []\n except Exception as e:\n print(f\"An error occurred: {e}\")\n return []\n\ndef test_data_loading(data):\n print(f\"{len(data)} number of entries loaded\")\n print(data[0])\n\ndef get_user_input():\n wallet = input(\"Please enter the starting size (in dollars) of the wallet you would like to simulate: \")\n start_date = input(\"Enter the start date (mm-dd-yyyy): \")\n end_date = input(\"Enter the end date (mm-dd-yyyy): \")\n data_time_interval = input(\"Enter the time interval in seconds (minimum 60): \")\n return wallet, start_date, end_date, data_time_interval\n\n\ndef user_settings(wallet, start_date, end_date, data_time_interval, historical_data):\n first_timestamp = historical_data[0][0]\n last_timestamp = historical_data[-1][0]\n\n # Convert inputs and validate them\n try:\n wallet = float(wallet)\n if wallet <= 0:\n raise ValueError(\"Wallet size must be positive.\")\n\n # Time conversion stuff\n utc_tz = pytz.utc\n start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')\n end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')\n start_date_obj = utc_tz.localize(start_date_obj)\n end_date_obj = utc_tz.localize(end_date_obj)\n 
start_timestamp = int(start_date_obj.timestamp()) + 60\n end_timestamp = int(end_date_obj.timestamp()) + 60\n\n data_time_interval = int(data_time_interval)\n if data_time_interval < 60:\n raise ValueError(\"Time interval must be at least 60 seconds.\")\n\n if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):\n raise ValueError(\"Start date and end date must be within the range of historical data.\")\n\n if start_timestamp > end_timestamp:\n raise ValueError(\"End date must be after the start date.\")\n\n except ValueError as e:\n print(f\"Invalid input: {e}\")\n return None, None\n\n # Filter data between time range then adjust to data interval\n filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]\n\n adjusted_data = []\n current_time = start_timestamp\n\n for i in range(0, len(filtered_data), data_time_interval // 60):\n if filtered_data[i][0] >= current_time:\n adjusted_data.append(filtered_data[i])\n current_time += data_time_interval\n\n return wallet, adjusted_data\n\nif __name__ == \"__main__\":\n file_path = \"HistoricalBTCdata.txt\"\n historical_data = load_historical_data(file_path)\n\n while True:\n wallet, start_date, end_date, data_time_interval = get_user_input()\n\n wallet, adjusted_historical_data = user_settings(wallet, start_date, end_date, data_time_interval, historical_data)\n\n if wallet is not None and adjusted_historical_data is not None:\n break\n else:\n print(\"Invalid input received. Please try again.\")\n\n \n userWallet = Wallet(wallet)",
"path": "trading_system.py"
}
] | 7_3 | python | import sys
import unittest
def minute_to_ohlc(data, interval):
ohlc_data = []
for i in range(0, len(data), interval):
time_period = data[i:i + interval]
timestamps, prices = zip(*time_period)
ohlc = {}
ohlc['timestamp'] = timestamps[0]
ohlc['open'] = prices[0]
ohlc['high'] = max(prices)
ohlc['low'] = min(prices)
ohlc['close'] = prices[-1]
ohlc_data.append(ohlc)
return ohlc_data
class TestTechnicalIndicators(unittest.TestCase):
def setUp(self):
# Timestamps are spaced 60 seconds apart
self.data_points = [[i * 60, i] for i in range(1, 11)]
def test_heikin_ashi(self):
from trading_system import TechnicalIndicators
ohlc_data = minute_to_ohlc(self.data_points, 5)
prev_candle = None
for data_point in ohlc_data:
new_candle = TechnicalIndicators.heikin_ashi(data_point, prev_candle)
prev_candle = new_candle
# Manual Heiken Ashi Calculation based on dataset
expected_new_candle = { 'open': 3, 'close': 8, 'high': 10, 'low': 3 }
self.assertEqual(new_candle, expected_new_candle)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestTechnicalIndicators))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main() |
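The Heikin-Ashi recurrence implemented above, written out and chained over the two OHLC candles that the test data produces (a sketch assuming trading_system.py matches the patch above):

# ha_close = (open + high + low + close) / 4
# ha_open  = (prev_ha_open + prev_ha_close) / 2
# ha_high  = max(high, ha_open, ha_close)
# ha_low   = min(low,  ha_open, ha_close)
from trading_system import TechnicalIndicators

candles = [
    {'timestamp': 60,  'open': 1, 'high': 5,  'low': 1, 'close': 5},
    {'timestamp': 360, 'open': 6, 'high': 10, 'low': 6, 'close': 10},
]
prev = None  # the first candle passes through unchanged
for candle in candles:
    prev = TechnicalIndicators.heikin_ashi(candle, prev)
print(prev)  # {'close': 8.0, 'open': 3.0, 'high': 10, 'low': 3.0}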
https://github.com/teamqurrent/BitcoinPaperTrader | Implement a `short` method within the `Wallet` class in `trading_system.py` that takes in the timestamp, price, and number to short, so that algorithms can open short positions | aa0dece | numpy
datetime
pytz | python3.9 | eb5bb53 | diff --git a/trading_system.py b/trading_system.py
--- a/trading_system.py
+++ b/trading_system.py
@@ -29,6 +29,7 @@ class Wallet:
def __init__(self, initial_cash):
self.cash = initial_cash
self.stock = 0
+ self.short_stock = 0
self.transactions = []
def buy(self, timestamp, price, number):
@@ -91,6 +92,65 @@ class Wallet:
print(transaction)
return True
+ def short(self, timestamp, price, number):
+ if np.isnan(price) or price <= 0:
+ print(f"Price: {price} is invalid")
+ return False
+
+ if self.transactions and timestamp <= self.transactions[-1]['timestamp']:
+ print(f"Cannot go back in time")
+ return False
+
+ if price * number > self.cash:
+ print(f"Not enough cash to short that number")
+ return False
+
+ if number <= 0:
+ print(f"Cannot short 0 or negative amount")
+ return False
+
+ self.cash += price * number
+ self.short_stock += number
+ transaction = {
+ 'type': 'short',
+ 'price': price,
+ 'number': number,
+ 'timestamp': timestamp
+ }
+ self.transactions.append(transaction)
+
+ print(transaction)
+ return True
+
+ def close_short(self, timestamp, price, number):
+ if np.isnan(price) or price <= 0:
+ print(f"Price: {price} is invalid")
+ return False
+
+ if self.transactions and timestamp <= self.transactions[-1]['timestamp']:
+ print(f"Cannot go back in time")
+ return False
+
+ if number > self.short_stock:
+ print(f"Not enough shorted Bitcoin to close")
+ return False
+
+ if number <= 0:
+ print(f"Cannot close 0 or negative amount")
+ return False
+
+ self.short_stock -= number
+ self.cash -= price * number
+ transaction = {
+ 'type': 'close_short',
+ 'price': price,
+ 'number': number,
+ 'timestamp': timestamp
+ }
+ self.transactions.append(transaction)
+
+ print(transaction)
+ return True
def load_historical_data(file_path):
try:
| [
{
"content": "import numpy as np\nfrom datetime import datetime\nfrom datetime import timedelta\nimport pytz\n\nclass TechnicalIndicators:\n\n # Method to create heiken ashi price candles\n @staticmethod\n def heikin_ashi(data_point, previous_candle):\n open_ = data_point['open']\n high = data_point['high']\n low = data_point['low']\n close = data_point['close']\n\n if previous_candle is None:\n return { 'open': open_, 'high': high, 'low': low, 'close': close }\n\n new_candle = {}\n new_candle['close'] = (open_ + high + low + close) / 4\n new_candle['open'] = (previous_candle['open'] + previous_candle['close']) / 2\n new_candle['high'] = max(high, new_candle['open'], new_candle['close'])\n new_candle['low'] = min(low, new_candle['open'], new_candle['close'])\n\n return new_candle\n\n\nclass Wallet:\n def __init__(self, initial_cash):\n self.cash = initial_cash\n self.stock = 0\n self.transactions = []\n\n def buy(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n \n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n \n if price * number > self.cash:\n print(f\"Not enough cash to buy that number\")\n return False \n\n if number <= 0:\n print(f\"Cannot buy 0 or negative amount\")\n return False \n\n self.cash -= price * number\n self.stock += number\n transaction = {\n 'type': 'buy',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def sell(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n\n if number > self.stock:\n print(f\"Not enough stock to sell\")\n return False \n\n if number <= 0:\n print(f\"Cannot sell 0 or negative amount\")\n return False \n\n self.stock -= number\n self.cash += price * number\n transaction = {\n 'type': 'sell',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n\ndef load_historical_data(file_path):\n try:\n print(\"Loading Sim Data...\")\n with open(file_path) as f:\n lines = f.read().split(\"\\n\")\n lines = [line.split(\",\") for line in lines if line] \n data_points = [[int(line[0]), float(line[1])] for line in lines]\n\n print(f\"{len(data_points)} minutes of data loaded\")\n return data_points\n\n except FileNotFoundError:\n print(f\"Error: The file {file_path} does not exist.\")\n return []\n except Exception as e:\n print(f\"An error occurred: {e}\")\n return []\n\n\ndef minute_to_ohlc(data, interval):\n ohlc_data = []\n\n for i in range(0, len(data), interval):\n time_period = data[i:i + interval]\n timestamps, prices = zip(*time_period)\n \n ohlc = {}\n ohlc['timestamp'] = timestamps[0]\n ohlc['open'] = prices[0]\n ohlc['high'] = max(prices)\n ohlc['low'] = min(prices)\n ohlc['close'] = prices[-1]\n\n ohlc_data.append(ohlc)\n \n return ohlc_data\n\n\ndef get_user_input():\n wallet = input(\"Please enter the starting size (in dollars) of the wallet you would like to simulate: \")\n start_date = input(\"Enter the start date (mm-dd-yyyy): \")\n end_date = input(\"Enter the end date (mm-dd-yyyy): \")\n data_time_interval = input(\"Enter the time interval in minutes: \")\n return wallet, start_date, end_date, 
data_time_interval\n\n\ndef user_settings(wallet, start_date, end_date, data_time_interval, historical_data):\n first_timestamp = historical_data[0][0]\n last_timestamp = historical_data[-1][0]\n\n try:\n wallet = float(wallet)\n if wallet <= 0:\n raise ValueError(\"Wallet size must be positive.\")\n\n utc_tz = pytz.utc\n start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')\n end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')\n start_date_obj = utc_tz.localize(start_date_obj)\n end_date_obj = utc_tz.localize(end_date_obj)\n start_timestamp = int(start_date_obj.timestamp()) + 60\n end_timestamp = int(end_date_obj.timestamp()) + 60\n\n data_time_interval = int(data_time_interval)\n\n if data_time_interval < 1:\n raise ValueError(\"Time interval must be at least 1 minute.\")\n\n if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):\n raise ValueError(\"Start date and end date must be within the range of historical data.\")\n\n if start_timestamp > end_timestamp:\n raise ValueError(\"End date must be after the start date.\")\n\n except ValueError as e:\n print(f\"Invalid input: {e}\")\n return None, None\n\n filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]\n adjusted_data = minute_to_ohlc(filtered_data, data_time_interval)\n\n return wallet, adjusted_data\n\n\nif __name__ == \"__main__\":\n file_path = \"HistoricalBTCdata.txt\"\n historical_data = load_historical_data(file_path)\n\n while True:\n wallet, start_date, end_date, data_time_interval = get_user_input()\n\n wallet, adjusted_historical_data = user_settings(wallet, start_date, end_date, data_time_interval, historical_data)\n\n if wallet is not None and adjusted_historical_data is not None:\n break\n else:\n print(\"Invalid input received. Please try again.\")\n\n userWallet = Wallet(wallet)",
"path": "trading_system.py"
}
] | 7_4 | python | import sys
import unittest
class TestShort(unittest.TestCase):
def setUp(self):
from trading_system import Wallet
self.wallet = Wallet(10000)
def test_short(self):
self.wallet.short(1, 1000, 1)
self.assertEqual(self.wallet.cash, 11000)
self.assertEqual(self.wallet.short_stock, 1)
def test_close_short(self):
self.wallet.short(1, 1000, 1)
self.assertEqual(self.wallet.cash, 11000)
self.assertEqual(self.wallet.short_stock, 1)
self.wallet.close_short(2, 900, 1)
self.assertEqual(self.wallet.cash, 10100)
self.assertEqual(self.wallet.short_stock, 0)
    def test_short_input_validation(self):
        # Short more than the wallet's cash can cover
self.assertFalse(self.wallet.short(1, 1000, 11))
# Short back in time
self.wallet.buy(1, 1000, 1)
self.assertFalse(self.wallet.short(0, 1000, 1))
# Short negative price
self.assertFalse(self.wallet.short(2, -1, 100))
# Short negative number
self.assertFalse(self.wallet.short(3, 1000, -1))
def test_close_short_input_validation(self):
from trading_system import Wallet
self.wallet.short(1, 1000, 10)
# Trying to close more than currently short
self.assertFalse(self.wallet.close_short(1, 1000, 11))
# Trying to close back in time
self.assertFalse(self.wallet.close_short(0, 1000, 1))
# Closing with negative price
self.assertFalse(self.wallet.close_short(2, -1, 1))
# Closing negative number
self.assertFalse(self.wallet.close_short(3, 1000, -1))
# Closing without shorting any
new_wallet = Wallet(10000)
self.assertFalse(new_wallet.close_short(1, 1000, 1))
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestShort))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main() |
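A quick sketch of the short-position arithmetic, assuming trading_system.py matches the patch above: shorting credits cash up front, and closing debits it at the later price, so a falling price yields a profit:

from trading_system import Wallet

w = Wallet(10000)
w.short(1, 1000, 1)       # cash 10000 + 1000 = 11000, short_stock 1
w.close_short(2, 900, 1)  # cash 11000 - 900  = 10100, short_stock 0
assert w.cash == 10100 and w.short_stock == 0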
https://github.com/teamqurrent/BitcoinPaperTrader | Implement a simple moving average algorithm class in `trading_system.py` that buys when the current price is below the moving average, and sells and then opens a short when it's above the moving average. The class should have an init method that takes in a wallet (from the `Wallet` class), the parsed and formatted price data, and a window size (for the moving average calculation). The class should also have a method that calculates the moving average. Lastly, the class should have an execute method that loops through all the data points and contains the logic of the strategy. | eb5bb53 | numpy
datetime
pytz | python3.9 | 388f8ce | diff --git a/trading_system.py b/trading_system.py
--- a/trading_system.py
+++ b/trading_system.py
@@ -37,7 +37,7 @@ class Wallet:
print(f"Price: {price} is invalid")
return False
- if self.transactions and timestamp <= self.transactions[-1]['timestamp']:
+ if self.transactions and timestamp < self.transactions[-1]['timestamp']:
print(f"Cannot go back in time")
return False
@@ -97,8 +97,8 @@ class Wallet:
print(f"Price: {price} is invalid")
return False
- if self.transactions and timestamp <= self.transactions[-1]['timestamp']:
- print(f"Cannot go back in time")
+ if self.transactions and timestamp < self.transactions[-1]['timestamp']:
+ print(f"Cannot short back in time")
return False
if price * number > self.cash:
@@ -127,8 +127,8 @@ class Wallet:
print(f"Price: {price} is invalid")
return False
- if self.transactions and timestamp <= self.transactions[-1]['timestamp']:
- print(f"Cannot go back in time")
+ if self.transactions and timestamp < self.transactions[-1]['timestamp']:
+ print(f"Cannot close short back in time")
return False
if number > self.short_stock:
@@ -152,6 +152,38 @@ class Wallet:
print(transaction)
return True
+class SimpleMovingAverageStrategy:
+ def __init__(self, wallet, data, window_size):
+ self.wallet = wallet
+ self.data = data
+ self.window_size = window_size
+
+ def calculate_moving_average(self, data):
+ return sum(data) / len(data)
+
+ def execute(self):
+ bitcoin_prices = []
+
+ for data_point in self.data:
+ bitcoin_prices.append(data_point['close'])
+ if len(bitcoin_prices) > self.window_size:
+ bitcoin_prices.pop(0)
+ moving_average = self.calculate_moving_average(bitcoin_prices)
+
+ if data_point['close'] < moving_average and self.wallet.stock == 0:
+ self.wallet.close_short(data_point['timestamp'], data_point['close'], self.wallet.short_stock)
+ self.wallet.buy(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])
+ elif data_point['close'] > moving_average and self.wallet.stock > 0:
+ self.wallet.sell(data_point['timestamp'], data_point['close'], self.wallet.stock)
+ self.wallet.short(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])
+
+ # Closing remaining positions
+ last_data = self.data[-1]
+ if self.wallet.stock > 0:
+ self.wallet.sell(last_data['timestamp'], last_data['close'], self.wallet.stock)
+ if self.wallet.short_stock > 0:
+ self.wallet.close_short(last_data['timestamp'], last_data['close'], self.wallet.short_stock)
+
def load_historical_data(file_path):
try:
print("Loading Sim Data...")
@@ -236,6 +268,7 @@ def user_settings(wallet, start_date, end_date, data_time_interval, historical_d
return wallet, adjusted_data
+
if __name__ == "__main__":
file_path = "HistoricalBTCdata.txt"
historical_data = load_historical_data(file_path)
@@ -250,4 +283,11 @@ if __name__ == "__main__":
else:
print("Invalid input received. Please try again.")
- userWallet = Wallet(wallet)
\ No newline at end of file
+ userWallet = Wallet(wallet)
+
+ # Calculate short term simple moving average strategy
+ sma_strategy = SimpleMovingAverageStrategy(userWallet, adjusted_historical_data, 5)
+ sma_strategy.execute()
+
+ print("Final wallet cash: ", userWallet.cash)
+ print("Final owed shorts: ", userWallet.short_stock)
| [
{
"content": "import numpy as np\nfrom datetime import datetime\nfrom datetime import timedelta\nimport pytz\n\nclass TechnicalIndicators:\n\n # Method to create heiken ashi price candles\n @staticmethod\n def heikin_ashi(data_point, previous_candle):\n open_ = data_point['open']\n high = data_point['high']\n low = data_point['low']\n close = data_point['close']\n\n if previous_candle is None:\n return { 'open': open_, 'high': high, 'low': low, 'close': close }\n\n new_candle = {}\n new_candle['close'] = (open_ + high + low + close) / 4\n new_candle['open'] = (previous_candle['open'] + previous_candle['close']) / 2\n new_candle['high'] = max(high, new_candle['open'], new_candle['close'])\n new_candle['low'] = min(low, new_candle['open'], new_candle['close'])\n\n return new_candle\n\n\nclass Wallet:\n def __init__(self, initial_cash):\n self.cash = initial_cash\n self.stock = 0\n self.short_stock = 0\n self.transactions = []\n\n def buy(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n \n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n \n if price * number > self.cash:\n print(f\"Not enough cash to buy that number\")\n return False \n\n if number <= 0:\n print(f\"Cannot buy 0 or negative amount\")\n return False \n\n self.cash -= price * number\n self.stock += number\n transaction = {\n 'type': 'buy',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def sell(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n\n if number > self.stock:\n print(f\"Not enough stock to sell\")\n return False \n\n if number <= 0:\n print(f\"Cannot sell 0 or negative amount\")\n return False \n\n self.stock -= number\n self.cash += price * number\n transaction = {\n 'type': 'sell',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n\n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False\n\n if price * number > self.cash:\n print(f\"Not enough cash to short that number\")\n return False\n\n if number <= 0:\n print(f\"Cannot short 0 or negative amount\")\n return False\n\n self.cash += price * number\n self.short_stock += number\n transaction = {\n 'type': 'short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def close_short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n\n if number > self.short_stock:\n print(f\"Not enough shorted Bitcoin to close\")\n return False \n\n if number <= 0:\n print(f\"Cannot close 0 or negative amount\")\n return False \n\n self.short_stock -= number\n self.cash -= price * number\n transaction = {\n 'type': 'close_short',\n 
'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\ndef load_historical_data(file_path):\n try:\n print(\"Loading Sim Data...\")\n with open(file_path) as f:\n lines = f.read().split(\"\\n\")\n lines = [line.split(\",\") for line in lines if line] \n data_points = [[int(line[0]), float(line[1])] for line in lines]\n\n print(f\"{len(data_points)} minutes of data loaded\")\n return data_points\n\n except FileNotFoundError:\n print(f\"Error: The file {file_path} does not exist.\")\n return []\n except Exception as e:\n print(f\"An error occurred: {e}\")\n return []\n\n\ndef minute_to_ohlc(data, interval):\n ohlc_data = []\n\n for i in range(0, len(data), interval):\n time_period = data[i:i + interval]\n timestamps, prices = zip(*time_period)\n \n ohlc = {}\n ohlc['timestamp'] = timestamps[0]\n ohlc['open'] = prices[0]\n ohlc['high'] = max(prices)\n ohlc['low'] = min(prices)\n ohlc['close'] = prices[-1]\n\n ohlc_data.append(ohlc)\n \n return ohlc_data\n\n\ndef get_user_input():\n wallet = input(\"Please enter the starting size (in dollars) of the wallet you would like to simulate: \")\n start_date = input(\"Enter the start date (mm-dd-yyyy): \")\n end_date = input(\"Enter the end date (mm-dd-yyyy): \")\n data_time_interval = input(\"Enter the time interval in minutes: \")\n return wallet, start_date, end_date, data_time_interval\n\n\ndef user_settings(wallet, start_date, end_date, data_time_interval, historical_data):\n first_timestamp = historical_data[0][0]\n last_timestamp = historical_data[-1][0]\n\n try:\n wallet = float(wallet)\n if wallet <= 0:\n raise ValueError(\"Wallet size must be positive.\")\n\n utc_tz = pytz.utc\n start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')\n end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')\n start_date_obj = utc_tz.localize(start_date_obj)\n end_date_obj = utc_tz.localize(end_date_obj)\n start_timestamp = int(start_date_obj.timestamp()) + 60\n end_timestamp = int(end_date_obj.timestamp()) + 60\n\n data_time_interval = int(data_time_interval)\n\n if data_time_interval < 1:\n raise ValueError(\"Time interval must be at least 1 minute.\")\n\n if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):\n raise ValueError(\"Start date and end date must be within the range of historical data.\")\n\n if start_timestamp > end_timestamp:\n raise ValueError(\"End date must be after the start date.\")\n\n except ValueError as e:\n print(f\"Invalid input: {e}\")\n return None, None\n\n filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]\n adjusted_data = minute_to_ohlc(filtered_data, data_time_interval)\n\n return wallet, adjusted_data\n\n\nif __name__ == \"__main__\":\n file_path = \"HistoricalBTCdata.txt\"\n historical_data = load_historical_data(file_path)\n\n while True:\n wallet, start_date, end_date, data_time_interval = get_user_input()\n\n wallet, adjusted_historical_data = user_settings(wallet, start_date, end_date, data_time_interval, historical_data)\n\n if wallet is not None and adjusted_historical_data is not None:\n break\n else:\n print(\"Invalid input received. Please try again.\")\n\n userWallet = Wallet(wallet)",
"path": "trading_system.py"
}
] | 7_5 | python | import sys
import unittest
class TestSMA(unittest.TestCase):
def setUp(self):
from trading_system import Wallet, SimpleMovingAverageStrategy
self.wallet = Wallet(1000)
self.data_points = [{'timestamp': 1, 'close': 2},
{'timestamp': 2, 'close': 3},
{'timestamp': 3, 'close': 1},
{'timestamp': 4, 'close': 5}]
self.sma_strategy = SimpleMovingAverageStrategy(self.wallet, self.data_points, 2)
def test_wallet_ending_cash_and_shares(self):
self.sma_strategy.execute()
self.assertEqual(self.wallet.cash, 5000)
self.assertEqual(self.wallet.stock, 0)
self.assertEqual(self.wallet.short_stock, 0)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSMA))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main() |
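Tracing the strategy on the four-candle series from the test above (window size 2), assuming trading_system.py matches the patch: the close of 1 dips below its 2-period average and triggers a buy, the close of 5 rises above it and triggers a sell plus a short, and the short is closed at the final price:

from trading_system import Wallet, SimpleMovingAverageStrategy

w = Wallet(1000)
data = [{'timestamp': t, 'close': c} for t, c in [(1, 2), (2, 3), (3, 1), (4, 5)]]
SimpleMovingAverageStrategy(w, data, 2).execute()
print(w.cash)  # 5000.0: buy 1000 units at 1, sell them at 5; the short nets zero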
https://github.com/teamqurrent/BitcoinPaperTrader | Add a method to the `Wallet` class in `trading_system.py` called totalProfits that takes in the initial wallet balance and the final wallet balance and returns the net profit as well as the profit percentage | 388f8ce | numpy
datetime
pytz | python3.9 | dc3f4a0 | diff --git a/trading_system.py b/trading_system.py
--- a/trading_system.py
+++ b/trading_system.py
@@ -32,6 +32,70 @@ class Wallet:
self.short_stock = 0
self.transactions = []
+ def totalProfits(self, initial_wallet, final_wallet):
+ net = final_wallet - initial_wallet
+ percentage = net * 100/initial_wallet
+ return net, percentage
+
+ def totalClosedTrades(self):
+ total_trades = 0
+
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+
+ if current_transaction['type'] in ['sell', 'close_short']:
+ total_trades += 1
+
+ return total_trades
+
+ def percentProfitable(self):
+ profitable_trades = 0
+ total_trades = 0
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+ previous_transaction = self.transactions[i - 1]
+
+ if current_transaction['type'] in ['sell'] and previous_transaction['type'] in ['buy']:
+ # Determine if trade was profitable
+ if current_transaction['price'] * current_transaction['number'] > previous_transaction['price'] * previous_transaction['number']:
+ profitable_trades += 1
+
+ total_trades += 1
+ else:
+ if current_transaction['type'] in ['close_short'] and previous_transaction['type'] in ['short']:
+ if current_transaction['price'] * current_transaction['number'] < previous_transaction['price'] * previous_transaction['number']:
+ profitable_trades += 1
+
+ total_trades += 1
+
+ return 100 * profitable_trades/total_trades
+
+ def profitFactor(self):
+ total_profit = 0
+ total_loss = 0
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+ previous_transaction = self.transactions[i - 1]
+
+ if current_transaction['type'] in ['sell'] and previous_transaction['type'] in ['buy']:
+ profit = (current_transaction['price'] * current_transaction['number']) - (previous_transaction['price'] * previous_transaction['number'])
+ if profit > 0:
+ total_profit += profit
+ else:
+ total_loss -= profit
+
+ elif current_transaction['type'] in ['close_short'] and previous_transaction['type'] in ['short']:
+ profit = (previous_transaction['price'] * previous_transaction['number']) - (current_transaction['price'] * current_transaction['number'])
+ if profit > 0:
+ total_profit += profit
+ else:
+ total_loss -= profit
+
+ if total_loss == 0:
+ return float('inf')
+ else:
+ return total_profit / total_loss
+
def buy(self, timestamp, price, number):
if np.isnan(price) or price <= 0:
print(f"Price: {price} is invalid")
@@ -284,10 +348,17 @@ if __name__ == "__main__":
print("Invalid input received. Please try again.")
userWallet = Wallet(wallet)
-
+ initialCash = userWallet.cash
# Calculate short term simple moving average strategy
sma_strategy = SimpleMovingAverageStrategy(userWallet, adjusted_historical_data, 5)
sma_strategy.execute()
+ net, percentage = userWallet.totalProfits(initialCash, userWallet.cash)
print("Final wallet cash: ", userWallet.cash)
+ print("Net Profit: ", net)
+ print("Profit Percentage: ", percentage)
print("Final owed shorts: ", userWallet.short_stock)
+ print("Percentage profitable: ", userWallet.percentProfitable())
+ print("Total number of closed trades: ", userWallet.totalClosedTrades())
+ print("Profit factor: ", userWallet.profitFactor())
+
| [
{
"content": "import numpy as np\nfrom datetime import datetime\nfrom datetime import timedelta\nimport pytz\n\nclass TechnicalIndicators:\n\n # Method to create heiken ashi price candles\n @staticmethod\n def heikin_ashi(data_point, previous_candle):\n open_ = data_point['open']\n high = data_point['high']\n low = data_point['low']\n close = data_point['close']\n\n if previous_candle is None:\n return { 'open': open_, 'high': high, 'low': low, 'close': close }\n\n new_candle = {}\n new_candle['close'] = (open_ + high + low + close) / 4\n new_candle['open'] = (previous_candle['open'] + previous_candle['close']) / 2\n new_candle['high'] = max(high, new_candle['open'], new_candle['close'])\n new_candle['low'] = min(low, new_candle['open'], new_candle['close'])\n\n return new_candle\n\n\nclass Wallet:\n def __init__(self, initial_cash):\n self.cash = initial_cash\n self.stock = 0\n self.short_stock = 0\n self.transactions = []\n\n def buy(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n \n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n \n if price * number > self.cash:\n print(f\"Not enough cash to buy that number\")\n return False \n\n if number <= 0:\n print(f\"Cannot buy 0 or negative amount\")\n return False \n\n self.cash -= price * number\n self.stock += number\n transaction = {\n 'type': 'buy',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def sell(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n\n if number > self.stock:\n print(f\"Not enough stock to sell\")\n return False \n\n if number <= 0:\n print(f\"Cannot sell 0 or negative amount\")\n return False \n\n self.stock -= number\n self.cash += price * number\n transaction = {\n 'type': 'sell',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n\n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot short back in time\")\n return False\n\n if price * number > self.cash:\n print(f\"Not enough cash to short that number\")\n return False\n\n if number <= 0:\n print(f\"Cannot short 0 or negative amount\")\n return False\n\n self.cash += price * number\n self.short_stock += number\n transaction = {\n 'type': 'short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def close_short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot close short back in time\")\n return False \n\n if number > self.short_stock:\n print(f\"Not enough shorted Bitcoin to close\")\n return False \n\n if number <= 0:\n print(f\"Cannot close 0 or negative amount\")\n return False \n\n self.short_stock -= number\n self.cash -= price * number\n transaction = {\n 'type': 
'close_short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\nclass SimpleMovingAverageStrategy:\n def __init__(self, wallet, data, window_size):\n self.wallet = wallet\n self.data = data\n self.window_size = window_size\n\n def calculate_moving_average(self, data):\n return sum(data) / len(data)\n\n def execute(self):\n bitcoin_prices = []\n\n for data_point in self.data:\n bitcoin_prices.append(data_point['close'])\n if len(bitcoin_prices) > self.window_size:\n bitcoin_prices.pop(0)\n moving_average = self.calculate_moving_average(bitcoin_prices)\n\n if data_point['close'] < moving_average and self.wallet.stock == 0:\n self.wallet.close_short(data_point['timestamp'], data_point['close'], self.wallet.short_stock)\n self.wallet.buy(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])\n elif data_point['close'] > moving_average and self.wallet.stock > 0:\n self.wallet.sell(data_point['timestamp'], data_point['close'], self.wallet.stock)\n self.wallet.short(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])\n\n # Closing remaining positions\n last_data = self.data[-1]\n if self.wallet.stock > 0:\n self.wallet.sell(last_data['timestamp'], last_data['close'], self.wallet.stock)\n if self.wallet.short_stock > 0:\n self.wallet.close_short(last_data['timestamp'], last_data['close'], self.wallet.short_stock)\n \ndef load_historical_data(file_path):\n try:\n print(\"Loading Sim Data...\")\n with open(file_path) as f:\n lines = f.read().split(\"\\n\")\n lines = [line.split(\",\") for line in lines if line] \n data_points = [[int(line[0]), float(line[1])] for line in lines]\n\n print(f\"{len(data_points)} minutes of data loaded\")\n return data_points\n\n except FileNotFoundError:\n print(f\"Error: The file {file_path} does not exist.\")\n return []\n except Exception as e:\n print(f\"An error occurred: {e}\")\n return []\n\n\ndef minute_to_ohlc(data, interval):\n ohlc_data = []\n\n for i in range(0, len(data), interval):\n time_period = data[i:i + interval]\n timestamps, prices = zip(*time_period)\n \n ohlc = {}\n ohlc['timestamp'] = timestamps[0]\n ohlc['open'] = prices[0]\n ohlc['high'] = max(prices)\n ohlc['low'] = min(prices)\n ohlc['close'] = prices[-1]\n\n ohlc_data.append(ohlc)\n \n return ohlc_data\n\n\ndef get_user_input():\n wallet = input(\"Please enter the starting size (in dollars) of the wallet you would like to simulate: \")\n start_date = input(\"Enter the start date (mm-dd-yyyy): \")\n end_date = input(\"Enter the end date (mm-dd-yyyy): \")\n data_time_interval = input(\"Enter the time interval in minutes: \")\n return wallet, start_date, end_date, data_time_interval\n\n\ndef user_settings(wallet, start_date, end_date, data_time_interval, historical_data):\n first_timestamp = historical_data[0][0]\n last_timestamp = historical_data[-1][0]\n\n try:\n wallet = float(wallet)\n if wallet <= 0:\n raise ValueError(\"Wallet size must be positive.\")\n\n utc_tz = pytz.utc\n start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')\n end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')\n start_date_obj = utc_tz.localize(start_date_obj)\n end_date_obj = utc_tz.localize(end_date_obj)\n start_timestamp = int(start_date_obj.timestamp()) + 60\n end_timestamp = int(end_date_obj.timestamp()) + 60\n\n data_time_interval = int(data_time_interval)\n\n if data_time_interval < 1:\n raise ValueError(\"Time interval must be at least 1 
minute.\")\n\n if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):\n raise ValueError(\"Start date and end date must be within the range of historical data.\")\n\n if start_timestamp > end_timestamp:\n raise ValueError(\"End date must be after the start date.\")\n\n except ValueError as e:\n print(f\"Invalid input: {e}\")\n return None, None\n\n filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]\n adjusted_data = minute_to_ohlc(filtered_data, data_time_interval)\n\n return wallet, adjusted_data\n\n\n\nif __name__ == \"__main__\":\n file_path = \"HistoricalBTCdata.txt\"\n historical_data = load_historical_data(file_path)\n\n while True:\n wallet, start_date, end_date, data_time_interval = get_user_input()\n\n wallet, adjusted_historical_data = user_settings(wallet, start_date, end_date, data_time_interval, historical_data)\n\n if wallet is not None and adjusted_historical_data is not None:\n break\n else:\n print(\"Invalid input received. Please try again.\")\n\n userWallet = Wallet(wallet)\n\n # Calculate short term simple moving average strategy\n sma_strategy = SimpleMovingAverageStrategy(userWallet, adjusted_historical_data, 5)\n sma_strategy.execute()\n\n print(\"Final wallet cash: \", userWallet.cash)\n print(\"Final owed shorts: \", userWallet.short_stock)\n",
"path": "trading_system.py"
}
] | 7_6 | python | import sys
import unittest
class TestSummary(unittest.TestCase):
def setUp(self):
from trading_system import Wallet
self.wallet = Wallet(50)
def test_total_profits(self):
final_wallet = 100
expected_net = 50
expected_percentage = 100.0
net, percentage = self.wallet.totalProfits(50,final_wallet)
self.assertEqual(expected_net, net)
self.assertEqual(expected_percentage, percentage)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSummary))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
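For reference, a minimal worked sketch of the profit arithmetic the 7_6 test above checks; the standalone variable names here are illustrative, while the real logic lives on Wallet.totalProfits:

# Net and percentage profit for a wallet that grows from 50 to 100.
initial_wallet = 50.0
final_wallet = 100.0
net = final_wallet - initial_wallet        # 50.0
percentage = net * 100 / initial_wallet    # 100.0
print(net, percentage)                     # matches the test's expected values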
https://github.com/teamqurrent/BitcoinPaperTrader | Add a method called exponential_moving_average, which calculates the exponential moving average (EMA), to the `TechnicalIndicators` class in `trading_system.py`. It should take in OHLC data points and a window size and return the EMA at the most recent data point. | dc3f4a0 | numpy
datetime
pytz | python3.9 | f856104 | diff --git a/trading_system.py b/trading_system.py
--- a/trading_system.py
+++ b/trading_system.py
@@ -23,6 +23,18 @@ class TechnicalIndicators:
new_candle['low'] = min(low, new_candle['open'], new_candle['close'])
return new_candle
+
+ # Method to calculate EMA (exponential moving average)
+ @staticmethod
+ def exponential_moving_average(data, window):
+ if len(data) < window:
+ raise ValueError("Data is too short")
+ c = 2.0 / (window + 1)
+ current_ema = sum(data_point['close'] for data_point in data[:window]) / window
+
+ for data_point in data[window:]:
+ current_ema = (c * data_point['close']) + ((1 - c) * current_ema)
+ return current_ema
class Wallet:
| [
{
"content": "import numpy as np\nfrom datetime import datetime\nfrom datetime import timedelta\nimport pytz\n\nclass TechnicalIndicators:\n\n # Method to create heiken ashi price candles\n @staticmethod\n def heikin_ashi(data_point, previous_candle):\n open_ = data_point['open']\n high = data_point['high']\n low = data_point['low']\n close = data_point['close']\n\n if previous_candle is None:\n return { 'open': open_, 'high': high, 'low': low, 'close': close }\n\n new_candle = {}\n new_candle['close'] = (open_ + high + low + close) / 4\n new_candle['open'] = (previous_candle['open'] + previous_candle['close']) / 2\n new_candle['high'] = max(high, new_candle['open'], new_candle['close'])\n new_candle['low'] = min(low, new_candle['open'], new_candle['close'])\n\n return new_candle\n\n\nclass Wallet:\n def __init__(self, initial_cash):\n self.cash = initial_cash\n self.stock = 0\n self.short_stock = 0\n self.transactions = []\n\n def totalProfits(self, initial_wallet, final_wallet):\n net = final_wallet - initial_wallet\n percentage = net * 100/initial_wallet\n return net, percentage\n \n def totalClosedTrades(self):\n total_trades = 0\n\n for i in range(1, len(self.transactions)):\n current_transaction = self.transactions[i]\n\n if current_transaction['type'] in ['sell', 'close_short']: \n total_trades += 1\n \n return total_trades\n\n def percentProfitable(self):\n profitable_trades = 0\n total_trades = 0\n for i in range(1, len(self.transactions)):\n current_transaction = self.transactions[i]\n previous_transaction = self.transactions[i - 1]\n\n if current_transaction['type'] in ['sell'] and previous_transaction['type'] in ['buy']:\n # Determine if trade was profitable\n if current_transaction['price'] * current_transaction['number'] > previous_transaction['price'] * previous_transaction['number']:\n profitable_trades += 1\n\n total_trades += 1\n else:\n if current_transaction['type'] in ['close_short'] and previous_transaction['type'] in ['short']:\n if current_transaction['price'] * current_transaction['number'] < previous_transaction['price'] * previous_transaction['number']:\n profitable_trades += 1\n \n total_trades += 1\n\n return 100 * profitable_trades/total_trades\n \n def profitFactor(self):\n total_profit = 0\n total_loss = 0\n for i in range(1, len(self.transactions)):\n current_transaction = self.transactions[i]\n previous_transaction = self.transactions[i - 1]\n\n if current_transaction['type'] in ['sell'] and previous_transaction['type'] in ['buy']:\n profit = (current_transaction['price'] * current_transaction['number']) - (previous_transaction['price'] * previous_transaction['number'])\n if profit > 0:\n total_profit += profit\n else:\n total_loss -= profit \n\n elif current_transaction['type'] in ['close_short'] and previous_transaction['type'] in ['short']:\n profit = (previous_transaction['price'] * previous_transaction['number']) - (current_transaction['price'] * current_transaction['number'])\n if profit > 0:\n total_profit += profit\n else:\n total_loss -= profit \n\n if total_loss == 0:\n return float('inf')\n else:\n return total_profit / total_loss\n \n def buy(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n \n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n \n if price * number > self.cash:\n print(f\"Not enough cash to buy that number\")\n return False \n\n if number <= 0:\n print(f\"Cannot buy 0 or negative 
amount\")\n return False \n\n self.cash -= price * number\n self.stock += number\n transaction = {\n 'type': 'buy',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def sell(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n\n if number > self.stock:\n print(f\"Not enough stock to sell\")\n return False \n\n if number <= 0:\n print(f\"Cannot sell 0 or negative amount\")\n return False \n\n self.stock -= number\n self.cash += price * number\n transaction = {\n 'type': 'sell',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n\n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot short back in time\")\n return False\n\n if price * number > self.cash:\n print(f\"Not enough cash to short that number\")\n return False\n\n if number <= 0:\n print(f\"Cannot short 0 or negative amount\")\n return False\n\n self.cash += price * number\n self.short_stock += number\n transaction = {\n 'type': 'short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def close_short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot close short back in time\")\n return False \n\n if number > self.short_stock:\n print(f\"Not enough shorted Bitcoin to close\")\n return False \n\n if number <= 0:\n print(f\"Cannot close 0 or negative amount\")\n return False \n\n self.short_stock -= number\n self.cash -= price * number\n transaction = {\n 'type': 'close_short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\nclass SimpleMovingAverageStrategy:\n def __init__(self, wallet, data, window_size):\n self.wallet = wallet\n self.data = data\n self.window_size = window_size\n\n def calculate_moving_average(self, data):\n return sum(data) / len(data)\n\n def execute(self):\n bitcoin_prices = []\n\n for data_point in self.data:\n bitcoin_prices.append(data_point['close'])\n if len(bitcoin_prices) > self.window_size:\n bitcoin_prices.pop(0)\n moving_average = self.calculate_moving_average(bitcoin_prices)\n\n if data_point['close'] < moving_average and self.wallet.stock == 0:\n self.wallet.close_short(data_point['timestamp'], data_point['close'], self.wallet.short_stock)\n self.wallet.buy(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])\n elif data_point['close'] > moving_average and self.wallet.stock > 0:\n self.wallet.sell(data_point['timestamp'], data_point['close'], self.wallet.stock)\n self.wallet.short(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])\n\n # Closing remaining positions\n last_data = self.data[-1]\n if self.wallet.stock > 0:\n self.wallet.sell(last_data['timestamp'], last_data['close'], self.wallet.stock)\n if 
self.wallet.short_stock > 0:\n self.wallet.close_short(last_data['timestamp'], last_data['close'], self.wallet.short_stock)\n \ndef load_historical_data(file_path):\n try:\n print(\"Loading Sim Data...\")\n with open(file_path) as f:\n lines = f.read().split(\"\\n\")\n lines = [line.split(\",\") for line in lines if line] \n data_points = [[int(line[0]), float(line[1])] for line in lines]\n\n print(f\"{len(data_points)} minutes of data loaded\")\n return data_points\n\n except FileNotFoundError:\n print(f\"Error: The file {file_path} does not exist.\")\n return []\n except Exception as e:\n print(f\"An error occurred: {e}\")\n return []\n\n\ndef minute_to_ohlc(data, interval):\n ohlc_data = []\n\n for i in range(0, len(data), interval):\n time_period = data[i:i + interval]\n timestamps, prices = zip(*time_period)\n \n ohlc = {}\n ohlc['timestamp'] = timestamps[0]\n ohlc['open'] = prices[0]\n ohlc['high'] = max(prices)\n ohlc['low'] = min(prices)\n ohlc['close'] = prices[-1]\n\n ohlc_data.append(ohlc)\n \n return ohlc_data\n\n\ndef get_user_input():\n wallet = input(\"Please enter the starting size (in dollars) of the wallet you would like to simulate: \")\n start_date = input(\"Enter the start date (mm-dd-yyyy): \")\n end_date = input(\"Enter the end date (mm-dd-yyyy): \")\n data_time_interval = input(\"Enter the time interval in minutes: \")\n return wallet, start_date, end_date, data_time_interval\n\n\ndef user_settings(wallet, start_date, end_date, data_time_interval, historical_data):\n first_timestamp = historical_data[0][0]\n last_timestamp = historical_data[-1][0]\n\n try:\n wallet = float(wallet)\n if wallet <= 0:\n raise ValueError(\"Wallet size must be positive.\")\n\n utc_tz = pytz.utc\n start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')\n end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')\n start_date_obj = utc_tz.localize(start_date_obj)\n end_date_obj = utc_tz.localize(end_date_obj)\n start_timestamp = int(start_date_obj.timestamp()) + 60\n end_timestamp = int(end_date_obj.timestamp()) + 60\n\n data_time_interval = int(data_time_interval)\n\n if data_time_interval < 1:\n raise ValueError(\"Time interval must be at least 1 minute.\")\n\n if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):\n raise ValueError(\"Start date and end date must be within the range of historical data.\")\n\n if start_timestamp > end_timestamp:\n raise ValueError(\"End date must be after the start date.\")\n\n except ValueError as e:\n print(f\"Invalid input: {e}\")\n return None, None\n\n filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]\n adjusted_data = minute_to_ohlc(filtered_data, data_time_interval)\n\n return wallet, adjusted_data\n\n\n\nif __name__ == \"__main__\":\n file_path = \"HistoricalBTCdata.txt\"\n historical_data = load_historical_data(file_path)\n\n while True:\n wallet, start_date, end_date, data_time_interval = get_user_input()\n\n wallet, adjusted_historical_data = user_settings(wallet, start_date, end_date, data_time_interval, historical_data)\n\n if wallet is not None and adjusted_historical_data is not None:\n break\n else:\n print(\"Invalid input received. 
Please try again.\")\n\n userWallet = Wallet(wallet)\n initialCash = userWallet.cash\n # Calculate short term simple moving average strategy\n sma_strategy = SimpleMovingAverageStrategy(userWallet, adjusted_historical_data, 5)\n sma_strategy.execute()\n net, percentage = userWallet.totalProfits(initialCash, userWallet.cash)\n\n print(\"Final wallet cash: \", userWallet.cash)\n print(\"Net Profit: \", net)\n print(\"Profit Percentage: \", percentage)\n print(\"Final owed shorts: \", userWallet.short_stock)\n print(\"Percentage profitable: \", userWallet.percentProfitable()) \n print(\"Total number of closed trades: \", userWallet.totalClosedTrades())\n print(\"Profit factor: \", userWallet.profitFactor())\n \n",
"path": "trading_system.py"
}
] | 7_7 | python | import sys
import unittest
class TestEMA(unittest.TestCase):
def setUp(self):
self.data_points = [{'close': price} for price in [1, 2, 3, 4, 5]]
def test_exponential_moving_average(self):
from trading_system import TechnicalIndicators
# Manual EMA calculation based on dataset
expected_ema = (5 * (2/(3+1))) + (3 * (1 - (2/(3+1))))
ema = TechnicalIndicators.exponential_moving_average(self.data_points, 3)
self.assertAlmostEqual(ema, expected_ema, places=6)
def test_exponential_moving_average_with_different_data(self):
from trading_system import TechnicalIndicators
different_data_points = [{'close': price} for price in [11, 21, 31, 41, 51]]
# Manual EMA calculation based on provided dataset
expected_ema = (2/(3+1)) * 51 + (1 - (2/(3+1))) * 31
ema = TechnicalIndicators.exponential_moving_average(different_data_points, 3)
self.assertAlmostEqual(ema, expected_ema, places=6)
def test_exponential_moving_average_raise_value_error(self):
from trading_system import TechnicalIndicators
short_data_points = [{'close': price} for price in [1, 2]]
# The function should raise a ValueError if the data is too short
self.assertRaises(ValueError, TechnicalIndicators.exponential_moving_average, short_data_points, 3)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestEMA))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main() |
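For reference, a minimal standalone sketch of the EMA recurrence that the 7_7 patch and test above exercise (window 3 over closes [1, 2, 3, 4, 5]; the variable names are illustrative):

# Seed with the SMA of the first `window` closes, then apply
# EMA_t = c * close_t + (1 - c) * EMA_{t-1}, where c = 2 / (window + 1).
data = [{'close': p} for p in [1, 2, 3, 4, 5]]
window = 3
c = 2.0 / (window + 1)                                  # 0.5
ema = sum(d['close'] for d in data[:window]) / window   # SMA seed = 2.0
for d in data[window:]:
    ema = c * d['close'] + (1 - c) * ema                # 3.0, then 4.0
print(ema)  # 4.0, equal to the test's expected_ema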
https://github.com/teamqurrent/BitcoinPaperTrader | Add a method to the `Wallet` class in `trading_system.py` called percentProfitable(), which calculates the number of profitable trades divided by the total number of trades and returns this value as a floating-point percentage (0-100). | 388f8ce | numpy
datetime
pytz | python3.9 | dc3f4a0 | diff --git a/trading_system.py b/trading_system.py
--- a/trading_system.py
+++ b/trading_system.py
@@ -32,6 +32,70 @@ class Wallet:
self.short_stock = 0
self.transactions = []
+ def totalProfits(self, initial_wallet, final_wallet):
+ net = final_wallet - initial_wallet
+ percentage = net * 100/initial_wallet
+ return net, percentage
+
+ def totalClosedTrades(self):
+ total_trades = 0
+
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+
+ if current_transaction['type'] in ['sell', 'close_short']:
+ total_trades += 1
+
+ return total_trades
+
+ def percentProfitable(self):
+ profitable_trades = 0
+ total_trades = 0
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+ previous_transaction = self.transactions[i - 1]
+
+ if current_transaction['type'] in ['sell'] and previous_transaction['type'] in ['buy']:
+ # Determine if trade was profitable
+ if current_transaction['price'] * current_transaction['number'] > previous_transaction['price'] * previous_transaction['number']:
+ profitable_trades += 1
+
+ total_trades += 1
+ else:
+ if current_transaction['type'] in ['close_short'] and previous_transaction['type'] in ['short']:
+ if current_transaction['price'] * current_transaction['number'] < previous_transaction['price'] * previous_transaction['number']:
+ profitable_trades += 1
+
+ total_trades += 1
+
+ return 100 * profitable_trades/total_trades
+
+ def profitFactor(self):
+ total_profit = 0
+ total_loss = 0
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+ previous_transaction = self.transactions[i - 1]
+
+ if current_transaction['type'] in ['sell'] and previous_transaction['type'] in ['buy']:
+ profit = (current_transaction['price'] * current_transaction['number']) - (previous_transaction['price'] * previous_transaction['number'])
+ if profit > 0:
+ total_profit += profit
+ else:
+ total_loss -= profit
+
+ elif current_transaction['type'] in ['close_short'] and previous_transaction['type'] in ['short']:
+ profit = (previous_transaction['price'] * previous_transaction['number']) - (current_transaction['price'] * current_transaction['number'])
+ if profit > 0:
+ total_profit += profit
+ else:
+ total_loss -= profit
+
+ if total_loss == 0:
+ return float('inf')
+ else:
+ return total_profit / total_loss
+
def buy(self, timestamp, price, number):
if np.isnan(price) or price <= 0:
print(f"Price: {price} is invalid")
@@ -284,10 +348,17 @@ if __name__ == "__main__":
print("Invalid input received. Please try again.")
userWallet = Wallet(wallet)
-
+ initialCash = userWallet.cash
# Calculate short term simple moving average strategy
sma_strategy = SimpleMovingAverageStrategy(userWallet, adjusted_historical_data, 5)
sma_strategy.execute()
+ net, percentage = userWallet.totalProfits(initialCash, userWallet.cash)
print("Final wallet cash: ", userWallet.cash)
+ print("Net Profit: ", net)
+ print("Profit Percentage: ", percentage)
print("Final owed shorts: ", userWallet.short_stock)
+ print("Percentage profitable: ", userWallet.percentProfitable())
+ print("Total number of closed trades: ", userWallet.totalClosedTrades())
+ print("Profit factor: ", userWallet.profitFactor())
+
| [
{
"content": "import numpy as np\nfrom datetime import datetime\nfrom datetime import timedelta\nimport pytz\n\nclass TechnicalIndicators:\n\n # Method to create heiken ashi price candles\n @staticmethod\n def heikin_ashi(data_point, previous_candle):\n open_ = data_point['open']\n high = data_point['high']\n low = data_point['low']\n close = data_point['close']\n\n if previous_candle is None:\n return { 'open': open_, 'high': high, 'low': low, 'close': close }\n\n new_candle = {}\n new_candle['close'] = (open_ + high + low + close) / 4\n new_candle['open'] = (previous_candle['open'] + previous_candle['close']) / 2\n new_candle['high'] = max(high, new_candle['open'], new_candle['close'])\n new_candle['low'] = min(low, new_candle['open'], new_candle['close'])\n\n return new_candle\n\n\nclass Wallet:\n def __init__(self, initial_cash):\n self.cash = initial_cash\n self.stock = 0\n self.short_stock = 0\n self.transactions = []\n\n def buy(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n \n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n \n if price * number > self.cash:\n print(f\"Not enough cash to buy that number\")\n return False \n\n if number <= 0:\n print(f\"Cannot buy 0 or negative amount\")\n return False \n\n self.cash -= price * number\n self.stock += number\n transaction = {\n 'type': 'buy',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def sell(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n\n if number > self.stock:\n print(f\"Not enough stock to sell\")\n return False \n\n if number <= 0:\n print(f\"Cannot sell 0 or negative amount\")\n return False \n\n self.stock -= number\n self.cash += price * number\n transaction = {\n 'type': 'sell',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n\n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot short back in time\")\n return False\n\n if price * number > self.cash:\n print(f\"Not enough cash to short that number\")\n return False\n\n if number <= 0:\n print(f\"Cannot short 0 or negative amount\")\n return False\n\n self.cash += price * number\n self.short_stock += number\n transaction = {\n 'type': 'short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def close_short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot close short back in time\")\n return False \n\n if number > self.short_stock:\n print(f\"Not enough shorted Bitcoin to close\")\n return False \n\n if number <= 0:\n print(f\"Cannot close 0 or negative amount\")\n return False \n\n self.short_stock -= number\n self.cash -= price * number\n transaction = {\n 'type': 
'close_short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\nclass SimpleMovingAverageStrategy:\n def __init__(self, wallet, data, window_size):\n self.wallet = wallet\n self.data = data\n self.window_size = window_size\n\n def calculate_moving_average(self, data):\n return sum(data) / len(data)\n\n def execute(self):\n bitcoin_prices = []\n\n for data_point in self.data:\n bitcoin_prices.append(data_point['close'])\n if len(bitcoin_prices) > self.window_size:\n bitcoin_prices.pop(0)\n moving_average = self.calculate_moving_average(bitcoin_prices)\n\n if data_point['close'] < moving_average and self.wallet.stock == 0:\n self.wallet.close_short(data_point['timestamp'], data_point['close'], self.wallet.short_stock)\n self.wallet.buy(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])\n elif data_point['close'] > moving_average and self.wallet.stock > 0:\n self.wallet.sell(data_point['timestamp'], data_point['close'], self.wallet.stock)\n self.wallet.short(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])\n\n # Closing remaining positions\n last_data = self.data[-1]\n if self.wallet.stock > 0:\n self.wallet.sell(last_data['timestamp'], last_data['close'], self.wallet.stock)\n if self.wallet.short_stock > 0:\n self.wallet.close_short(last_data['timestamp'], last_data['close'], self.wallet.short_stock)\n \ndef load_historical_data(file_path):\n try:\n print(\"Loading Sim Data...\")\n with open(file_path) as f:\n lines = f.read().split(\"\\n\")\n lines = [line.split(\",\") for line in lines if line] \n data_points = [[int(line[0]), float(line[1])] for line in lines]\n\n print(f\"{len(data_points)} minutes of data loaded\")\n return data_points\n\n except FileNotFoundError:\n print(f\"Error: The file {file_path} does not exist.\")\n return []\n except Exception as e:\n print(f\"An error occurred: {e}\")\n return []\n\n\ndef minute_to_ohlc(data, interval):\n ohlc_data = []\n\n for i in range(0, len(data), interval):\n time_period = data[i:i + interval]\n timestamps, prices = zip(*time_period)\n \n ohlc = {}\n ohlc['timestamp'] = timestamps[0]\n ohlc['open'] = prices[0]\n ohlc['high'] = max(prices)\n ohlc['low'] = min(prices)\n ohlc['close'] = prices[-1]\n\n ohlc_data.append(ohlc)\n \n return ohlc_data\n\n\ndef get_user_input():\n wallet = input(\"Please enter the starting size (in dollars) of the wallet you would like to simulate: \")\n start_date = input(\"Enter the start date (mm-dd-yyyy): \")\n end_date = input(\"Enter the end date (mm-dd-yyyy): \")\n data_time_interval = input(\"Enter the time interval in minutes: \")\n return wallet, start_date, end_date, data_time_interval\n\n\ndef user_settings(wallet, start_date, end_date, data_time_interval, historical_data):\n first_timestamp = historical_data[0][0]\n last_timestamp = historical_data[-1][0]\n\n try:\n wallet = float(wallet)\n if wallet <= 0:\n raise ValueError(\"Wallet size must be positive.\")\n\n utc_tz = pytz.utc\n start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')\n end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')\n start_date_obj = utc_tz.localize(start_date_obj)\n end_date_obj = utc_tz.localize(end_date_obj)\n start_timestamp = int(start_date_obj.timestamp()) + 60\n end_timestamp = int(end_date_obj.timestamp()) + 60\n\n data_time_interval = int(data_time_interval)\n\n if data_time_interval < 1:\n raise ValueError(\"Time interval must be at least 1 
minute.\")\n\n if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):\n raise ValueError(\"Start date and end date must be within the range of historical data.\")\n\n if start_timestamp > end_timestamp:\n raise ValueError(\"End date must be after the start date.\")\n\n except ValueError as e:\n print(f\"Invalid input: {e}\")\n return None, None\n\n filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]\n adjusted_data = minute_to_ohlc(filtered_data, data_time_interval)\n\n return wallet, adjusted_data\n\n\n\nif __name__ == \"__main__\":\n file_path = \"HistoricalBTCdata.txt\"\n historical_data = load_historical_data(file_path)\n\n while True:\n wallet, start_date, end_date, data_time_interval = get_user_input()\n\n wallet, adjusted_historical_data = user_settings(wallet, start_date, end_date, data_time_interval, historical_data)\n\n if wallet is not None and adjusted_historical_data is not None:\n break\n else:\n print(\"Invalid input received. Please try again.\")\n\n userWallet = Wallet(wallet)\n\n # Calculate short term simple moving average strategy\n sma_strategy = SimpleMovingAverageStrategy(userWallet, adjusted_historical_data, 5)\n sma_strategy.execute()\n\n print(\"Final wallet cash: \", userWallet.cash)\n print(\"Final owed shorts: \", userWallet.short_stock)\n",
"path": "trading_system.py"
}
] | 7_8 | python | import sys
import unittest
class TestSummary(unittest.TestCase):
def setUp(self):
from trading_system import Wallet
self.wallet = Wallet(50)
def test_percent_profitable(self):
transactions = [
{'type': 'buy', 'price': 10, 'number': 5, 'timestamp': 1},
{'type': 'sell', 'price': 15, 'number': 5, 'timestamp': 2},
{'type': 'buy', 'price': 20, 'number': 2, 'timestamp': 3},
{'type': 'sell', 'price': 10, 'number': 2, 'timestamp': 4},
{'type': 'short', 'price': 10, 'number': 5, 'timestamp': 5},
{'type': 'close_short', 'price': 5, 'number': 5, 'timestamp': 6},
{'type': 'short', 'price': 10, 'number': 2, 'timestamp': 7},
{'type': 'close_short', 'price': 15, 'number': 2, 'timestamp': 8},
]
self.wallet.transactions = transactions
expected_percent_profitable = 50.0
self.assertAlmostEqual(self.wallet.percentProfitable(), expected_percent_profitable, places=2)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSummary))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
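A quick worked check of the ratio the 7_8 test above asserts, two profitable round trips out of four (condensed from the test's transaction list; the names are illustrative):

# A long trade wins when sell proceeds exceed the buy cost; a short trade
# wins when closing out costs less than the short proceeds.
round_trips = [
    (10 * 5, 15 * 5, 'long'),   # buy 50 -> sell 75: profit
    (20 * 2, 10 * 2, 'long'),   # buy 40 -> sell 20: loss
    (10 * 5, 5 * 5, 'short'),   # short 50 -> close 25: profit
    (10 * 2, 15 * 2, 'short'),  # short 20 -> close 30: loss
]
profitable = sum(
    exit_ > entry if side == 'long' else exit_ < entry
    for entry, exit_, side in round_trips
)
print(100 * profitable / len(round_trips))  # 50.0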
https://github.com/teamqurrent/BitcoinPaperTrader | Add a method to the `Wallet` class in `trading_system.py` called totalClosedTrades(), which calculates the total number of closed trades from the transactions list in the wallet. A trade is considered closed when a purchased asset is sold or a short position is closed. | 388f8ce | numpy
datetime
pytz | python3.9 | dc3f4a0 | diff --git a/trading_system.py b/trading_system.py
--- a/trading_system.py
+++ b/trading_system.py
@@ -32,6 +32,70 @@ class Wallet:
self.short_stock = 0
self.transactions = []
+ def totalProfits(self, initial_wallet, final_wallet):
+ net = final_wallet - initial_wallet
+ percentage = net * 100/initial_wallet
+ return net, percentage
+
+ def totalClosedTrades(self):
+ total_trades = 0
+
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+
+ if current_transaction['type'] in ['sell', 'close_short']:
+ total_trades += 1
+
+ return total_trades
+
+ def percentProfitable(self):
+ profitable_trades = 0
+ total_trades = 0
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+ previous_transaction = self.transactions[i - 1]
+
+ if current_transaction['type'] in ['sell'] and previous_transaction['type'] in ['buy']:
+ # Determine if trade was profitable
+ if current_transaction['price'] * current_transaction['number'] > previous_transaction['price'] * previous_transaction['number']:
+ profitable_trades += 1
+
+ total_trades += 1
+ else:
+ if current_transaction['type'] in ['close_short'] and previous_transaction['type'] in ['short']:
+ if current_transaction['price'] * current_transaction['number'] < previous_transaction['price'] * previous_transaction['number']:
+ profitable_trades += 1
+
+ total_trades += 1
+
+ return 100 * profitable_trades/total_trades
+
+ def profitFactor(self):
+ total_profit = 0
+ total_loss = 0
+ for i in range(1, len(self.transactions)):
+ current_transaction = self.transactions[i]
+ previous_transaction = self.transactions[i - 1]
+
+ if current_transaction['type'] in ['sell'] and previous_transaction['type'] in ['buy']:
+ profit = (current_transaction['price'] * current_transaction['number']) - (previous_transaction['price'] * previous_transaction['number'])
+ if profit > 0:
+ total_profit += profit
+ else:
+ total_loss -= profit
+
+ elif current_transaction['type'] in ['close_short'] and previous_transaction['type'] in ['short']:
+ profit = (previous_transaction['price'] * previous_transaction['number']) - (current_transaction['price'] * current_transaction['number'])
+ if profit > 0:
+ total_profit += profit
+ else:
+ total_loss -= profit
+
+ if total_loss == 0:
+ return float('inf')
+ else:
+ return total_profit / total_loss
+
def buy(self, timestamp, price, number):
if np.isnan(price) or price <= 0:
print(f"Price: {price} is invalid")
@@ -284,10 +348,17 @@ if __name__ == "__main__":
print("Invalid input received. Please try again.")
userWallet = Wallet(wallet)
-
+ initialCash = userWallet.cash
# Calculate short term simple moving average strategy
sma_strategy = SimpleMovingAverageStrategy(userWallet, adjusted_historical_data, 5)
sma_strategy.execute()
+ net, percentage = userWallet.totalProfits(initialCash, userWallet.cash)
print("Final wallet cash: ", userWallet.cash)
+ print("Net Profit: ", net)
+ print("Profit Percentage: ", percentage)
print("Final owed shorts: ", userWallet.short_stock)
+ print("Percentage profitable: ", userWallet.percentProfitable())
+ print("Total number of closed trades: ", userWallet.totalClosedTrades())
+ print("Profit factor: ", userWallet.profitFactor())
+
| [
{
"content": "import numpy as np\nfrom datetime import datetime\nfrom datetime import timedelta\nimport pytz\n\nclass TechnicalIndicators:\n\n # Method to create heiken ashi price candles\n @staticmethod\n def heikin_ashi(data_point, previous_candle):\n open_ = data_point['open']\n high = data_point['high']\n low = data_point['low']\n close = data_point['close']\n\n if previous_candle is None:\n return { 'open': open_, 'high': high, 'low': low, 'close': close }\n\n new_candle = {}\n new_candle['close'] = (open_ + high + low + close) / 4\n new_candle['open'] = (previous_candle['open'] + previous_candle['close']) / 2\n new_candle['high'] = max(high, new_candle['open'], new_candle['close'])\n new_candle['low'] = min(low, new_candle['open'], new_candle['close'])\n\n return new_candle\n\n\nclass Wallet:\n def __init__(self, initial_cash):\n self.cash = initial_cash\n self.stock = 0\n self.short_stock = 0\n self.transactions = []\n\n def buy(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n \n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n \n if price * number > self.cash:\n print(f\"Not enough cash to buy that number\")\n return False \n\n if number <= 0:\n print(f\"Cannot buy 0 or negative amount\")\n return False \n\n self.cash -= price * number\n self.stock += number\n transaction = {\n 'type': 'buy',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def sell(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp <= self.transactions[-1]['timestamp']:\n print(f\"Cannot go back in time\")\n return False \n\n if number > self.stock:\n print(f\"Not enough stock to sell\")\n return False \n\n if number <= 0:\n print(f\"Cannot sell 0 or negative amount\")\n return False \n\n self.stock -= number\n self.cash += price * number\n transaction = {\n 'type': 'sell',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False\n\n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot short back in time\")\n return False\n\n if price * number > self.cash:\n print(f\"Not enough cash to short that number\")\n return False\n\n if number <= 0:\n print(f\"Cannot short 0 or negative amount\")\n return False\n\n self.cash += price * number\n self.short_stock += number\n transaction = {\n 'type': 'short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\n def close_short(self, timestamp, price, number):\n if np.isnan(price) or price <= 0:\n print(f\"Price: {price} is invalid\")\n return False \n\n if self.transactions and timestamp < self.transactions[-1]['timestamp']:\n print(f\"Cannot close short back in time\")\n return False \n\n if number > self.short_stock:\n print(f\"Not enough shorted Bitcoin to close\")\n return False \n\n if number <= 0:\n print(f\"Cannot close 0 or negative amount\")\n return False \n\n self.short_stock -= number\n self.cash -= price * number\n transaction = {\n 'type': 
'close_short',\n 'price': price,\n 'number': number,\n 'timestamp': timestamp\n }\n self.transactions.append(transaction)\n\n print(transaction)\n return True\n\nclass SimpleMovingAverageStrategy:\n def __init__(self, wallet, data, window_size):\n self.wallet = wallet\n self.data = data\n self.window_size = window_size\n\n def calculate_moving_average(self, data):\n return sum(data) / len(data)\n\n def execute(self):\n bitcoin_prices = []\n\n for data_point in self.data:\n bitcoin_prices.append(data_point['close'])\n if len(bitcoin_prices) > self.window_size:\n bitcoin_prices.pop(0)\n moving_average = self.calculate_moving_average(bitcoin_prices)\n\n if data_point['close'] < moving_average and self.wallet.stock == 0:\n self.wallet.close_short(data_point['timestamp'], data_point['close'], self.wallet.short_stock)\n self.wallet.buy(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])\n elif data_point['close'] > moving_average and self.wallet.stock > 0:\n self.wallet.sell(data_point['timestamp'], data_point['close'], self.wallet.stock)\n self.wallet.short(data_point['timestamp'], data_point['close'], self.wallet.cash/data_point['close'])\n\n # Closing remaining positions\n last_data = self.data[-1]\n if self.wallet.stock > 0:\n self.wallet.sell(last_data['timestamp'], last_data['close'], self.wallet.stock)\n if self.wallet.short_stock > 0:\n self.wallet.close_short(last_data['timestamp'], last_data['close'], self.wallet.short_stock)\n \ndef load_historical_data(file_path):\n try:\n print(\"Loading Sim Data...\")\n with open(file_path) as f:\n lines = f.read().split(\"\\n\")\n lines = [line.split(\",\") for line in lines if line] \n data_points = [[int(line[0]), float(line[1])] for line in lines]\n\n print(f\"{len(data_points)} minutes of data loaded\")\n return data_points\n\n except FileNotFoundError:\n print(f\"Error: The file {file_path} does not exist.\")\n return []\n except Exception as e:\n print(f\"An error occurred: {e}\")\n return []\n\n\ndef minute_to_ohlc(data, interval):\n ohlc_data = []\n\n for i in range(0, len(data), interval):\n time_period = data[i:i + interval]\n timestamps, prices = zip(*time_period)\n \n ohlc = {}\n ohlc['timestamp'] = timestamps[0]\n ohlc['open'] = prices[0]\n ohlc['high'] = max(prices)\n ohlc['low'] = min(prices)\n ohlc['close'] = prices[-1]\n\n ohlc_data.append(ohlc)\n \n return ohlc_data\n\n\ndef get_user_input():\n wallet = input(\"Please enter the starting size (in dollars) of the wallet you would like to simulate: \")\n start_date = input(\"Enter the start date (mm-dd-yyyy): \")\n end_date = input(\"Enter the end date (mm-dd-yyyy): \")\n data_time_interval = input(\"Enter the time interval in minutes: \")\n return wallet, start_date, end_date, data_time_interval\n\n\ndef user_settings(wallet, start_date, end_date, data_time_interval, historical_data):\n first_timestamp = historical_data[0][0]\n last_timestamp = historical_data[-1][0]\n\n try:\n wallet = float(wallet)\n if wallet <= 0:\n raise ValueError(\"Wallet size must be positive.\")\n\n utc_tz = pytz.utc\n start_date_obj = datetime.strptime(start_date, '%m-%d-%Y')\n end_date_obj = datetime.strptime(end_date, '%m-%d-%Y')\n start_date_obj = utc_tz.localize(start_date_obj)\n end_date_obj = utc_tz.localize(end_date_obj)\n start_timestamp = int(start_date_obj.timestamp()) + 60\n end_timestamp = int(end_date_obj.timestamp()) + 60\n\n data_time_interval = int(data_time_interval)\n\n if data_time_interval < 1:\n raise ValueError(\"Time interval must be at least 1 
minute.\")\n\n if not (first_timestamp <= start_timestamp <= last_timestamp) or not (first_timestamp <= end_timestamp <= last_timestamp):\n raise ValueError(\"Start date and end date must be within the range of historical data.\")\n\n if start_timestamp > end_timestamp:\n raise ValueError(\"End date must be after the start date.\")\n\n except ValueError as e:\n print(f\"Invalid input: {e}\")\n return None, None\n\n filtered_data = [data_point for data_point in historical_data if start_timestamp <= data_point[0] <= end_timestamp]\n adjusted_data = minute_to_ohlc(filtered_data, data_time_interval)\n\n return wallet, adjusted_data\n\n\n\nif __name__ == \"__main__\":\n file_path = \"HistoricalBTCdata.txt\"\n historical_data = load_historical_data(file_path)\n\n while True:\n wallet, start_date, end_date, data_time_interval = get_user_input()\n\n wallet, adjusted_historical_data = user_settings(wallet, start_date, end_date, data_time_interval, historical_data)\n\n if wallet is not None and adjusted_historical_data is not None:\n break\n else:\n print(\"Invalid input received. Please try again.\")\n\n userWallet = Wallet(wallet)\n\n # Calculate short term simple moving average strategy\n sma_strategy = SimpleMovingAverageStrategy(userWallet, adjusted_historical_data, 5)\n sma_strategy.execute()\n\n print(\"Final wallet cash: \", userWallet.cash)\n print(\"Final owed shorts: \", userWallet.short_stock)\n",
"path": "trading_system.py"
}
] | 7_9 | python | import sys
import unittest
class TestSummary(unittest.TestCase):
def setUp(self):
from trading_system import Wallet
self.wallet = Wallet(50)
def test_total_closed_trades(self):
transactions = [
{'type': 'buy', 'price': 10, 'number': 5, 'timestamp': 1},
{'type': 'sell', 'price': 15, 'number': 5, 'timestamp': 2},
{'type': 'buy', 'price': 20, 'number': 2, 'timestamp': 3},
{'type': 'sell', 'price': 10, 'number': 2, 'timestamp': 4},
{'type': 'short', 'price': 10, 'number': 5, 'timestamp': 5},
{'type': 'close_short', 'price': 5, 'number': 5, 'timestamp': 6},
{'type': 'short', 'price': 10, 'number': 2, 'timestamp': 7},
{'type': 'close_short', 'price': 15, 'number': 2, 'timestamp': 8},
]
self.wallet.transactions = transactions
expected_total = 4
self.assertEqual(self.wallet.totalClosedTrades(), expected_total)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSummary))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
|
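The closed-trade count the 7_9 test above expects reduces to counting 'sell' and 'close_short' entries; a one-line equivalent (illustrative, not the Wallet implementation):

transactions = [
    {'type': 'buy'}, {'type': 'sell'},
    {'type': 'buy'}, {'type': 'sell'},
    {'type': 'short'}, {'type': 'close_short'},
    {'type': 'short'}, {'type': 'close_short'},
]
closed = sum(t['type'] in ('sell', 'close_short') for t in transactions)
print(closed)  # 4, as the test asserts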
https://github.com/teamqurrent/web3.py | Resolve the variable-name collision in the Web3 Python library when initializing contracts. Specifically, fix the problem where a contract with a function named 'w3' collides with the 'w3' instance variable in the contract caller classes. Modify the `contract.py` and `async_contract.py` files to handle the 'w3' variable correctly by changing `self.w3` to the local `w3` in the right places. | 9c76a18 | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 0302d372 | diff --git a/newsfragments/3147.bugfix.rst b/newsfragments/3147.bugfix.rst
new file mode 100644
--- /dev/null
+++ b/newsfragments/3147.bugfix.rst
@@ -0,0 +1 @@
+Fix collision of ``w3`` variable when initializing contract with function of the same name
\ No newline at end of file
diff --git a/tests/core/contracts/conftest.py b/tests/core/contracts/conftest.py
--- a/tests/core/contracts/conftest.py
+++ b/tests/core/contracts/conftest.py
@@ -28,6 +28,10 @@ from web3._utils.contract_sources.contract_data.event_contracts import (
from web3._utils.contract_sources.contract_data.fallback_function_contract import (
FALLBACK_FUNCTION_CONTRACT_DATA,
)
+from web3._utils.contract_sources.contract_data.function_name_tester_contract import (
+ FUNCTION_NAME_TESTER_CONTRACT_ABI,
+ FUNCTION_NAME_TESTER_CONTRACT_DATA,
+)
from web3._utils.contract_sources.contract_data.math_contract import (
MATH_CONTRACT_ABI,
MATH_CONTRACT_BYTECODE,
@@ -55,6 +59,24 @@ from web3._utils.contract_sources.contract_data.tuple_contracts import (
TUPLE_CONTRACT_DATA,
)
+# --- function name tester contract --- #
+
+
+@pytest.fixture(scope="session")
+def function_name_tester_contract_abi():
+ return FUNCTION_NAME_TESTER_CONTRACT_ABI
+
+
+@pytest.fixture
+def function_name_tester_contract(w3, address_conversion_func):
+ function_name_tester_contract_factory = w3.eth.contract(
+ **FUNCTION_NAME_TESTER_CONTRACT_DATA
+ )
+ return deploy(w3, function_name_tester_contract_factory, address_conversion_func)
+
+
+# --- math contract --- #
+
@pytest.fixture(scope="session")
def math_contract_bytecode():
@@ -81,6 +103,9 @@ def math_contract(w3, math_contract_factory, address_conversion_func):
return deploy(w3, math_contract_factory, address_conversion_func)
+# --- constructor contracts --- #
+
+
@pytest.fixture
def simple_constructor_contract_factory(w3):
return w3.eth.contract(**SIMPLE_CONSTRUCTOR_CONTRACT_DATA)
@@ -113,6 +138,9 @@ def contract_with_constructor_address(
)
+# --- address reflector contract --- #
+
+
@pytest.fixture
def address_reflector_contract(w3, address_conversion_func):
address_reflector_contract_factory = w3.eth.contract(
@@ -121,6 +149,9 @@ def address_reflector_contract(w3, address_conversion_func):
return deploy(w3, address_reflector_contract_factory, address_conversion_func)
+# --- string contract --- #
+
+
@pytest.fixture(scope="session")
def string_contract_data():
return STRING_CONTRACT_DATA
@@ -153,6 +184,9 @@ def non_strict_string_contract(
)
+# --- emitter contract --- #
+
+
@pytest.fixture
def non_strict_emitter(
w3_non_strict_abi,
@@ -180,6 +214,9 @@ def non_strict_emitter(
return emitter_contract
+# --- event contract --- #
+
+
@pytest.fixture
def event_contract(
w3,
@@ -223,6 +260,9 @@ def indexed_event_contract(
return indexed_event_contract
+# --- arrays contract --- #
+
+
# bytes_32 = [keccak('0'), keccak('1')]
BYTES32_ARRAY = [
b"\x04HR\xb2\xa6p\xad\xe5@~x\xfb(c\xc5\x1d\xe9\xfc\xb9eB\xa0q\x86\xfe:\xed\xa6\xbb\x8a\x11m", # noqa: E501
@@ -255,12 +295,18 @@ def non_strict_arrays_contract(w3_non_strict_abi, address_conversion_func):
)
+# --- payable tester contract --- #
+
+
@pytest.fixture
def payable_tester_contract(w3, address_conversion_func):
payable_tester_contract_factory = w3.eth.contract(**PAYABLE_TESTER_CONTRACT_DATA)
return deploy(w3, payable_tester_contract_factory, address_conversion_func)
+# --- fixed reflector contract --- #
+
+
# no matter the function selector, this will return back the 32 bytes of data supplied
FIXED_REFLECTOR_CONTRACT_BYTECODE = "0x610011566020600460003760206000f3005b61000461001103610004600039610004610011036000f3" # noqa: E501
# reference source used to generate it:
@@ -306,6 +352,9 @@ def fixed_reflector_contract(w3, address_conversion_func):
return deploy(w3, fixed_reflector_contract_factory, address_conversion_func)
+# --- test data and functions contracts --- #
+
+
@pytest.fixture
def fallback_function_contract(w3, address_conversion_func):
fallback_function_contract_factory = w3.eth.contract(
@@ -387,6 +436,9 @@ def some_address(address_conversion_func):
return address_conversion_func("0x5B2063246F2191f18F2675ceDB8b28102e957458")
+# --- invoke contract --- #
+
+
def invoke_contract(
api_call_desig="call",
contract=None,
@@ -444,6 +496,16 @@ async def async_math_contract(
)
+@pytest_asyncio.fixture
+async def async_function_name_tester_contract(async_w3, address_conversion_func):
+ function_name_tester_contract_factory = async_w3.eth.contract(
+ **FUNCTION_NAME_TESTER_CONTRACT_DATA
+ )
+ return await async_deploy(
+ async_w3, function_name_tester_contract_factory, address_conversion_func
+ )
+
+
@pytest.fixture
def async_simple_constructor_contract_factory(async_w3):
return async_w3.eth.contract(**SIMPLE_CONSTRUCTOR_CONTRACT_DATA)
diff --git a/tests/core/contracts/test_contract_call_interface.py b/tests/core/contracts/test_contract_call_interface.py
--- a/tests/core/contracts/test_contract_call_interface.py
+++ b/tests/core/contracts/test_contract_call_interface.py
@@ -178,6 +178,11 @@ def test_saved_method_call_with_multiple_arguments(
assert result == 16
+def test_call_get_w3_value(function_name_tester_contract, call):
+ result = call(contract=function_name_tester_contract, contract_function="w3")
+ assert result is True
+
+
def test_call_get_string_value(string_contract, call):
result = call(contract=string_contract, contract_function="getValue")
# eth_abi.decode() does not assume implicit utf-8
@@ -1295,6 +1300,14 @@ async def test_async_saved_method_call_with_multiple_arguments(
assert result == 16
+@pytest.mark.asyncio
+async def test_async_call_get_w3_value(async_function_name_tester_contract, async_call):
+ result = await async_call(
+ contract=async_function_name_tester_contract, contract_function="w3"
+ )
+ assert result is True
+
+
@pytest.mark.asyncio
async def test_async_call_get_string_value(async_string_contract, async_call):
result = await async_call(
diff --git a/tests/core/contracts/test_contract_class_construction.py b/tests/core/contracts/test_contract_class_construction.py
--- a/tests/core/contracts/test_contract_class_construction.py
+++ b/tests/core/contracts/test_contract_class_construction.py
@@ -42,6 +42,39 @@ def test_abi_as_json_string(w3, math_contract_abi, some_address):
assert math.abi == math_contract_abi
+def test_contract_init_with_w3_function_name(
+ w3,
+ function_name_tester_contract_abi,
+ function_name_tester_contract,
+):
+ # test `w3` function name does not throw when creating the contract factory
+ contract_factory = w3.eth.contract(abi=function_name_tester_contract_abi)
+
+ # re-instantiate the contract
+ contract = contract_factory(function_name_tester_contract.address)
+
+ # assert the `w3` function returns true when called
+ result = contract.functions.w3().call()
+ assert result is True
+
+
+@pytest.mark.asyncio
+async def test_async_contract_init_with_w3_function_name(
+ async_w3,
+ function_name_tester_contract_abi,
+ async_function_name_tester_contract,
+):
+ # test `w3` function name does not throw when creating the contract factory
+ contract_factory = async_w3.eth.contract(abi=function_name_tester_contract_abi)
+
+ # re-instantiate the contract
+ contract = contract_factory(async_function_name_tester_contract.address)
+
+ # assert the `w3` function returns true when called
+ result = await contract.functions.w3().call()
+ assert result is True
+
+
def test_error_to_call_non_existent_fallback(
w3, math_contract_abi, math_contract_bytecode, math_contract_runtime
):
diff --git a/web3/_utils/contract_sources/FunctionNameTesterContract.sol b/web3/_utils/contract_sources/FunctionNameTesterContract.sol
new file mode 100644
--- /dev/null
+++ b/web3/_utils/contract_sources/FunctionNameTesterContract.sol
@@ -0,0 +1,12 @@
+pragma solidity ^0.8.23;
+
+contract FunctionNameTesterContract {
+ function w3() public returns (bool) {
+ return true;
+ }
+
+ // unused, this just needs to come after `w3` in the abi... so name it "z"
+ function z() public returns (bool) {
+ return false;
+ }
+}
\ No newline at end of file
diff --git a/web3/_utils/contract_sources/contract_data/function_name_tester_contract.py b/web3/_utils/contract_sources/contract_data/function_name_tester_contract.py
new file mode 100644
--- /dev/null
+++ b/web3/_utils/contract_sources/contract_data/function_name_tester_contract.py
@@ -0,0 +1,29 @@
+"""
+Generated by `compile_contracts.py` script.
+Compiled with Solidity v0.8.23.
+"""
+
+# source: web3/_utils/contract_sources/FunctionNameTesterContract.sol:FunctionNameTesterContract # noqa: E501
+FUNCTION_NAME_TESTER_CONTRACT_BYTECODE = "0x608060405234801561000f575f80fd5b5060d98061001c5f395ff3fe6080604052348015600e575f80fd5b50600436106030575f3560e01c8063a044c987146034578063c5d7802e14604e575b5f80fd5b603a6068565b60405160459190608c565b60405180910390f35b60546070565b604051605f9190608c565b60405180910390f35b5f6001905090565b5f90565b5f8115159050919050565b6086816074565b82525050565b5f602082019050609d5f830184607f565b9291505056fea264697066735822122056b76f22006829335981c36eca76f8aa0c6cf66d23990263a18b17fa27ab3db064736f6c63430008170033" # noqa: E501
+FUNCTION_NAME_TESTER_CONTRACT_RUNTIME = "0x6080604052348015600e575f80fd5b50600436106030575f3560e01c8063a044c987146034578063c5d7802e14604e575b5f80fd5b603a6068565b60405160459190608c565b60405180910390f35b60546070565b604051605f9190608c565b60405180910390f35b5f6001905090565b5f90565b5f8115159050919050565b6086816074565b82525050565b5f602082019050609d5f830184607f565b9291505056fea264697066735822122056b76f22006829335981c36eca76f8aa0c6cf66d23990263a18b17fa27ab3db064736f6c63430008170033" # noqa: E501
+FUNCTION_NAME_TESTER_CONTRACT_ABI = [
+ {
+ "inputs": [],
+ "name": "w3",
+ "outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ },
+ {
+ "inputs": [],
+ "name": "z",
+ "outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ },
+]
+FUNCTION_NAME_TESTER_CONTRACT_DATA = {
+ "bytecode": FUNCTION_NAME_TESTER_CONTRACT_BYTECODE,
+ "bytecode_runtime": FUNCTION_NAME_TESTER_CONTRACT_RUNTIME,
+ "abi": FUNCTION_NAME_TESTER_CONTRACT_ABI,
+}
diff --git a/web3/contract/async_contract.py b/web3/contract/async_contract.py
--- a/web3/contract/async_contract.py
+++ b/web3/contract/async_contract.py
@@ -575,7 +575,7 @@ class AsyncContractCaller(BaseContractCaller):
for func in self._functions:
fn = AsyncContractFunction.factory(
func["name"],
- w3=self.w3,
+ w3=w3,
contract_abi=self.abi,
address=self.address,
function_identifier=func["name"],
@@ -585,9 +585,7 @@ class AsyncContractCaller(BaseContractCaller):
# TODO: The no_extra_call method gets around the fact that we can't call
# the full async method from within a class's __init__ method. We need
# to see if there's a way to account for all desired elif cases.
- block_id = parse_block_identifier_no_extra_call(
- self.w3, block_identifier
- )
+ block_id = parse_block_identifier_no_extra_call(w3, block_identifier)
caller_method = partial(
self.call_function,
fn,
diff --git a/web3/contract/contract.py b/web3/contract/contract.py
--- a/web3/contract/contract.py
+++ b/web3/contract/contract.py
@@ -577,14 +577,14 @@ class ContractCaller(BaseContractCaller):
for func in self._functions:
fn = ContractFunction.factory(
func["name"],
- w3=self.w3,
+ w3=w3,
contract_abi=self.abi,
address=self.address,
function_identifier=func["name"],
decode_tuples=decode_tuples,
)
- block_id = parse_block_identifier(self.w3, block_identifier)
+ block_id = parse_block_identifier(w3, block_identifier)
caller_method = partial(
self.call_function,
fn,
| [
{
"content": "import functools\nimport pytest\n\nimport pytest_asyncio\n\nfrom tests.core.contracts.utils import (\n async_deploy,\n deploy,\n)\nfrom tests.utils import (\n async_partial,\n)\nfrom web3._utils.contract_sources.contract_data.arrays_contract import (\n ARRAYS_CONTRACT_DATA,\n)\nfrom web3._utils.contract_sources.contract_data.constructor_contracts import (\n CONSTRUCTOR_WITH_ADDRESS_ARGUMENT_CONTRACT_DATA,\n CONSTRUCTOR_WITH_ARGUMENTS_CONTRACT_DATA,\n SIMPLE_CONSTRUCTOR_CONTRACT_DATA,\n)\nfrom web3._utils.contract_sources.contract_data.contract_caller_tester import (\n CONTRACT_CALLER_TESTER_DATA,\n)\nfrom web3._utils.contract_sources.contract_data.event_contracts import (\n EVENT_CONTRACT_DATA,\n INDEXED_EVENT_CONTRACT_DATA,\n)\nfrom web3._utils.contract_sources.contract_data.fallback_function_contract import (\n FALLBACK_FUNCTION_CONTRACT_DATA,\n)\nfrom web3._utils.contract_sources.contract_data.math_contract import (\n MATH_CONTRACT_ABI,\n MATH_CONTRACT_BYTECODE,\n MATH_CONTRACT_DATA,\n MATH_CONTRACT_RUNTIME,\n)\nfrom web3._utils.contract_sources.contract_data.payable_tester import (\n PAYABLE_TESTER_CONTRACT_DATA,\n)\nfrom web3._utils.contract_sources.contract_data.receive_function_contracts import (\n NO_RECEIVE_FUNCTION_CONTRACT_DATA,\n RECEIVE_FUNCTION_CONTRACT_DATA,\n)\nfrom web3._utils.contract_sources.contract_data.reflector_contracts import (\n ADDRESS_REFLECTOR_CONTRACT_DATA,\n)\nfrom web3._utils.contract_sources.contract_data.revert_contract import (\n REVERT_CONTRACT_DATA,\n)\nfrom web3._utils.contract_sources.contract_data.string_contract import (\n STRING_CONTRACT_DATA,\n)\nfrom web3._utils.contract_sources.contract_data.tuple_contracts import (\n NESTED_TUPLE_CONTRACT_DATA,\n TUPLE_CONTRACT_DATA,\n)\n\n\n@pytest.fixture(scope=\"session\")\ndef math_contract_bytecode():\n return MATH_CONTRACT_BYTECODE\n\n\n@pytest.fixture(scope=\"session\")\ndef math_contract_runtime():\n return MATH_CONTRACT_RUNTIME\n\n\n@pytest.fixture(scope=\"session\")\ndef math_contract_abi():\n return MATH_CONTRACT_ABI\n\n\n@pytest.fixture\ndef math_contract_factory(w3):\n return w3.eth.contract(**MATH_CONTRACT_DATA)\n\n\n@pytest.fixture\ndef math_contract(w3, math_contract_factory, address_conversion_func):\n return deploy(w3, math_contract_factory, address_conversion_func)\n\n\n@pytest.fixture\ndef simple_constructor_contract_factory(w3):\n return w3.eth.contract(**SIMPLE_CONSTRUCTOR_CONTRACT_DATA)\n\n\n@pytest.fixture\ndef contract_with_constructor_args_factory(w3):\n return w3.eth.contract(**CONSTRUCTOR_WITH_ARGUMENTS_CONTRACT_DATA)\n\n\n@pytest.fixture\ndef non_strict_contract_with_constructor_args_factory(w3_non_strict_abi):\n return w3_non_strict_abi.eth.contract(**CONSTRUCTOR_WITH_ARGUMENTS_CONTRACT_DATA)\n\n\n@pytest.fixture\ndef contract_with_constructor_address_factory(w3):\n return w3.eth.contract(**CONSTRUCTOR_WITH_ADDRESS_ARGUMENT_CONTRACT_DATA)\n\n\n@pytest.fixture\ndef contract_with_constructor_address(\n w3, contract_with_constructor_address_factory, address_conversion_func\n):\n return deploy(\n w3,\n contract_with_constructor_address_factory,\n address_conversion_func,\n args=[\"0xd3CdA913deB6f67967B99D67aCDFa1712C293601\"],\n )\n\n\n@pytest.fixture\ndef address_reflector_contract(w3, address_conversion_func):\n address_reflector_contract_factory = w3.eth.contract(\n **ADDRESS_REFLECTOR_CONTRACT_DATA\n )\n return deploy(w3, address_reflector_contract_factory, address_conversion_func)\n\n\n@pytest.fixture(scope=\"session\")\ndef string_contract_data():\n return 
STRING_CONTRACT_DATA\n\n\n@pytest.fixture\ndef string_contract_factory(w3, string_contract_data):\n return w3.eth.contract(**STRING_CONTRACT_DATA)\n\n\n@pytest.fixture\ndef string_contract(w3, string_contract_factory, address_conversion_func):\n return deploy(\n w3, string_contract_factory, address_conversion_func, args=[\"Caqalai\"]\n )\n\n\n@pytest.fixture\ndef non_strict_string_contract(\n w3_non_strict_abi, string_contract_data, address_conversion_func\n):\n _non_strict_string_contract_factory = w3_non_strict_abi.eth.contract(\n **string_contract_data\n )\n return deploy(\n w3_non_strict_abi,\n _non_strict_string_contract_factory,\n address_conversion_func,\n args=[\"Caqalai\"],\n )\n\n\n@pytest.fixture\ndef non_strict_emitter(\n w3_non_strict_abi,\n emitter_contract_data,\n wait_for_transaction,\n wait_for_block,\n address_conversion_func,\n):\n non_strict_emitter_contract_factory = w3_non_strict_abi.eth.contract(\n **emitter_contract_data\n )\n w3 = w3_non_strict_abi\n\n wait_for_block(w3)\n deploy_txn_hash = non_strict_emitter_contract_factory.constructor().transact(\n {\"gas\": 10000000}\n )\n deploy_receipt = wait_for_transaction(w3, deploy_txn_hash)\n contract_address = address_conversion_func(deploy_receipt[\"contractAddress\"])\n\n bytecode = w3.eth.get_code(contract_address)\n assert bytecode == non_strict_emitter_contract_factory.bytecode_runtime\n emitter_contract = non_strict_emitter_contract_factory(address=contract_address)\n assert emitter_contract.address == contract_address\n return emitter_contract\n\n\n@pytest.fixture\ndef event_contract(\n w3,\n wait_for_transaction,\n wait_for_block,\n address_conversion_func,\n):\n wait_for_block(w3)\n\n event_contract_factory = w3.eth.contract(**EVENT_CONTRACT_DATA)\n deploy_txn_hash = event_contract_factory.constructor().transact(\n {\"from\": w3.eth.coinbase, \"gas\": 1000000}\n )\n deploy_receipt = wait_for_transaction(w3, deploy_txn_hash)\n contract_address = address_conversion_func(deploy_receipt[\"contractAddress\"])\n\n bytecode = w3.eth.get_code(contract_address)\n assert bytecode == event_contract_factory.bytecode_runtime\n event_contract = event_contract_factory(address=contract_address)\n assert event_contract.address == contract_address\n return event_contract\n\n\n@pytest.fixture\ndef indexed_event_contract(\n w3, wait_for_block, wait_for_transaction, address_conversion_func\n):\n wait_for_block(w3)\n\n indexed_event_contract_factory = w3.eth.contract(**INDEXED_EVENT_CONTRACT_DATA)\n deploy_txn_hash = indexed_event_contract_factory.constructor().transact(\n {\"from\": w3.eth.coinbase, \"gas\": 1000000}\n )\n deploy_receipt = wait_for_transaction(w3, deploy_txn_hash)\n contract_address = address_conversion_func(deploy_receipt[\"contractAddress\"])\n\n bytecode = w3.eth.get_code(contract_address)\n assert bytecode == indexed_event_contract_factory.bytecode_runtime\n indexed_event_contract = indexed_event_contract_factory(address=contract_address)\n assert indexed_event_contract.address == contract_address\n return indexed_event_contract\n\n\n# bytes_32 = [keccak('0'), keccak('1')]\nBYTES32_ARRAY = [\n b\"\\x04HR\\xb2\\xa6p\\xad\\xe5@~x\\xfb(c\\xc5\\x1d\\xe9\\xfc\\xb9eB\\xa0q\\x86\\xfe:\\xed\\xa6\\xbb\\x8a\\x11m\", # noqa: E501\n b\"\\xc8\\x9e\\xfd\\xaaT\\xc0\\xf2\\x0cz\\xdfa(\\x82\\xdf\\tP\\xf5\\xa9Qc~\\x03\\x07\\xcd\\xcbLg/)\\x8b\\x8b\\xc6\", # noqa: E501\n]\nBYTES1_ARRAY = [b\"\\xff\", b\"\\xff\", b\"\\xff\", b\"\\xff\"]\n\n\n@pytest.fixture\ndef arrays_contract(w3, address_conversion_func):\n arrays_contract_factory 
= w3.eth.contract(**ARRAYS_CONTRACT_DATA)\n return deploy(\n w3,\n arrays_contract_factory,\n address_conversion_func,\n args=[BYTES32_ARRAY, BYTES1_ARRAY],\n )\n\n\n@pytest.fixture\ndef non_strict_arrays_contract(w3_non_strict_abi, address_conversion_func):\n non_strict_arrays_contract_factory = w3_non_strict_abi.eth.contract(\n **ARRAYS_CONTRACT_DATA\n )\n return deploy(\n w3_non_strict_abi,\n non_strict_arrays_contract_factory,\n address_conversion_func,\n args=[BYTES32_ARRAY, BYTES1_ARRAY],\n )\n\n\n@pytest.fixture\ndef payable_tester_contract(w3, address_conversion_func):\n payable_tester_contract_factory = w3.eth.contract(**PAYABLE_TESTER_CONTRACT_DATA)\n return deploy(w3, payable_tester_contract_factory, address_conversion_func)\n\n\n# no matter the function selector, this will return back the 32 bytes of data supplied\nFIXED_REFLECTOR_CONTRACT_BYTECODE = \"0x610011566020600460003760206000f3005b61000461001103610004600039610004610011036000f3\" # noqa: E501\n# reference source used to generate it:\nLLL_SOURCE = \"['seq', ['return', 0, ['lll', ['seq', ['calldatacopy', 0, 4, 32], ['return', 0, 32], 'stop' ], 0]]])\" # noqa: E501\n\nFIXED_REFLECTOR_CONTRACT_ABI = [\n {\n \"type\": \"function\",\n \"constant\": False,\n \"inputs\": [{\"type\": \"fixed8x1\"}],\n \"name\": \"reflect\",\n \"outputs\": [{\"type\": \"fixed8x1\"}],\n },\n {\n \"type\": \"function\",\n \"constant\": False,\n \"inputs\": [{\"type\": \"ufixed256x80\"}],\n \"name\": \"reflect\",\n \"outputs\": [{\"type\": \"ufixed256x80\"}],\n },\n {\n \"type\": \"function\",\n \"constant\": False,\n \"inputs\": [{\"type\": \"ufixed256x1\"}],\n \"name\": \"reflect\",\n \"outputs\": [{\"type\": \"ufixed256x1\"}],\n },\n {\n \"type\": \"function\",\n \"constant\": False,\n \"inputs\": [{\"type\": \"ufixed8x1\"}],\n \"name\": \"reflect_short_u\",\n \"outputs\": [{\"type\": \"ufixed8x1\"}],\n },\n]\n\n\n@pytest.fixture\ndef fixed_reflector_contract(w3, address_conversion_func):\n fixed_reflector_contract_factory = w3.eth.contract(\n abi=FIXED_REFLECTOR_CONTRACT_ABI, bytecode=FIXED_REFLECTOR_CONTRACT_BYTECODE\n )\n return deploy(w3, fixed_reflector_contract_factory, address_conversion_func)\n\n\n@pytest.fixture\ndef fallback_function_contract(w3, address_conversion_func):\n fallback_function_contract_factory = w3.eth.contract(\n **FALLBACK_FUNCTION_CONTRACT_DATA\n )\n return deploy(w3, fallback_function_contract_factory, address_conversion_func)\n\n\n@pytest.fixture\ndef receive_function_contract(w3, address_conversion_func):\n receive_function_contract_factory = w3.eth.contract(\n **RECEIVE_FUNCTION_CONTRACT_DATA\n )\n return deploy(w3, receive_function_contract_factory, address_conversion_func)\n\n\n@pytest.fixture\ndef no_receive_function_contract(w3, address_conversion_func):\n no_receive_function_contract_factory = w3.eth.contract(\n **NO_RECEIVE_FUNCTION_CONTRACT_DATA\n )\n return deploy(w3, no_receive_function_contract_factory, address_conversion_func)\n\n\n@pytest.fixture\ndef contract_caller_tester_contract(w3, address_conversion_func):\n contract_caller_tester_contract_factory = w3.eth.contract(\n **CONTRACT_CALLER_TESTER_DATA\n )\n return deploy(w3, contract_caller_tester_contract_factory, address_conversion_func)\n\n\n@pytest.fixture\ndef revert_contract(w3, address_conversion_func):\n revert_contract_factory = w3.eth.contract(**REVERT_CONTRACT_DATA)\n return deploy(w3, revert_contract_factory, address_conversion_func)\n\n\n@pytest.fixture\ndef tuple_contract(w3, address_conversion_func):\n tuple_contract_factory = 
w3.eth.contract(**TUPLE_CONTRACT_DATA)\n return deploy(w3, tuple_contract_factory, address_conversion_func)\n\n\n@pytest.fixture\ndef nested_tuple_contract(w3, address_conversion_func):\n nested_tuple_contract_factory = w3.eth.contract(**NESTED_TUPLE_CONTRACT_DATA)\n return deploy(w3, nested_tuple_contract_factory, address_conversion_func)\n\n\nTUPLE_CONTRACT_DATA_DECODE_TUPLES = {\n **TUPLE_CONTRACT_DATA,\n \"decode_tuples\": True,\n}\n\n\nNESTED_TUPLE_CONTRACT_DATA_DECODE_TUPLES = {\n **NESTED_TUPLE_CONTRACT_DATA,\n \"decode_tuples\": True,\n}\n\n\n@pytest.fixture\ndef tuple_contract_with_decode_tuples(w3, address_conversion_func):\n tuple_contract_factory = w3.eth.contract(**TUPLE_CONTRACT_DATA_DECODE_TUPLES)\n return deploy(w3, tuple_contract_factory, address_conversion_func)\n\n\n@pytest.fixture\ndef nested_tuple_contract_with_decode_tuples(w3, address_conversion_func):\n nested_tuple_contract_factory = w3.eth.contract(\n **NESTED_TUPLE_CONTRACT_DATA_DECODE_TUPLES\n )\n return deploy(w3, nested_tuple_contract_factory, address_conversion_func)\n\n\n@pytest.fixture\ndef some_address(address_conversion_func):\n return address_conversion_func(\"0x5B2063246F2191f18F2675ceDB8b28102e957458\")\n\n\ndef invoke_contract(\n api_call_desig=\"call\",\n contract=None,\n contract_function=None,\n func_args=[],\n func_kwargs={},\n tx_params={},\n):\n allowable_call_desig = [\"call\", \"transact\", \"estimate_gas\", \"build_transaction\"]\n if api_call_desig not in allowable_call_desig:\n raise ValueError(\n f\"allowable_invoke_method must be one of: {allowable_call_desig}\"\n )\n\n function = contract.functions[contract_function]\n result = getattr(function(*func_args, **func_kwargs), api_call_desig)(tx_params)\n\n return result\n\n\n@pytest.fixture\ndef build_transaction(request):\n return functools.partial(invoke_contract, api_call_desig=\"build_transaction\")\n\n\n@pytest.fixture\ndef transact(request):\n return functools.partial(invoke_contract, api_call_desig=\"transact\")\n\n\n@pytest.fixture\ndef call(request):\n return functools.partial(invoke_contract, api_call_desig=\"call\")\n\n\n@pytest.fixture\ndef estimate_gas(request):\n return functools.partial(invoke_contract, api_call_desig=\"estimate_gas\")\n\n\n# --- async --- #\n\n\n@pytest.fixture\ndef async_math_contract_factory(async_w3):\n return async_w3.eth.contract(**MATH_CONTRACT_DATA)\n\n\n@pytest_asyncio.fixture\nasync def async_math_contract(\n async_w3, async_math_contract_factory, address_conversion_func\n):\n return await async_deploy(\n async_w3, async_math_contract_factory, address_conversion_func\n )\n\n\n@pytest.fixture\ndef async_simple_constructor_contract_factory(async_w3):\n return async_w3.eth.contract(**SIMPLE_CONSTRUCTOR_CONTRACT_DATA)\n\n\n@pytest.fixture\ndef async_constructor_with_args_contract_factory(async_w3):\n return async_w3.eth.contract(**CONSTRUCTOR_WITH_ARGUMENTS_CONTRACT_DATA)\n\n\n@pytest.fixture\ndef async_non_strict_constructor_with_args_contract_factory(async_w3_non_strict_abi):\n return async_w3_non_strict_abi.eth.contract(\n **CONSTRUCTOR_WITH_ARGUMENTS_CONTRACT_DATA\n )\n\n\n@pytest.fixture\ndef async_constructor_with_address_arg_contract_factory(async_w3):\n return async_w3.eth.contract(**CONSTRUCTOR_WITH_ADDRESS_ARGUMENT_CONTRACT_DATA)\n\n\n@pytest_asyncio.fixture\nasync def async_constructor_with_address_argument_contract(\n async_w3,\n address_conversion_func,\n):\n async_constructor_with_address_arg_factory = async_w3.eth.contract(\n **CONSTRUCTOR_WITH_ADDRESS_ARGUMENT_CONTRACT_DATA\n )\n return 
await async_deploy(\n async_w3,\n async_constructor_with_address_arg_factory,\n address_conversion_func,\n args=[\"0xd3CdA913deB6f67967B99D67aCDFa1712C293601\"],\n )\n\n\n@pytest_asyncio.fixture\nasync def async_address_reflector_contract(async_w3, address_conversion_func):\n async_address_reflector_contract_factory = async_w3.eth.contract(\n **ADDRESS_REFLECTOR_CONTRACT_DATA,\n )\n return await async_deploy(\n async_w3, async_address_reflector_contract_factory, address_conversion_func\n )\n\n\n@pytest.fixture\ndef async_string_contract_factory(async_w3):\n return async_w3.eth.contract(**STRING_CONTRACT_DATA)\n\n\n@pytest_asyncio.fixture\nasync def async_string_contract(\n async_w3, async_string_contract_factory, address_conversion_func\n):\n return await async_deploy(\n async_w3,\n async_string_contract_factory,\n address_conversion_func,\n args=[\"Caqalai\"],\n )\n\n\n@pytest_asyncio.fixture\nasync def async_arrays_contract(async_w3, address_conversion_func):\n async_arrays_contract_factory = async_w3.eth.contract(**ARRAYS_CONTRACT_DATA)\n return await async_deploy(\n async_w3,\n async_arrays_contract_factory,\n address_conversion_func,\n args=[BYTES32_ARRAY, BYTES1_ARRAY],\n )\n\n\n@pytest_asyncio.fixture\nasync def async_non_strict_arrays_contract(\n async_w3_non_strict_abi, address_conversion_func\n):\n async_non_strict_arrays_contract_factory = async_w3_non_strict_abi.eth.contract(\n **ARRAYS_CONTRACT_DATA,\n )\n return await async_deploy(\n async_w3_non_strict_abi,\n async_non_strict_arrays_contract_factory,\n address_conversion_func,\n args=[BYTES32_ARRAY, BYTES1_ARRAY],\n )\n\n\n@pytest_asyncio.fixture\nasync def async_payable_tester_contract(async_w3, address_conversion_func):\n async_payable_tester_contract_factory = async_w3.eth.contract(\n **PAYABLE_TESTER_CONTRACT_DATA\n )\n return await async_deploy(\n async_w3, async_payable_tester_contract_factory, address_conversion_func\n )\n\n\n@pytest_asyncio.fixture\nasync def async_fixed_reflector_contract(async_w3, address_conversion_func):\n async_fixed_reflector_contract_factory = async_w3.eth.contract(\n abi=FIXED_REFLECTOR_CONTRACT_ABI, bytecode=FIXED_REFLECTOR_CONTRACT_BYTECODE\n )\n return await async_deploy(\n async_w3, async_fixed_reflector_contract_factory, address_conversion_func\n )\n\n\n@pytest_asyncio.fixture\nasync def async_fallback_function_contract(async_w3, address_conversion_func):\n async_fallback_function_contract_factory = async_w3.eth.contract(\n **FALLBACK_FUNCTION_CONTRACT_DATA\n )\n return await async_deploy(\n async_w3, async_fallback_function_contract_factory, address_conversion_func\n )\n\n\n@pytest_asyncio.fixture\nasync def async_no_receive_function_contract(async_w3, address_conversion_func):\n async_no_receive_function_contract_factory = async_w3.eth.contract(\n **NO_RECEIVE_FUNCTION_CONTRACT_DATA\n )\n return await async_deploy(\n async_w3, async_no_receive_function_contract_factory, address_conversion_func\n )\n\n\n@pytest_asyncio.fixture\nasync def async_receive_function_contract(async_w3, address_conversion_func):\n async_receive_function_contract_factory = async_w3.eth.contract(\n **RECEIVE_FUNCTION_CONTRACT_DATA\n )\n return await async_deploy(\n async_w3, async_receive_function_contract_factory, address_conversion_func\n )\n\n\n@pytest_asyncio.fixture\nasync def async_contract_caller_tester_contract(async_w3, address_conversion_func):\n async_contract_caller_tester_contract_factory = async_w3.eth.contract(\n **CONTRACT_CALLER_TESTER_DATA\n )\n return await async_deploy(\n async_w3,\n 
async_contract_caller_tester_contract_factory,\n address_conversion_func,\n )\n\n\n@pytest_asyncio.fixture\nasync def async_revert_contract(async_w3, address_conversion_func):\n async_revert_contract_factory = async_w3.eth.contract(**REVERT_CONTRACT_DATA)\n return await async_deploy(\n async_w3, async_revert_contract_factory, address_conversion_func\n )\n\n\n@pytest_asyncio.fixture\nasync def async_tuple_contract(async_w3, address_conversion_func):\n async_tuple_contract_factory = async_w3.eth.contract(**TUPLE_CONTRACT_DATA)\n return await async_deploy(\n async_w3, async_tuple_contract_factory, address_conversion_func\n )\n\n\n@pytest_asyncio.fixture\nasync def async_nested_tuple_contract(async_w3, address_conversion_func):\n async_nested_tuple_contract_factory = async_w3.eth.contract(\n **NESTED_TUPLE_CONTRACT_DATA\n )\n return await async_deploy(\n async_w3, async_nested_tuple_contract_factory, address_conversion_func\n )\n\n\n@pytest_asyncio.fixture\nasync def async_tuple_contract_with_decode_tuples(async_w3, address_conversion_func):\n async_tuple_contract_factory = async_w3.eth.contract(\n **TUPLE_CONTRACT_DATA_DECODE_TUPLES\n )\n return await async_deploy(\n async_w3, async_tuple_contract_factory, address_conversion_func\n )\n\n\n@pytest_asyncio.fixture\nasync def async_nested_tuple_contract_with_decode_tuples(\n async_w3, address_conversion_func\n):\n async_nested_tuple_contract_factory = async_w3.eth.contract(\n **NESTED_TUPLE_CONTRACT_DATA_DECODE_TUPLES\n )\n return await async_deploy(\n async_w3, async_nested_tuple_contract_factory, address_conversion_func\n )\n\n\nasync def async_invoke_contract(\n api_call_desig=\"call\",\n contract=None,\n contract_function=None,\n func_args=[],\n func_kwargs={},\n tx_params={},\n):\n allowable_call_desig = [\"call\", \"transact\", \"estimate_gas\", \"build_transaction\"]\n if api_call_desig not in allowable_call_desig:\n raise ValueError(\n f\"allowable_invoke_method must be one of: {allowable_call_desig}\"\n )\n\n function = contract.functions[contract_function]\n result = await getattr(function(*func_args, **func_kwargs), api_call_desig)(\n tx_params\n )\n\n return result\n\n\n@pytest.fixture\ndef async_build_transaction(request):\n return async_partial(async_invoke_contract, api_call_desig=\"build_transaction\")\n\n\n@pytest.fixture\ndef async_transact(request):\n return async_partial(async_invoke_contract, api_call_desig=\"transact\")\n\n\n@pytest.fixture\ndef async_call(request):\n return async_partial(async_invoke_contract, api_call_desig=\"call\")\n\n\n@pytest.fixture\ndef async_estimate_gas(request):\n return async_partial(async_invoke_contract, api_call_desig=\"estimate_gas\")\n",
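The `call`, `transact`, `estimate_gas`, and `build_transaction` fixtures above (and their async counterparts) all route through `invoke_contract`, which looks the function up by name on `contract.functions` and dispatches to the web3 method named by `api_call_desig`. An illustrative sketch of how a test consumes the `call` fixture; the `math_contract` fixture and its `add(a, b)` function are taken from the test module that follows:

def test_add_via_call_fixture(math_contract, call):
    # `call` is functools.partial(invoke_contract, api_call_desig="call"),
    # so this resolves math_contract.functions["add"](9, 7).call({}).
    result = call(
        contract=math_contract,
        contract_function="add",
        func_args=[9, 7],
    )
    assert result == 16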
"path": "tests/core/contracts/conftest.py"
},
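Both this conftest and the test module below import `deploy` and `async_deploy` from `tests.core.contracts.utils`, whose bodies are not reproduced in this document. A minimal sketch of what `deploy` plausibly does, inferred from the inline deployment code in the `non_strict_emitter` and `mismatched_math_contract` fixtures above; the default for `address_conversion_func` and the exact transaction parameters are assumptions, not the library's confirmed implementation:

def deploy(w3, contract_factory, address_conversion_func=lambda addr: addr, args=None):
    # Assumed sketch: submit the constructor transaction and block until
    # it is mined; the real helper in tests/core/contracts/utils.py may
    # differ in details.
    deploy_txn = contract_factory.constructor(*(args or [])).transact()
    deploy_receipt = w3.eth.wait_for_transaction_receipt(deploy_txn)
    assert deploy_receipt is not None
    # Normalize the address with the parametrized conversion function
    # (checksummed string, bytes, ...) before binding the factory to it.
    address = address_conversion_func(deploy_receipt["contractAddress"])
    return contract_factory(address=address)

`async_deploy` would presumably follow the same flow with `await` on `transact()` and `wait_for_transaction_receipt()`, matching how the `async_*` fixtures call it.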
{
"content": "from decimal import (\n Decimal,\n getcontext,\n)\nimport json\nimport pytest\n\nfrom eth_tester.exceptions import (\n TransactionFailed,\n)\nfrom eth_utils import (\n is_address,\n)\nfrom hexbytes import (\n HexBytes,\n)\nimport pytest_asyncio\n\nfrom tests.core.contracts.utils import (\n async_deploy,\n deploy,\n)\nfrom web3._utils.abi import (\n recursive_dict_to_namedtuple,\n)\nfrom web3._utils.contract_sources.contract_data.bytes_contracts import (\n BYTES32_CONTRACT_DATA,\n BYTES_CONTRACT_DATA,\n)\nfrom web3._utils.ens import (\n contract_ens_addresses,\n)\nfrom web3.exceptions import (\n BadFunctionCallOutput,\n BlockNumberOutofRange,\n FallbackNotFound,\n InvalidAddress,\n MismatchedABI,\n NameNotFound,\n NoABIFound,\n NoABIFunctionsFound,\n Web3ValidationError,\n)\n\nMULTIPLE_FUNCTIONS = json.loads(\n '[{\"constant\":false,\"inputs\":[],\"name\":\"a\",\"outputs\":[],\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"\",\"type\":\"bytes32\"}],\"name\":\"a\",\"outputs\":[],\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"a\",\"outputs\":[],\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"\",\"type\":\"uint8\"}],\"name\":\"a\",\"outputs\":[],\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"\",\"type\":\"int8\"}],\"name\":\"a\",\"outputs\":[],\"type\":\"function\"}]' # noqa: E501\n)\n\n\n@pytest.fixture(params=[b\"\\x04\\x06\", \"0x0406\"])\ndef bytes_contract(w3, request, address_conversion_func):\n bytes_contract_factory = w3.eth.contract(**BYTES_CONTRACT_DATA)\n return deploy(\n w3, bytes_contract_factory, address_conversion_func, args=[request.param]\n )\n\n\n@pytest.fixture(params=[b\"\\x04\\x06\", \"0x0406\"])\ndef non_strict_bytes_contract(\n w3_non_strict_abi,\n request,\n address_conversion_func,\n):\n non_strict_bytes_contract_factory = w3_non_strict_abi.eth.contract(\n **BYTES_CONTRACT_DATA\n )\n return deploy(\n w3_non_strict_abi,\n non_strict_bytes_contract_factory,\n address_conversion_func,\n args=[request.param],\n )\n\n\n@pytest.fixture\ndef call_transaction():\n return {\"data\": \"0x61bc221a\", \"to\": \"0xc305c901078781C232A2a521C2aF7980f8385ee9\"}\n\n\n@pytest.fixture\ndef bytes32_contract_factory(w3):\n return w3.eth.contract(**BYTES32_CONTRACT_DATA)\n\n\n@pytest.fixture(\n params=[\n \"0x0406040604060406040604060406040604060406040604060406040604060406\",\n HexBytes(\"0406040604060406040604060406040604060406040604060406040604060406\"),\n ]\n)\ndef bytes32_contract(w3, bytes32_contract_factory, request, address_conversion_func):\n return deploy(\n w3, bytes32_contract_factory, address_conversion_func, args=[request.param]\n )\n\n\n@pytest.fixture\ndef undeployed_math_contract(math_contract_factory, address_conversion_func):\n empty_address = address_conversion_func(\n \"0x000000000000000000000000000000000000dEaD\"\n )\n _undeployed_math_contract = math_contract_factory(address=empty_address)\n return _undeployed_math_contract\n\n\n@pytest.fixture\ndef mismatched_math_contract(\n w3, string_contract_factory, math_contract_factory, address_conversion_func\n):\n deploy_txn = string_contract_factory.constructor(\"Caqalai\").transact()\n deploy_receipt = w3.eth.wait_for_transaction_receipt(deploy_txn)\n assert deploy_receipt is not None\n address = address_conversion_func(deploy_receipt[\"contractAddress\"])\n _mismatched_math_contract = math_contract_factory(address=address)\n return _mismatched_math_contract\n\n\ndef 
test_deploy_raises_due_to_strict_byte_checking_by_default(\n w3, bytes32_contract_factory, address_conversion_func\n):\n with pytest.raises(TypeError):\n deploy(\n w3,\n bytes32_contract_factory,\n address_conversion_func,\n args=[\"0406040604060406040604060406040604060406040604060406040604060406\"],\n )\n\n\ndef test_invalid_address_in_deploy_arg(contract_with_constructor_address_factory):\n with pytest.raises(InvalidAddress):\n contract_with_constructor_address_factory.constructor(\n \"0xd3cda913deb6f67967b99d67acdfa1712c293601\",\n ).transact()\n\n\ndef test_call_with_no_arguments(math_contract, call):\n result = call(contract=math_contract, contract_function=\"return13\")\n assert result == 13\n\n\ndef test_call_with_one_argument(math_contract, call):\n result = call(contract=math_contract, contract_function=\"multiply7\", func_args=[3])\n assert result == 21\n\n\n@pytest.mark.parametrize(\n \"call_args,call_kwargs\",\n (\n ((9, 7), {}),\n ((9,), {\"b\": 7}),\n (tuple(), {\"a\": 9, \"b\": 7}),\n ),\n)\ndef test_call_with_multiple_arguments(math_contract, call, call_args, call_kwargs):\n result = call(\n contract=math_contract,\n contract_function=\"add\",\n func_args=call_args,\n func_kwargs=call_kwargs,\n )\n assert result == 16\n\n\n@pytest.mark.parametrize(\n \"call_args,call_kwargs\",\n (\n ((9, 7), {}),\n ((9,), {\"b\": 7}),\n (tuple(), {\"a\": 9, \"b\": 7}),\n ),\n)\ndef test_saved_method_call_with_multiple_arguments(\n math_contract, call_args, call_kwargs\n):\n math_contract_add = math_contract.functions.add(*call_args, **call_kwargs)\n result = math_contract_add.call()\n assert result == 16\n\n\ndef test_call_get_string_value(string_contract, call):\n result = call(contract=string_contract, contract_function=\"getValue\")\n # eth_abi.decode() does not assume implicit utf-8\n # encoding of string return values. 
Thus, we need to decode\n # ourselves for fair comparison.\n assert result == \"Caqalai\"\n\n\ndef test_call_get_bytes32_array(arrays_contract, call):\n result = call(contract=arrays_contract, contract_function=\"getBytes32Value\")\n # expected_bytes32_array = [keccak('0'), keccak('1')]\n expected_bytes32_array = [\n b\"\\x04HR\\xb2\\xa6p\\xad\\xe5@~x\\xfb(c\\xc5\\x1d\\xe9\\xfc\\xb9eB\\xa0q\\x86\\xfe:\\xed\\xa6\\xbb\\x8a\\x11m\", # noqa: E501\n b\"\\xc8\\x9e\\xfd\\xaaT\\xc0\\xf2\\x0cz\\xdfa(\\x82\\xdf\\tP\\xf5\\xa9Qc~\\x03\\x07\\xcd\\xcbLg/)\\x8b\\x8b\\xc6\", # noqa: E501\n ]\n assert result == expected_bytes32_array\n\n\ndef test_call_get_bytes32_const_array(arrays_contract, call):\n result = call(contract=arrays_contract, contract_function=\"getBytes32ConstValue\")\n # expected_bytes32_array = [keccak('A'), keccak('B')]\n expected_bytes32_array = [\n b\"\\x03x?\\xac.\\xfe\\xd8\\xfb\\xc9\\xadD>Y.\\xe3\\x0ea\\xd6_G\\x11@\\xc1\\x0c\\xa1U\\xe97\\xb45\\xb7`\", # noqa: E501\n b\"\\x1fg[\\xff\\x07Q_]\\xf9g7\\x19N\\xa9E\\xc3lA\\xe7\\xb4\\xfc\\xef0{|\\xd4\\xd0\\xe6\\x02\\xa6\\x91\\x11\", # noqa: E501\n ]\n assert result == expected_bytes32_array\n\n\ndef test_call_get_byte_array(arrays_contract, call):\n result = call(contract=arrays_contract, contract_function=\"getByteValue\")\n expected_byte_arr = [b\"\\xff\", b\"\\xff\", b\"\\xff\", b\"\\xff\"]\n assert result == expected_byte_arr\n\n\ndef test_call_get_byte_array_non_strict(non_strict_arrays_contract, call):\n result = call(contract=non_strict_arrays_contract, contract_function=\"getByteValue\")\n expected_non_strict_byte_arr = [b\"\\xff\", b\"\\xff\", b\"\\xff\", b\"\\xff\"]\n assert result == expected_non_strict_byte_arr\n\n\n@pytest.mark.parametrize(\n \"args,expected\",\n [\n ([b\"1\"], [b\"1\"]),\n ([\"0xDe\"], [b\"\\xDe\"]),\n ([\"0xDe\", \"0xDe\"], [b\"\\xDe\", b\"\\xDe\"]),\n ],\n)\ndef test_set_byte_array(arrays_contract, call, transact, args, expected):\n transact(\n contract=arrays_contract,\n contract_function=\"setByteValue\",\n func_args=[args],\n )\n result = call(contract=arrays_contract, contract_function=\"getByteValue\")\n\n assert result == expected\n\n\n@pytest.mark.parametrize(\n \"args,expected\",\n [\n ([b\"1\"], [b\"1\"]),\n ([\"0xDe\"], [b\"\\xDe\"]),\n ([\"0xDe\", \"0xDe\"], [b\"\\xDe\", b\"\\xDe\"]),\n ],\n)\ndef test_set_byte_array_non_strict(\n non_strict_arrays_contract, call, transact, args, expected\n):\n transact(\n contract=non_strict_arrays_contract,\n contract_function=\"setByteValue\",\n func_args=[args],\n )\n result = call(contract=non_strict_arrays_contract, contract_function=\"getByteValue\")\n assert result == expected\n\n\n@pytest.mark.parametrize(\"args\", ([\"\"], [\"s\"]))\ndef test_set_byte_array_with_invalid_args(arrays_contract, transact, args):\n with pytest.raises(\n Web3ValidationError,\n match=\"Could not identify the intended function with name `setByteValue`\",\n ):\n transact(\n contract=arrays_contract,\n contract_function=\"setByteValue\",\n func_args=[args],\n )\n\n\ndef test_call_get_byte_const_array_strict_by_default(arrays_contract, call):\n result = call(contract=arrays_contract, contract_function=\"getByteConstValue\")\n expected_byte_arr = [b\"\\x00\", b\"\\x01\"]\n assert result == expected_byte_arr\n\n\ndef test_call_get_byte_const_array_non_strict(non_strict_arrays_contract, call):\n result = call(\n contract=non_strict_arrays_contract, contract_function=\"getByteConstValue\"\n )\n expected_byte_arr = [b\"\\x00\", b\"\\x01\"]\n assert result == expected_byte_arr\n\n\ndef 
test_call_read_address_variable(contract_with_constructor_address, call):\n result = call(\n contract=contract_with_constructor_address, contract_function=\"testAddr\"\n )\n assert result == \"0xd3CdA913deB6f67967B99D67aCDFa1712C293601\"\n\n\ndef test_init_with_ens_name_arg(w3, contract_with_constructor_address_factory, call):\n with contract_ens_addresses(\n contract_with_constructor_address_factory,\n [(\"arg-name.eth\", \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\")],\n ):\n address_contract = deploy(\n w3,\n contract_with_constructor_address_factory,\n args=[\n \"arg-name.eth\",\n ],\n )\n\n result = call(contract=address_contract, contract_function=\"testAddr\")\n assert result == \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\"\n\n\ndef test_call_read_bytes_variable(bytes_contract, call):\n result = call(contract=bytes_contract, contract_function=\"constValue\")\n assert result == b\"\\x01\\x23\"\n\n\ndef test_call_get_bytes_value_strict_by_default(bytes_contract, call):\n result = call(contract=bytes_contract, contract_function=\"getValue\")\n assert result == b\"\\x04\\x06\"\n\n\ndef test_call_get_bytes_value_non_strict(non_strict_bytes_contract, call):\n result = call(contract=non_strict_bytes_contract, contract_function=\"getValue\")\n assert result == b\"\\x04\\x06\"\n\n\ndef test_call_read_bytes32_variable(bytes32_contract, call):\n result = call(contract=bytes32_contract, contract_function=\"constValue\")\n assert (\n result\n == b\"\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\" # noqa: E501\n )\n\n\ndef test_call_get_bytes32_value(bytes32_contract, call):\n result = call(contract=bytes32_contract, contract_function=\"getValue\")\n assert (\n result\n == b\"\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\" # noqa: E501\n )\n\n\n@pytest.mark.parametrize(\n \"value, expected\",\n [\n (\n \"0x\" + \"11\" * 20,\n \"0x\" + \"11\" * 20,\n ),\n (\n \"0xbb9bc244d798123fde783fcc1c72d3bb8c189413\",\n InvalidAddress,\n ),\n (\n \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\",\n \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\",\n ),\n ],\n)\ndef test_call_address_reflector_with_address(\n address_reflector_contract, value, expected, call\n):\n if not isinstance(expected, str):\n with pytest.raises(expected):\n call(\n contract=address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[value],\n )\n else:\n assert (\n call(\n contract=address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[value],\n )\n == expected\n )\n\n\n@pytest.mark.parametrize(\n \"value, expected\",\n [\n (\n [\"0x\" + \"11\" * 20, \"0x\" + \"22\" * 20],\n [\"0x\" + \"11\" * 20, \"0x\" + \"22\" * 20],\n ),\n ([\"0x\" + \"11\" * 20, \"0x\" + \"aa\" * 20], InvalidAddress),\n (\n [\n \"0xFeC2079e80465cc8C687fFF9EE6386ca447aFec4\",\n \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\",\n ],\n [\n \"0xFeC2079e80465cc8C687fFF9EE6386ca447aFec4\",\n \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\",\n ],\n ),\n ],\n)\ndef test_call_address_list_reflector_with_address(\n address_reflector_contract, value, expected, call\n):\n if not isinstance(expected, list):\n with pytest.raises(expected):\n call(\n contract=address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[value],\n )\n else:\n assert (\n call(\n contract=address_reflector_contract,\n 
contract_function=\"reflect\",\n func_args=[value],\n )\n == expected\n )\n\n\ndef test_call_address_reflector_single_name(address_reflector_contract, call):\n with contract_ens_addresses(\n address_reflector_contract,\n [(\"dennisthepeasant.eth\", \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\")],\n ):\n result = call(\n contract=address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[\"dennisthepeasant.eth\"],\n )\n assert result == \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\"\n\n\ndef test_call_address_reflector_name_array(address_reflector_contract, call):\n names = [\n \"autonomouscollective.eth\",\n \"wedonthavealord.eth\",\n ]\n addresses = [\n \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\",\n \"0xFeC2079e80465cc8C687fFF9EE6386ca447aFec4\",\n ]\n\n with contract_ens_addresses(address_reflector_contract, zip(names, addresses)):\n result = call(\n contract=address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[names],\n )\n\n assert addresses == result\n\n\ndef test_call_rejects_invalid_ens_name(address_reflector_contract, call):\n with contract_ens_addresses(address_reflector_contract, []):\n with pytest.raises(NameNotFound):\n call(\n contract=address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[\"type0.eth\"],\n )\n\n\ndef test_call_missing_function(mismatched_math_contract, call):\n # note: contract being called needs to have a fallback function\n # (StringContract in this case)\n expected_missing_function_error_message = \"Could not decode contract function call\"\n with pytest.raises(BadFunctionCallOutput) as exception_info:\n call(contract=mismatched_math_contract, contract_function=\"return13\")\n assert expected_missing_function_error_message in str(exception_info.value)\n\n\ndef test_call_undeployed_contract(undeployed_math_contract, call):\n expected_undeployed_call_error_message = (\n \"Could not transact with/call contract function\"\n )\n with pytest.raises(BadFunctionCallOutput) as exception_info:\n call(contract=undeployed_math_contract, contract_function=\"return13\")\n assert expected_undeployed_call_error_message in str(exception_info.value)\n\n\ndef test_call_fallback_function(fallback_function_contract):\n result = fallback_function_contract.fallback.call()\n assert result == []\n\n\n@pytest.mark.parametrize(\n \"tx_params,contract_name,expected\",\n (\n ({\"gas\": 210000}, \"no_receive\", \"fallback\"),\n ({\"gas\": 210000, \"value\": 2}, \"no_receive\", \"\"),\n ({\"value\": 2, \"gas\": 210000, \"data\": \"0x477a5c98\"}, \"no_receive\", \"\"),\n ({\"gas\": 210000, \"data\": \"0x477a5c98\"}, \"no_receive\", \"fallback\"),\n ({\"data\": \"0x477a5c98\"}, \"receive\", \"fallback\"),\n ({\"value\": 2}, \"receive\", \"receive\"),\n ),\n)\ndef test_call_receive_fallback_function(\n w3,\n tx_params,\n expected,\n call,\n receive_function_contract,\n no_receive_function_contract,\n contract_name,\n):\n if contract_name == \"receive\":\n contract = receive_function_contract\n elif contract_name == \"no_receive\":\n contract = no_receive_function_contract\n else:\n raise AssertionError(\"contract must be either receive or no_receive\")\n\n initial_value = call(contract=contract, contract_function=\"getText\")\n assert initial_value == \"\"\n to = {\"to\": contract.address}\n merged = {**to, **tx_params}\n w3.eth.send_transaction(merged)\n final_value = call(contract=contract, contract_function=\"getText\")\n assert final_value == expected\n\n\ndef 
test_call_nonexistent_receive_function(fallback_function_contract):\n with pytest.raises(FallbackNotFound, match=\"No receive function was found\"):\n fallback_function_contract.receive.call()\n\n\ndef test_throws_error_if_block_out_of_range(w3, math_contract):\n w3.provider.make_request(method=\"evm_mine\", params=[20])\n with pytest.raises(BlockNumberOutofRange):\n math_contract.functions.counter().call(block_identifier=-50)\n\n\ndef test_accepts_latest_block(w3, math_contract):\n w3.provider.make_request(method=\"evm_mine\", params=[5])\n math_contract.functions.incrementCounter().transact()\n\n late = math_contract.functions.counter().call(block_identifier=\"latest\")\n pend = math_contract.functions.counter().call(block_identifier=\"pending\")\n\n assert late == 1\n assert pend == 1\n\n\ndef test_accepts_block_hash_as_identifier(w3, math_contract):\n blocks = w3.provider.make_request(method=\"evm_mine\", params=[5])\n math_contract.functions.incrementCounter().transact()\n more_blocks = w3.provider.make_request(method=\"evm_mine\", params=[5])\n\n old = math_contract.functions.counter().call(block_identifier=blocks[\"result\"][2])\n new = math_contract.functions.counter().call(\n block_identifier=more_blocks[\"result\"][2]\n )\n\n assert old == 0\n assert new == 1\n\n\ndef test_neg_block_indexes_from_the_end(w3, math_contract):\n w3.provider.make_request(method=\"evm_mine\", params=[5])\n math_contract.functions.incrementCounter().transact()\n math_contract.functions.incrementCounter().transact()\n w3.provider.make_request(method=\"evm_mine\", params=[5])\n\n output1 = math_contract.functions.counter().call(block_identifier=-7)\n output2 = math_contract.functions.counter().call(block_identifier=-6)\n\n assert output1 == 1\n assert output2 == 2\n\n\ndef test_returns_data_from_specified_block(w3, math_contract):\n start_num = w3.eth.get_block(\"latest\").number\n w3.provider.make_request(method=\"evm_mine\", params=[5])\n math_contract.functions.incrementCounter().transact()\n math_contract.functions.incrementCounter().transact()\n\n output1 = math_contract.functions.counter().call(block_identifier=start_num + 6)\n output2 = math_contract.functions.counter().call(block_identifier=start_num + 7)\n\n assert output1 == 1\n assert output2 == 2\n\n\nmessage_regex = (\n r\"\\nCould not identify the intended function with name `.*`, positional arguments \"\n r\"with type\\(s\\) `.*` and keyword arguments with type\\(s\\) `.*`.\"\n r\"\\nFound .* function\\(s\\) with the name `.*`: .*\"\n)\ndiagnosis_arg_regex = (\n r\"\\nFunction invocation failed due to improper number of arguments.\"\n)\ndiagnosis_encoding_regex = (\n r\"\\nFunction invocation failed due to no matching argument types.\"\n)\ndiagnosis_ambiguous_encoding = (\n r\"\\nAmbiguous argument encoding. 
\"\n r\"Provided arguments can be encoded to multiple functions matching this call.\"\n)\n\n\ndef test_no_functions_match_identifier(arrays_contract):\n with pytest.raises(MismatchedABI):\n arrays_contract.functions.thisFunctionDoesNotExist().call()\n\n\ndef test_function_1_match_identifier_wrong_number_of_args(arrays_contract):\n regex = message_regex + diagnosis_arg_regex\n with pytest.raises(Web3ValidationError, match=regex):\n arrays_contract.functions.setBytes32Value().call()\n\n\ndef test_function_1_match_identifier_wrong_args_encoding(arrays_contract):\n regex = message_regex + diagnosis_encoding_regex\n with pytest.raises(Web3ValidationError, match=regex):\n arrays_contract.functions.setBytes32Value(\"dog\").call()\n\n\n@pytest.mark.parametrize(\n \"arg1,arg2,diagnosis\",\n (\n (100, \"dog\", diagnosis_arg_regex),\n (\"dog\", None, diagnosis_encoding_regex),\n (100, None, diagnosis_ambiguous_encoding),\n ),\n)\ndef test_function_multiple_error_diagnoses(w3, arg1, arg2, diagnosis):\n Contract = w3.eth.contract(abi=MULTIPLE_FUNCTIONS)\n regex = message_regex + diagnosis\n with pytest.raises(Web3ValidationError, match=regex):\n if arg2:\n Contract.functions.a(arg1, arg2).call()\n else:\n Contract.functions.a(arg1).call()\n\n\n@pytest.mark.parametrize(\n \"address\",\n (\n \"0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE\", # checksummed\n b\"\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\", # noqa: E501\n ),\n)\ndef test_function_wrong_args_for_tuple_collapses_args_in_message(\n address,\n tuple_contract,\n):\n with pytest.raises(Web3ValidationError) as e:\n tuple_contract.functions.method(\n (1, [2, 3], [(4, [True, [False]], [address])])\n ).call()\n\n # assert the user arguments are formatted as expected:\n # (int,(int,int),((int,(bool,(bool)),(address))))\n e.match(\n \"\\\\(int,\\\\(int,int\\\\),\\\\(\\\\(int,\\\\(bool,\\\\(bool\\\\)\\\\),\\\\(address\\\\)\\\\)\\\\)\\\\)\"\n )\n\n # assert the found method signature is formatted as expected:\n # ['method((uint256,uint256[],(int256,bool[2],address[])[]))']\n e.match(\n \"\\\\['method\\\\(\\\\(uint256,uint256\\\\[\\\\],\\\\(int256,bool\\\\[2\\\\],address\\\\[\\\\]\\\\)\\\\[\\\\]\\\\)\\\\)'\\\\]\" # noqa: E501\n )\n\n\n@pytest.mark.parametrize(\n \"address\",\n (\n \"0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE\", # checksummed\n b\"\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\\xee\", # noqa: E501\n ),\n)\ndef test_function_wrong_args_for_tuple_collapses_kwargs_in_message(\n address, tuple_contract\n):\n with pytest.raises(Web3ValidationError) as e:\n tuple_contract.functions.method(\n a=(1, [2, 3], [(4, [True, [False]], [address])]) # noqa: E501\n ).call()\n\n # assert the user keyword arguments are formatted as expected:\n # {'a': '(int,(int,int),((int,(bool,(bool)),(address))))'}\n e.match(\n \"{'a': '\\\\(int,\\\\(int,int\\\\),\\\\(\\\\(int,\\\\(bool,\\\\(bool\\\\)\\\\),\\\\(address\\\\)\\\\)\\\\)\\\\)'}\" # noqa: E501\n )\n\n # assert the found method signature is formatted as expected:\n # ['method((uint256,uint256[],(int256,bool[2],address[])[]))']\n e.match(\n \"\\\\['method\\\\(\\\\(uint256,uint256\\\\[\\\\],\\\\(int256,bool\\\\[2\\\\],address\\\\[\\\\]\\\\)\\\\[\\\\]\\\\)\\\\)'\\\\]\" # noqa: E501\n )\n\n\ndef test_function_no_abi(w3):\n contract = w3.eth.contract()\n with pytest.raises(NoABIFound):\n contract.functions.thisFunctionDoesNotExist().call()\n\n\ndef test_call_abi_no_functions(w3):\n contract = 
w3.eth.contract(abi=[])\n with pytest.raises(NoABIFunctionsFound):\n contract.functions.thisFunctionDoesNotExist().call()\n\n\ndef test_call_not_sending_ether_to_nonpayable_function(payable_tester_contract, call):\n result = call(contract=payable_tester_contract, contract_function=\"doNoValueCall\")\n assert result == []\n\n\ndef test_call_sending_ether_to_nonpayable_function(payable_tester_contract, call):\n with pytest.raises(Web3ValidationError):\n call(\n contract=payable_tester_contract,\n contract_function=\"doNoValueCall\",\n tx_params={\"value\": 1},\n )\n\n\n@pytest.mark.parametrize(\n \"function, value\",\n (\n # minimum positive unambiguous value (larger than fixed8x1)\n (\"reflect\", Decimal(\"12.8\")),\n # maximum value (for ufixed256x1)\n (\"reflect\", Decimal(2**256 - 1) / 10),\n # maximum negative unambiguous value (less than 0 from ufixed*)\n (\"reflect\", Decimal(\"-0.1\")),\n # minimum value (for fixed8x1)\n (\"reflect\", Decimal(\"-12.8\")),\n # only ufixed256x80 type supports 2-80 decimals\n (\"reflect\", Decimal(2**256 - 1) / 10**80), # maximum allowed value\n (\"reflect\", Decimal(1) / 10**80), # smallest non-zero value\n # minimum value (for ufixed8x1)\n (\"reflect_short_u\", 0),\n # maximum value (for ufixed8x1)\n (\"reflect_short_u\", Decimal(\"25.5\")),\n ),\n)\ndef test_reflect_fixed_value(fixed_reflector_contract, function, value):\n contract_func = fixed_reflector_contract.functions[function]\n reflected = contract_func(value).call({\"gas\": 420000})\n assert reflected == value\n\n\nDEFAULT_DECIMALS = getcontext().prec\n\n\n@pytest.mark.parametrize(\n \"function, value, error\",\n (\n # out of range\n (\"reflect_short_u\", Decimal(\"25.6\"), \"no matching argument types\"),\n (\"reflect_short_u\", Decimal(\"-.1\"), \"no matching argument types\"),\n # too many digits for *x1, too large for 256x80\n (\"reflect\", Decimal(\"0.01\"), \"no matching argument types\"),\n # too many digits\n (\"reflect_short_u\", Decimal(\"0.01\"), \"no matching argument types\"),\n (\n \"reflect_short_u\",\n Decimal(f\"1e-{DEFAULT_DECIMALS + 1}\"),\n \"no matching argument types\",\n ),\n (\n \"reflect_short_u\",\n Decimal(\"25.4\" + \"9\" * DEFAULT_DECIMALS),\n \"no matching argument types\",\n ),\n (\"reflect\", Decimal(1) / 10**81, \"no matching argument types\"),\n # floats not accepted, for floating point error concerns\n (\"reflect_short_u\", 0.1, \"no matching argument types\"),\n # ambiguous\n (\"reflect\", Decimal(\"12.7\"), \"Ambiguous argument encoding\"),\n (\"reflect\", Decimal(0), \"Ambiguous argument encoding\"),\n (\"reflect\", 0, \"Ambiguous argument encoding\"),\n ),\n)\ndef test_invalid_fixed_value_reflections(\n fixed_reflector_contract, function, value, error\n):\n contract_func = fixed_reflector_contract.functions[function]\n with pytest.raises(Web3ValidationError, match=error):\n contract_func(value).call({\"gas\": 420000})\n\n\n@pytest.mark.parametrize(\n \"method_input, expected\",\n (\n (\n {\n \"a\": 123,\n \"b\": [1, 2],\n \"c\": [\n {\n \"x\": 234,\n \"y\": [True, False],\n \"z\": [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n },\n {\n \"x\": 345,\n \"y\": [False, False],\n \"z\": [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n },\n ],\n },\n (\n 123,\n [1, 2],\n [\n (\n 234,\n [True, False],\n [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n 
\"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n ),\n (\n 345,\n [False, False],\n [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n ),\n ],\n ),\n ),\n (\n (\n 123,\n [1, 2],\n [\n (\n 234,\n [True, False],\n [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n ),\n (\n 345,\n [False, False],\n [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n ),\n ],\n ),\n (\n 123,\n [1, 2],\n [\n (\n 234,\n [True, False],\n [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n ),\n (\n 345,\n [False, False],\n [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n ),\n ],\n ),\n ),\n ),\n)\ndef test_call_tuple_contract(tuple_contract, method_input, expected):\n result = tuple_contract.functions.method(method_input).call()\n assert result == expected\n\n\n@pytest.mark.parametrize(\n \"method_input, plain_tuple_output, type_str, namedtuple_repr\",\n (\n (\n {\n \"a\": 123,\n \"b\": [1, 2],\n \"c\": [\n {\n \"x\": 234,\n \"y\": [True, False],\n \"z\": [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n },\n {\n \"x\": 345,\n \"y\": [False, False],\n \"z\": [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n },\n ],\n },\n (\n 123,\n [1, 2],\n [\n (\n 234,\n [True, False],\n [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n ),\n (\n 345,\n [False, False],\n [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n ),\n ],\n ),\n \"<class 'web3._utils.abi.abi_decoded_namedtuple_factory.<locals>.ABIDecodedNamedTuple'>\", # noqa: E501\n \"ABIDecodedNamedTuple(a=123, b=[1, 2], c=[ABIDecodedNamedTuple(x=234, y=[True, False], z=['0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c', '0xfdF1946A9b40245224488F1a36f4A9ed4844a523', '0xfdF1946A9b40245224488F1a36f4A9ed4844a523']), ABIDecodedNamedTuple(x=345, y=[False, False], z=['0xefd1FF70c185A1C0b125939815225199079096Ee', '0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e'])])\", # noqa: E501\n ),\n ),\n)\ndef test_call_tuple_contract_with_decode_tuples_set(\n tuple_contract_with_decode_tuples,\n method_input,\n plain_tuple_output,\n type_str,\n namedtuple_repr,\n):\n result = tuple_contract_with_decode_tuples.functions.method(method_input).call()\n\n # check contract output matches dict_to_namedtuple output\n namedtuple_from_input = recursive_dict_to_namedtuple(method_input)\n assert result == namedtuple_from_input\n assert str(type(result)) == type_str\n assert result.__repr__() == namedtuple_repr\n\n # check that the namedtuple output is still a tuple\n assert result == plain_tuple_output\n\n # check that fields are correct\n assert result._fields == (\"a\", \"b\", \"c\")\n assert result.c[0]._fields == (\"x\", \"y\", \"z\")\n\n\n@pytest.mark.parametrize(\n \"method_input, expected\",\n (\n (\n {\n \"t\": [\n {\n \"u\": [\n {\"x\": 1, \"y\": 2},\n {\"x\": 3, \"y\": 4},\n {\"x\": 5, \"y\": 6},\n ]\n },\n {\n \"u\": [\n {\"x\": 7, 
\"y\": 8},\n {\"x\": 9, \"y\": 10},\n {\"x\": 11, \"y\": 12},\n ]\n },\n ]\n },\n (\n [\n (\n [\n (1, 2),\n (3, 4),\n (5, 6),\n ],\n ),\n (\n [\n (7, 8),\n (9, 10),\n (11, 12),\n ],\n ),\n ],\n ),\n ),\n (\n (\n [\n (\n [\n (1, 2),\n (3, 4),\n (5, 6),\n ],\n ),\n (\n [\n (7, 8),\n (9, 10),\n (11, 12),\n ],\n ),\n ],\n ),\n (\n [\n (\n [\n (1, 2),\n (3, 4),\n (5, 6),\n ],\n ),\n (\n [\n (7, 8),\n (9, 10),\n (11, 12),\n ],\n ),\n ],\n ),\n ),\n ),\n)\ndef test_call_nested_tuple_contract(nested_tuple_contract, method_input, expected):\n result = nested_tuple_contract.functions.method(method_input).call()\n assert result == expected\n\n\n@pytest.mark.parametrize(\n \"method_input, plain_tuple_output, type_str, namedtuple_repr\",\n (\n (\n {\n \"t\": [\n {\n \"u\": [\n {\"x\": 1, \"y\": 2},\n {\"x\": 3, \"y\": 4},\n {\"x\": 5, \"y\": 6},\n ]\n },\n {\n \"u\": [\n {\"x\": 7, \"y\": 8},\n {\"x\": 9, \"y\": 10},\n {\"x\": 11, \"y\": 12},\n ]\n },\n ]\n },\n (\n [\n (\n [\n (1, 2),\n (3, 4),\n (5, 6),\n ],\n ),\n (\n [\n (7, 8),\n (9, 10),\n (11, 12),\n ],\n ),\n ],\n ),\n \"<class 'web3._utils.abi.abi_decoded_namedtuple_factory.<locals>.ABIDecodedNamedTuple'>\", # noqa: E501\n \"ABIDecodedNamedTuple(t=[ABIDecodedNamedTuple(u=[ABIDecodedNamedTuple(x=1, y=2), ABIDecodedNamedTuple(x=3, y=4), ABIDecodedNamedTuple(x=5, y=6)]), ABIDecodedNamedTuple(u=[ABIDecodedNamedTuple(x=7, y=8), ABIDecodedNamedTuple(x=9, y=10), ABIDecodedNamedTuple(x=11, y=12)])])\", # noqa: E501\n ),\n ),\n)\ndef test_call_nested_tuple_contract_with_decode_tuples_set(\n nested_tuple_contract_with_decode_tuples,\n method_input,\n plain_tuple_output,\n type_str,\n namedtuple_repr,\n):\n result = nested_tuple_contract_with_decode_tuples.functions.method(\n method_input\n ).call()\n # check contract output matches dict_to_namedtuple output\n namedtuple_from_input = recursive_dict_to_namedtuple(method_input)\n assert result == namedtuple_from_input\n assert str(type(result)) == type_str\n assert result.__repr__() == namedtuple_repr\n\n # check that the namedtuple output is still a tuple\n assert result == plain_tuple_output\n\n # check that fields are correct\n assert result._fields == (\"t\",)\n assert result.t[0]._fields == (\"u\",)\n\n\ndef test_call_revert_contract(revert_contract):\n with pytest.raises(TransactionFailed, match=\"Function has been reverted.\"):\n # eth-tester will do a gas estimation if we don't submit a gas value,\n # which does not contain the revert reason. 
Avoid that by giving a gas\n # value.\n revert_contract.functions.revertWithMessage().call({\"gas\": 100000})\n\n\ndef test_changing_default_block_identifier(w3, math_contract):\n assert math_contract.caller.counter() == 0\n assert w3.eth.default_block == \"latest\"\n\n math_contract.functions.incrementCounter(7).transact()\n assert math_contract.caller.counter() == 7\n\n assert math_contract.functions.counter().call(block_identifier=1) == 0\n w3.eth.default_block = 1\n assert math_contract.functions.counter().call(block_identifier=None) == 0\n w3.eth.default_block = 0x2\n assert math_contract.functions.counter().call(block_identifier=None) == 7\n\n\n# -- async -- #\n\n\n@pytest.fixture\ndef async_bytes_contract_factory(async_w3):\n return async_w3.eth.contract(**BYTES_CONTRACT_DATA)\n\n\n@pytest_asyncio.fixture(params=[b\"\\x04\\x06\", \"0x0406\"])\nasync def async_bytes_contract(\n async_w3,\n request,\n async_bytes_contract_factory,\n address_conversion_func,\n):\n return await async_deploy(\n async_w3,\n async_bytes_contract_factory,\n address_conversion_func,\n args=[request.param],\n )\n\n\n@pytest_asyncio.fixture(\n params=[\n \"0x0406040604060406040604060406040604060406040604060406040604060406\",\n HexBytes(\"0406040604060406040604060406040604060406040604060406040604060406\"),\n ],\n)\nasync def async_bytes32_contract(async_w3, request, address_conversion_func):\n async_bytes32_contract_factory = async_w3.eth.contract(**BYTES32_CONTRACT_DATA)\n return await async_deploy(\n async_w3,\n async_bytes32_contract_factory,\n address_conversion_func,\n args=[request.param],\n )\n\n\n@pytest_asyncio.fixture\nasync def async_undeployed_math_contract(\n async_math_contract_factory, address_conversion_func\n):\n empty_address = address_conversion_func(\n \"0x000000000000000000000000000000000000dEaD\"\n )\n _undeployed_math_contract = async_math_contract_factory(address=empty_address)\n return _undeployed_math_contract\n\n\n@pytest_asyncio.fixture\nasync def async_mismatched_math_contract(\n async_w3,\n async_string_contract_factory,\n async_math_contract_factory,\n address_conversion_func,\n):\n deploy_txn = await async_string_contract_factory.constructor(\"Caqalai\").transact()\n deploy_receipt = await async_w3.eth.wait_for_transaction_receipt(deploy_txn)\n assert deploy_receipt is not None\n address = address_conversion_func(deploy_receipt[\"contractAddress\"])\n _mismatched_math_contract = async_math_contract_factory(address=address)\n return _mismatched_math_contract\n\n\n@pytest.fixture\n@pytest.mark.asyncio\nasync def test_async_deploy_raises_due_to_strict_byte_checking_by_default(\n async_w3, async_bytes_contract_factory, address_conversion_func\n):\n with pytest.raises(\n TypeError,\n match=\"One or more arguments could not be encoded to the necessary ABI type. 
\"\n \"Expected types are: bytes\",\n ):\n await async_deploy(\n async_w3,\n async_bytes_contract_factory,\n address_conversion_func,\n args=[\"0406\"],\n )\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\"args\", (\"0x0406\", \"0406\", HexBytes(\"0406\"), b\"\\x04\\x06\"))\nasync def test_async_deploy_with_non_strict_abi_check(\n async_w3_non_strict_abi,\n address_conversion_func,\n args,\n):\n async_non_strict_bytes_contract_factory = async_w3_non_strict_abi.eth.contract(\n **BYTES_CONTRACT_DATA\n )\n deployed_contract = await async_deploy(\n async_w3_non_strict_abi,\n async_non_strict_bytes_contract_factory,\n address_conversion_func,\n args=[args],\n )\n assert deployed_contract is not None\n assert is_address(deployed_contract.address)\n\n\n@pytest.mark.asyncio\nasync def test_async_invalid_address_in_deploy_arg(\n async_constructor_with_address_arg_contract_factory,\n):\n with pytest.raises(InvalidAddress):\n await async_constructor_with_address_arg_contract_factory.constructor(\n \"0xd3cda913deb6f67967b99d67acdfa1712c293601\",\n ).transact()\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"call_args,call_kwargs\",\n (\n ((9, 7), {}),\n ((9,), {\"b\": 7}),\n (tuple(), {\"a\": 9, \"b\": 7}),\n ),\n)\nasync def test_async_call_with_multiple_arguments(\n async_math_contract, async_call, call_args, call_kwargs\n):\n result = await async_call(\n contract=async_math_contract,\n contract_function=\"add\",\n func_args=call_args,\n func_kwargs=call_kwargs,\n )\n assert result == 16\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"call_args,call_kwargs\",\n (\n ((9, 7), {}),\n ((9,), {\"b\": 7}),\n (tuple(), {\"a\": 9, \"b\": 7}),\n ),\n)\nasync def test_async_saved_method_call_with_multiple_arguments(\n async_math_contract, call_args, call_kwargs\n):\n math_contract_add = async_math_contract.functions.add(*call_args, **call_kwargs)\n result = await math_contract_add.call()\n assert result == 16\n\n\n@pytest.mark.asyncio\nasync def test_async_call_get_string_value(async_string_contract, async_call):\n result = await async_call(\n contract=async_string_contract, contract_function=\"getValue\"\n )\n # eth_abi.decode() does not assume implicit utf-8\n # encoding of string return values. 
Thus, we need to decode\n # ourselves for fair comparison.\n assert result == \"Caqalai\"\n\n\n@pytest.mark.asyncio\nasync def test_async_call_get_bytes32_array(async_arrays_contract, async_call):\n result = await async_call(\n contract=async_arrays_contract, contract_function=\"getBytes32Value\"\n )\n # expected_bytes32_array = [keccak('0'), keccak('1')]\n expected_bytes32_array = [\n b\"\\x04HR\\xb2\\xa6p\\xad\\xe5@~x\\xfb(c\\xc5\\x1d\\xe9\\xfc\\xb9eB\\xa0q\\x86\\xfe:\\xed\\xa6\\xbb\\x8a\\x11m\", # noqa: E501\n b\"\\xc8\\x9e\\xfd\\xaaT\\xc0\\xf2\\x0cz\\xdfa(\\x82\\xdf\\tP\\xf5\\xa9Qc~\\x03\\x07\\xcd\\xcbLg/)\\x8b\\x8b\\xc6\", # noqa: E501\n ]\n assert result == expected_bytes32_array\n\n\n@pytest.mark.asyncio\nasync def test_async_call_get_bytes32_const_array(async_arrays_contract, async_call):\n result = await async_call(\n contract=async_arrays_contract, contract_function=\"getBytes32ConstValue\"\n )\n # expected_bytes32_array = [keccak('A'), keccak('B')]\n expected_bytes32_array = [\n b\"\\x03x?\\xac.\\xfe\\xd8\\xfb\\xc9\\xadD>Y.\\xe3\\x0ea\\xd6_G\\x11@\\xc1\\x0c\\xa1U\\xe97\\xb45\\xb7`\", # noqa: E501\n b\"\\x1fg[\\xff\\x07Q_]\\xf9g7\\x19N\\xa9E\\xc3lA\\xe7\\xb4\\xfc\\xef0{|\\xd4\\xd0\\xe6\\x02\\xa6\\x91\\x11\", # noqa: E501\n ]\n assert result == expected_bytes32_array\n\n\n@pytest.mark.asyncio\nasync def test_async_call_get_byte_array(async_arrays_contract, async_call):\n result = await async_call(\n contract=async_arrays_contract, contract_function=\"getByteValue\"\n )\n expected_byte_arr = [b\"\\xff\", b\"\\xff\", b\"\\xff\", b\"\\xff\"]\n assert result == expected_byte_arr\n\n\n@pytest.mark.asyncio\nasync def test_async_call_get_byte_array_non_strict(\n async_non_strict_arrays_contract, async_call\n):\n result = await async_call(\n contract=async_non_strict_arrays_contract, contract_function=\"getByteValue\"\n )\n expected_non_strict_byte_arr = [b\"\\xff\", b\"\\xff\", b\"\\xff\", b\"\\xff\"]\n assert result == expected_non_strict_byte_arr\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\"args,expected\", [([b\"\"], [b\"\\x00\"]), ([\"0x\"], [b\"\\x00\"])])\nasync def test_async_set_byte_array_non_strict(\n async_non_strict_arrays_contract, async_call, async_transact, args, expected\n):\n await async_transact(\n contract=async_non_strict_arrays_contract,\n contract_function=\"setByteValue\",\n func_args=[args],\n )\n result = await async_call(\n contract=async_non_strict_arrays_contract, contract_function=\"getByteValue\"\n )\n\n assert result == expected\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\"args,expected\", [([b\"1\"], [b\"1\"]), ([\"0xDe\"], [b\"\\xDe\"])])\nasync def test_async_set_byte_array_strict_by_default(\n async_arrays_contract, async_call, async_transact, args, expected\n):\n await async_transact(\n contract=async_arrays_contract,\n contract_function=\"setByteValue\",\n func_args=[args],\n )\n result = await async_call(\n contract=async_arrays_contract, contract_function=\"getByteValue\"\n )\n\n assert result == expected\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\"args\", ([\"\"], [\"s\"]))\nasync def test_async_set_strict_byte_array_with_invalid_args(\n async_arrays_contract, async_transact, args\n):\n with pytest.raises(Web3ValidationError):\n await async_transact(\n contract=async_arrays_contract,\n contract_function=\"setByteValue\",\n func_args=[args],\n )\n\n\n@pytest.mark.asyncio\nasync def test_async_call_get_byte_const_array_non_strict(\n async_non_strict_arrays_contract, async_call\n):\n result = await async_call(\n 
contract=async_non_strict_arrays_contract, contract_function=\"getByteConstValue\"\n )\n expected_byte_arr = [b\"\\x00\", b\"\\x01\"]\n assert result == expected_byte_arr\n\n\n@pytest.mark.asyncio\nasync def test_async_call_read_address_variable(\n async_constructor_with_address_argument_contract, async_call\n):\n result = await async_call(\n contract=async_constructor_with_address_argument_contract,\n contract_function=\"testAddr\",\n )\n assert result == \"0xd3CdA913deB6f67967B99D67aCDFa1712C293601\"\n\n\n@pytest.mark.xfail\n@pytest.mark.asyncio\nasync def test_async_init_with_ens_name_arg(\n async_w3, async_constructor_with_address_arg_contract_factory, async_call\n):\n with contract_ens_addresses(\n async_constructor_with_address_arg_contract_factory,\n [(\"arg-name.eth\", \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\")],\n ):\n address_contract = await async_deploy(\n async_w3,\n async_constructor_with_address_arg_contract_factory,\n args=[\n \"arg-name.eth\",\n ],\n )\n\n result = await async_call(contract=address_contract, contract_function=\"testAddr\")\n assert result == \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\"\n\n\n@pytest.mark.asyncio\nasync def test_async_call_read_bytes_variable(async_bytes_contract, async_call):\n result = await async_call(\n contract=async_bytes_contract, contract_function=\"constValue\"\n )\n assert result == b\"\\x01\\x23\"\n\n\n@pytest.mark.asyncio\nasync def test_async_call_get_bytes_value(async_bytes_contract, async_call):\n result = await async_call(\n contract=async_bytes_contract, contract_function=\"getValue\"\n )\n assert result == b\"\\x04\\x06\"\n\n\n@pytest.mark.asyncio\nasync def test_async_call_read_bytes32_variable(async_bytes32_contract, async_call):\n result = await async_call(\n contract=async_bytes32_contract, contract_function=\"constValue\"\n )\n assert (\n result\n == b\"\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\\x01\\x23\" # noqa: E501\n )\n\n\n@pytest.mark.asyncio\nasync def test_async_call_get_bytes32_value(async_bytes32_contract, async_call):\n result = await async_call(\n contract=async_bytes32_contract, contract_function=\"getValue\"\n )\n assert (\n result\n == b\"\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\\x04\\x06\" # noqa: E501\n )\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"value, expected\",\n [\n (\n \"0x\" + \"11\" * 20,\n \"0x\" + \"11\" * 20,\n ),\n (\n \"0xbb9bc244d798123fde783fcc1c72d3bb8c189413\",\n InvalidAddress,\n ),\n (\n \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\",\n \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\",\n ),\n ],\n)\nasync def test_async_call_address_reflector_with_address(\n async_address_reflector_contract, value, expected, async_call\n):\n if not isinstance(expected, str):\n with pytest.raises(expected):\n await async_call(\n contract=async_address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[value],\n )\n else:\n assert (\n await async_call(\n contract=async_address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[value],\n )\n == expected\n )\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"value, expected\",\n [\n (\n [\"0x\" + \"11\" * 20, \"0x\" + \"22\" * 20],\n [\"0x\" + \"11\" * 20, \"0x\" + \"22\" * 20],\n ),\n ([\"0x\" + \"11\" * 20, \"0x\" + \"aa\" * 20], InvalidAddress),\n (\n [\n 
\"0xFeC2079e80465cc8C687fFF9EE6386ca447aFec4\",\n \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\",\n ],\n [\n \"0xFeC2079e80465cc8C687fFF9EE6386ca447aFec4\",\n \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\",\n ],\n ),\n ],\n)\nasync def test_async_call_address_list_reflector_with_address(\n async_address_reflector_contract, value, expected, async_call\n):\n if not isinstance(expected, list):\n with pytest.raises(expected):\n await async_call(\n contract=async_address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[value],\n )\n else:\n assert (\n await async_call(\n contract=async_address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[value],\n )\n == expected\n )\n\n\n@pytest.mark.xfail\n@pytest.mark.asyncio\nasync def test_async_call_address_reflector_single_name(\n async_address_reflector_contract, async_call\n):\n with contract_ens_addresses(\n async_address_reflector_contract,\n [(\"dennisthepeasant.eth\", \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\")],\n ):\n result = await async_call(\n contract=async_address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[\"dennisthepeasant.eth\"],\n )\n assert result == \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\"\n\n\n@pytest.mark.xfail\n@pytest.mark.asyncio\nasync def test_async_call_address_reflector_name_array(\n async_address_reflector_contract, async_call\n):\n names = [\n \"autonomouscollective.eth\",\n \"wedonthavealord.eth\",\n ]\n addresses = [\n \"0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413\",\n \"0xFeC2079e80465cc8C687fFF9EE6386ca447aFec4\",\n ]\n\n with contract_ens_addresses(\n async_address_reflector_contract, zip(names, addresses)\n ):\n result = await async_call(\n contract=async_address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[names],\n )\n\n assert addresses == result\n\n\n@pytest.mark.xfail\n@pytest.mark.asyncio\nasync def test_async_call_rejects_invalid_ens_name(\n async_address_reflector_contract, async_call\n):\n with contract_ens_addresses(async_address_reflector_contract, []):\n with pytest.raises(NameNotFound):\n await async_call(\n contract=async_address_reflector_contract,\n contract_function=\"reflect\",\n func_args=[\"type0.eth\"],\n )\n\n\n@pytest.mark.asyncio\nasync def test_async_call_missing_function(async_mismatched_math_contract, async_call):\n # note: contract being called needs to have a fallback function\n # (StringContract in this case)\n expected_missing_function_error_message = \"Could not decode contract function call\"\n with pytest.raises(BadFunctionCallOutput) as exception_info:\n await async_call(\n contract=async_mismatched_math_contract, contract_function=\"return13\"\n )\n assert expected_missing_function_error_message in str(exception_info.value)\n\n\n@pytest.mark.asyncio\nasync def test_async_call_undeployed_contract(\n async_undeployed_math_contract, async_call\n):\n expected_undeployed_call_error_message = (\n \"Could not transact with/call contract function\"\n )\n with pytest.raises(BadFunctionCallOutput) as exception_info:\n await async_call(\n contract=async_undeployed_math_contract, contract_function=\"return13\"\n )\n assert expected_undeployed_call_error_message in str(exception_info.value)\n\n\n@pytest.mark.asyncio\nasync def test_async_call_fallback_function(async_fallback_function_contract):\n result = await async_fallback_function_contract.fallback.call()\n assert result == []\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"tx_params,contract_name,expected\",\n (\n ({\"gas\": 210000}, 
\"no_receive\", \"fallback\"),\n ({\"gas\": 210000, \"value\": 2}, \"no_receive\", \"\"),\n ({\"value\": 2, \"gas\": 210000, \"data\": \"0x477a5c98\"}, \"no_receive\", \"\"),\n ({\"gas\": 210000, \"data\": \"0x477a5c98\"}, \"no_receive\", \"fallback\"),\n ({\"data\": \"0x477a5c98\"}, \"receive\", \"fallback\"),\n ({\"value\": 2}, \"receive\", \"receive\"),\n ),\n)\nasync def test_async_call_receive_fallback_function(\n async_w3,\n tx_params,\n expected,\n async_call,\n async_receive_function_contract,\n async_no_receive_function_contract,\n contract_name,\n):\n if contract_name == \"receive\":\n contract = async_receive_function_contract\n elif contract_name == \"no_receive\":\n contract = async_no_receive_function_contract\n else:\n raise AssertionError(\"contract must be either receive or no_receive\")\n\n initial_value = await async_call(contract=contract, contract_function=\"getText\")\n assert initial_value == \"\"\n to = {\"to\": contract.address}\n merged = {**to, **tx_params}\n await async_w3.eth.send_transaction(merged)\n final_value = await async_call(contract=contract, contract_function=\"getText\")\n assert final_value == expected\n\n\n@pytest.mark.asyncio\nasync def test_async_call_nonexistent_receive_function(\n async_fallback_function_contract,\n):\n with pytest.raises(FallbackNotFound, match=\"No receive function was found\"):\n await async_fallback_function_contract.receive.call()\n\n\n@pytest.mark.asyncio\nasync def test_async_throws_error_if_block_out_of_range(async_w3, async_math_contract):\n await async_w3.provider.make_request(method=\"evm_mine\", params=[20])\n with pytest.raises(BlockNumberOutofRange):\n await async_math_contract.functions.counter().call(block_identifier=-50)\n\n\n@pytest.mark.asyncio\nasync def test_async_accepts_latest_block(async_w3, async_math_contract):\n await async_w3.provider.make_request(method=\"evm_mine\", params=[5])\n await async_math_contract.functions.incrementCounter().transact()\n\n late = await async_math_contract.functions.counter().call(block_identifier=\"latest\")\n pend = await async_math_contract.functions.counter().call(\n block_identifier=\"pending\"\n )\n\n assert late == 1\n assert pend == 1\n\n\n@pytest.mark.asyncio\nasync def test_async_accepts_block_hash_as_identifier(async_w3, async_math_contract):\n blocks = await async_w3.provider.make_request(method=\"evm_mine\", params=[5])\n await async_math_contract.functions.incrementCounter().transact()\n more_blocks = await async_w3.provider.make_request(method=\"evm_mine\", params=[5])\n\n old = await async_math_contract.functions.counter().call(\n block_identifier=blocks[\"result\"][2]\n )\n new = await async_math_contract.functions.counter().call(\n block_identifier=more_blocks[\"result\"][2]\n )\n\n assert old == 0\n assert new == 1\n\n\n@pytest.mark.asyncio\nasync def test_async_neg_block_indexes_from_the_end(async_w3, async_math_contract):\n await async_w3.provider.make_request(method=\"evm_mine\", params=[5])\n await async_math_contract.functions.incrementCounter().transact()\n await async_math_contract.functions.incrementCounter().transact()\n await async_w3.provider.make_request(method=\"evm_mine\", params=[5])\n\n output1 = await async_math_contract.functions.counter().call(block_identifier=-7)\n output2 = await async_math_contract.functions.counter().call(block_identifier=-6)\n\n assert output1 == 1\n assert output2 == 2\n\n\n@pytest.mark.asyncio\nasync def test_async_no_functions_match_identifier(async_arrays_contract):\n with pytest.raises(MismatchedABI):\n await 
async_arrays_contract.functions.thisFunctionDoesNotExist().call()\n\n\n@pytest.mark.asyncio\nasync def test_async_function_1_match_identifier_wrong_number_of_args(\n async_arrays_contract,\n):\n regex = message_regex + diagnosis_arg_regex\n with pytest.raises(Web3ValidationError, match=regex):\n await async_arrays_contract.functions.setBytes32Value().call()\n\n\n@pytest.mark.asyncio\nasync def test_async_function_1_match_identifier_wrong_args_encoding(\n async_arrays_contract,\n):\n regex = message_regex + diagnosis_encoding_regex\n with pytest.raises(Web3ValidationError, match=regex):\n await async_arrays_contract.functions.setBytes32Value(\"dog\").call()\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"arg1,arg2,diagnosis\",\n (\n (100, \"dog\", diagnosis_arg_regex),\n (\"dog\", None, diagnosis_encoding_regex),\n (100, None, diagnosis_ambiguous_encoding),\n ),\n)\nasync def test_async_function_multiple_error_diagnoses(async_w3, arg1, arg2, diagnosis):\n Contract = async_w3.eth.contract(abi=MULTIPLE_FUNCTIONS)\n regex = message_regex + diagnosis\n with pytest.raises(Web3ValidationError, match=regex):\n if arg2:\n await Contract.functions.a(arg1, arg2).call()\n else:\n await Contract.functions.a(arg1).call()\n\n\n@pytest.mark.asyncio\nasync def test_async_function_no_abi(async_w3):\n contract = async_w3.eth.contract()\n with pytest.raises(NoABIFound):\n await contract.functions.thisFunctionDoesNotExist().call()\n\n\n@pytest.mark.asyncio\nasync def test_async_call_abi_no_functions(async_w3):\n contract = async_w3.eth.contract(abi=[])\n with pytest.raises(NoABIFunctionsFound):\n await contract.functions.thisFunctionDoesNotExist().call()\n\n\n@pytest.mark.asyncio\nasync def test_async_call_not_sending_ether_to_nonpayable_function(\n async_payable_tester_contract, async_call\n):\n result = await async_call(\n contract=async_payable_tester_contract, contract_function=\"doNoValueCall\"\n )\n assert result == []\n\n\n@pytest.mark.asyncio\nasync def test_async_call_sending_ether_to_nonpayable_function(\n async_payable_tester_contract, async_call\n):\n with pytest.raises(Web3ValidationError):\n await async_call(\n contract=async_payable_tester_contract,\n contract_function=\"doNoValueCall\",\n tx_params={\"value\": 1},\n )\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"function, value\",\n (\n # minimum positive unambiguous value (larger than fixed8x1)\n (\"reflect\", Decimal(\"12.8\")),\n # maximum value (for ufixed256x1)\n (\"reflect\", Decimal(2**256 - 1) / 10),\n # maximum negative unambiguous value (less than 0 from ufixed*)\n (\"reflect\", Decimal(\"-0.1\")),\n # minimum value (for fixed8x1)\n (\"reflect\", Decimal(\"-12.8\")),\n # only ufixed256x80 type supports 2-80 decimals\n (\"reflect\", Decimal(2**256 - 1) / 10**80), # maximum allowed value\n (\"reflect\", Decimal(1) / 10**80), # smallest non-zero value\n # minimum value (for ufixed8x1)\n (\"reflect_short_u\", 0),\n # maximum value (for ufixed8x1)\n (\"reflect_short_u\", Decimal(\"25.5\")),\n ),\n)\nasync def test_async_reflect_fixed_value(\n async_fixed_reflector_contract, function, value\n):\n contract_func = async_fixed_reflector_contract.functions[function]\n reflected = await contract_func(value).call({\"gas\": 420000})\n assert reflected == value\n\n\nDEFAULT_DECIMALS = getcontext().prec\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"function, value, error\",\n (\n # out of range\n (\"reflect_short_u\", Decimal(\"25.6\"), \"no matching argument types\"),\n (\"reflect_short_u\", Decimal(\"-.1\"), \"no 
matching argument types\"),\n # too many digits for *x1, too large for 256x80\n (\"reflect\", Decimal(\"0.01\"), \"no matching argument types\"),\n # too many digits\n (\"reflect_short_u\", Decimal(\"0.01\"), \"no matching argument types\"),\n (\n \"reflect_short_u\",\n Decimal(f\"1e-{DEFAULT_DECIMALS + 1}\"),\n \"no matching argument types\",\n ),\n (\n \"reflect_short_u\",\n Decimal(\"25.4\" + \"9\" * DEFAULT_DECIMALS),\n \"no matching argument types\",\n ),\n (\"reflect\", Decimal(1) / 10**81, \"no matching argument types\"),\n # floats not accepted, for floating point error concerns\n (\"reflect_short_u\", 0.1, \"no matching argument types\"),\n # ambiguous\n (\"reflect\", Decimal(\"12.7\"), \"Ambiguous argument encoding\"),\n (\"reflect\", Decimal(0), \"Ambiguous argument encoding\"),\n (\"reflect\", 0, \"Ambiguous argument encoding\"),\n ),\n)\nasync def test_async_invalid_fixed_value_reflections(\n async_fixed_reflector_contract, function, value, error\n):\n contract_func = async_fixed_reflector_contract.functions[function]\n with pytest.raises(Web3ValidationError, match=error):\n await contract_func(value).call({\"gas\": 420000})\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"method_input, expected\",\n (\n (\n {\n \"a\": 123,\n \"b\": [1, 2],\n \"c\": [\n {\n \"x\": 234,\n \"y\": [True, False],\n \"z\": [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n },\n {\n \"x\": 345,\n \"y\": [False, False],\n \"z\": [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n },\n ],\n },\n (\n 123,\n [1, 2],\n [\n (\n 234,\n [True, False],\n [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n ),\n (\n 345,\n [False, False],\n [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n ),\n ],\n ),\n ),\n (\n (\n 123,\n [1, 2],\n [\n (\n 234,\n [True, False],\n [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n ),\n (\n 345,\n [False, False],\n [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n ),\n ],\n ),\n (\n 123,\n [1, 2],\n [\n (\n 234,\n [True, False],\n [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n ),\n (\n 345,\n [False, False],\n [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n ),\n ],\n ),\n ),\n ),\n)\nasync def test_async_call_tuple_contract(async_tuple_contract, method_input, expected):\n result = await async_tuple_contract.functions.method(method_input).call()\n assert result == expected\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"method_input, plain_tuple_output, type_str, namedtuple_repr\",\n (\n (\n {\n \"a\": 123,\n \"b\": [1, 2],\n \"c\": [\n {\n \"x\": 234,\n \"y\": [True, False],\n \"z\": [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n },\n {\n \"x\": 345,\n \"y\": [False, False],\n \"z\": [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n 
},\n ],\n },\n (\n 123,\n [1, 2],\n [\n (\n 234,\n [True, False],\n [\n \"0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n \"0xfdF1946A9b40245224488F1a36f4A9ed4844a523\",\n ],\n ),\n (\n 345,\n [False, False],\n [\n \"0xefd1FF70c185A1C0b125939815225199079096Ee\",\n \"0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e\",\n ],\n ),\n ],\n ),\n \"<class 'web3._utils.abi.abi_decoded_namedtuple_factory.<locals>.ABIDecodedNamedTuple'>\", # noqa: E501\n \"ABIDecodedNamedTuple(a=123, b=[1, 2], c=[ABIDecodedNamedTuple(x=234, y=[True, False], z=['0x4AD7E79d88650B01EEA2B1f069f01EE9db343d5c', '0xfdF1946A9b40245224488F1a36f4A9ed4844a523', '0xfdF1946A9b40245224488F1a36f4A9ed4844a523']), ABIDecodedNamedTuple(x=345, y=[False, False], z=['0xefd1FF70c185A1C0b125939815225199079096Ee', '0xf35C0784794F3Cd935F5754d3a0EbcE95bEf851e'])])\", # noqa: E501\n ),\n ),\n)\nasync def test_async_call_tuple_contract_with_decode_tuples_set(\n async_tuple_contract_with_decode_tuples,\n method_input,\n plain_tuple_output,\n type_str,\n namedtuple_repr,\n):\n result = await async_tuple_contract_with_decode_tuples.functions.method(\n method_input\n ).call()\n\n # check contract output matches dict_to_namedtuple output\n namedtuple_from_input = recursive_dict_to_namedtuple(method_input)\n assert result == namedtuple_from_input\n assert str(type(result)) == type_str\n assert result.__repr__() == namedtuple_repr\n\n # check that the namedtuple output is still a tuple\n assert result == plain_tuple_output\n\n # check that fields are correct\n assert result._fields == (\"a\", \"b\", \"c\")\n assert result.c[0]._fields == (\"x\", \"y\", \"z\")\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"method_input, expected\",\n (\n (\n {\n \"t\": [\n {\n \"u\": [\n {\"x\": 1, \"y\": 2},\n {\"x\": 3, \"y\": 4},\n {\"x\": 5, \"y\": 6},\n ]\n },\n {\n \"u\": [\n {\"x\": 7, \"y\": 8},\n {\"x\": 9, \"y\": 10},\n {\"x\": 11, \"y\": 12},\n ]\n },\n ]\n },\n (\n [\n (\n [\n (1, 2),\n (3, 4),\n (5, 6),\n ],\n ),\n (\n [\n (7, 8),\n (9, 10),\n (11, 12),\n ],\n ),\n ],\n ),\n ),\n (\n (\n [\n (\n [\n (1, 2),\n (3, 4),\n (5, 6),\n ],\n ),\n (\n [\n (7, 8),\n (9, 10),\n (11, 12),\n ],\n ),\n ],\n ),\n (\n [\n (\n [\n (1, 2),\n (3, 4),\n (5, 6),\n ],\n ),\n (\n [\n (7, 8),\n (9, 10),\n (11, 12),\n ],\n ),\n ],\n ),\n ),\n ),\n)\nasync def test_async_call_nested_tuple_contract(\n async_nested_tuple_contract, method_input, expected\n):\n result = await async_nested_tuple_contract.functions.method(method_input).call()\n assert result == expected\n\n\n@pytest.mark.asyncio\n@pytest.mark.parametrize(\n \"method_input, plain_tuple_output, type_str, namedtuple_repr\",\n (\n (\n {\n \"t\": [\n {\n \"u\": [\n {\"x\": 1, \"y\": 2},\n {\"x\": 3, \"y\": 4},\n {\"x\": 5, \"y\": 6},\n ]\n },\n {\n \"u\": [\n {\"x\": 7, \"y\": 8},\n {\"x\": 9, \"y\": 10},\n {\"x\": 11, \"y\": 12},\n ]\n },\n ]\n },\n (\n [\n (\n [\n (1, 2),\n (3, 4),\n (5, 6),\n ],\n ),\n (\n [\n (7, 8),\n (9, 10),\n (11, 12),\n ],\n ),\n ],\n ),\n \"<class 'web3._utils.abi.abi_decoded_namedtuple_factory.<locals>.ABIDecodedNamedTuple'>\", # noqa: E501\n \"ABIDecodedNamedTuple(t=[ABIDecodedNamedTuple(u=[ABIDecodedNamedTuple(x=1, y=2), ABIDecodedNamedTuple(x=3, y=4), ABIDecodedNamedTuple(x=5, y=6)]), ABIDecodedNamedTuple(u=[ABIDecodedNamedTuple(x=7, y=8), ABIDecodedNamedTuple(x=9, y=10), ABIDecodedNamedTuple(x=11, y=12)])])\", # noqa: E501\n ),\n ),\n)\nasync def test_async_call_nested_tuple_contract_with_decode_tuples_set(\n 
async_nested_tuple_contract_with_decode_tuples,\n method_input,\n plain_tuple_output,\n type_str,\n namedtuple_repr,\n):\n result = await async_nested_tuple_contract_with_decode_tuples.functions.method(\n method_input\n ).call()\n\n # check contract output matches dict_to_namedtuple output\n namedtuple_from_input = recursive_dict_to_namedtuple(method_input)\n assert result == namedtuple_from_input\n assert str(type(result)) == type_str\n assert result.__repr__() == namedtuple_repr\n\n # check that the namedtuple output is still a tuple\n assert result == plain_tuple_output\n\n # check that fields are correct\n assert result._fields == (\"t\",)\n assert result.t[0]._fields == (\"u\",)\n\n\n@pytest.mark.asyncio\nasync def test_async_call_revert_contract(async_revert_contract):\n with pytest.raises(TransactionFailed, match=\"Function has been reverted.\"):\n # eth-tester will do a gas estimation if we don't submit a gas value,\n # which does not contain the revert reason. Avoid that by giving a gas\n # value.\n await async_revert_contract.functions.revertWithMessage().call({\"gas\": 100000})\n\n\n@pytest.mark.asyncio\nasync def test_async_call_with_no_arguments(async_math_contract, call):\n result = await async_math_contract.functions.return13().call()\n assert result == 13\n\n\n@pytest.mark.asyncio\nasync def test_async_call_with_one_argument(async_math_contract, call):\n result = await async_math_contract.functions.multiply7(3).call()\n assert result == 21\n\n\n@pytest.mark.asyncio\nasync def test_async_returns_data_from_specified_block(async_w3, async_math_contract):\n start_num = await async_w3.eth.get_block(\"latest\")\n await async_w3.provider.make_request(method=\"evm_mine\", params=[5])\n await async_math_contract.functions.incrementCounter().transact()\n await async_math_contract.functions.incrementCounter().transact()\n\n output1 = await async_math_contract.functions.counter().call(\n block_identifier=start_num.number + 6\n )\n output2 = await async_math_contract.functions.counter().call(\n block_identifier=start_num.number + 7\n )\n\n assert output1 == 1\n assert output2 == 2\n\n\n@pytest.mark.asyncio\nasync def test_async_changing_default_block_identifier(async_w3, async_math_contract):\n assert await async_math_contract.caller.counter() == 0\n assert async_w3.eth.default_block == \"latest\"\n\n await async_math_contract.functions.incrementCounter(7).transact()\n assert await async_math_contract.caller.counter() == 7\n\n assert await async_math_contract.functions.counter().call(block_identifier=1) == 0\n async_w3.eth.default_block = 1\n assert (\n await async_math_contract.functions.counter().call(block_identifier=None) == 0\n )\n async_w3.eth.default_block = 0x2\n assert (\n await async_math_contract.functions.counter().call(block_identifier=None) == 7\n )\n",
"path": "tests/core/contracts/test_contract_call_interface.py"
},
{
"content": "import json\nimport pytest\n\nfrom eth_utils import (\n decode_hex,\n)\n\nfrom web3.contract import (\n Contract,\n)\nfrom web3.exceptions import (\n FallbackNotFound,\n)\n\n\ndef test_class_construction_sets_class_vars(\n w3, math_contract_abi, math_contract_bytecode, math_contract_runtime\n):\n math_contract_factory = w3.eth.contract(\n abi=math_contract_abi,\n bytecode=math_contract_bytecode,\n bytecode_runtime=math_contract_runtime,\n )\n\n assert math_contract_factory.w3 == w3\n assert math_contract_factory.bytecode == decode_hex(math_contract_bytecode)\n assert math_contract_factory.bytecode_runtime == decode_hex(math_contract_runtime)\n\n\ndef test_error_to_instantiate_base_class():\n with pytest.raises(AttributeError):\n Contract()\n\n\ndef test_abi_as_json_string(w3, math_contract_abi, some_address):\n abi_str = json.dumps(math_contract_abi)\n\n math_contract_factory = w3.eth.contract(abi=abi_str)\n assert math_contract_factory.abi == math_contract_abi\n\n math = math_contract_factory(some_address)\n assert math.abi == math_contract_abi\n\n\ndef test_error_to_call_non_existent_fallback(\n w3, math_contract_abi, math_contract_bytecode, math_contract_runtime\n):\n math_contract = w3.eth.contract(\n abi=math_contract_abi,\n bytecode=math_contract_bytecode,\n bytecode_runtime=math_contract_runtime,\n )\n with pytest.raises(FallbackNotFound):\n math_contract.fallback.estimate_gas()\n",
"path": "tests/core/contracts/test_contract_class_construction.py"
},
{
"content": "import copy\nfrom typing import (\n TYPE_CHECKING,\n Any,\n Awaitable,\n Callable,\n Dict,\n Iterable,\n List,\n Optional,\n Sequence,\n Type,\n cast,\n)\n\nfrom eth_typing import (\n ChecksumAddress,\n)\nfrom eth_utils import (\n combomethod,\n)\nfrom eth_utils.toolz import (\n partial,\n)\nfrom hexbytes import (\n HexBytes,\n)\n\nfrom web3._utils.abi import (\n fallback_func_abi_exists,\n filter_by_type,\n receive_func_abi_exists,\n)\nfrom web3._utils.async_transactions import (\n async_fill_transaction_defaults,\n)\nfrom web3._utils.compat import (\n Self,\n)\nfrom web3._utils.contracts import (\n async_parse_block_identifier,\n parse_block_identifier_no_extra_call,\n)\nfrom web3._utils.datatypes import (\n PropertyCheckingFactory,\n)\nfrom web3._utils.events import (\n AsyncEventFilterBuilder,\n get_event_data,\n)\nfrom web3._utils.filters import (\n AsyncLogFilter,\n)\nfrom web3._utils.function_identifiers import (\n FallbackFn,\n ReceiveFn,\n)\nfrom web3._utils.normalizers import (\n normalize_abi,\n normalize_address_no_ens,\n normalize_bytecode,\n)\nfrom web3.contract.base_contract import (\n BaseContract,\n BaseContractCaller,\n BaseContractConstructor,\n BaseContractEvent,\n BaseContractEvents,\n BaseContractFunction,\n BaseContractFunctions,\n NonExistentFallbackFunction,\n NonExistentReceiveFunction,\n)\nfrom web3.contract.utils import (\n async_build_transaction_for_function,\n async_call_contract_function,\n async_estimate_gas_for_function,\n async_transact_with_contract_function,\n find_functions_by_identifier,\n get_function_by_identifier,\n)\nfrom web3.exceptions import (\n ABIFunctionNotFound,\n NoABIFound,\n NoABIFunctionsFound,\n Web3ValidationError,\n)\nfrom web3.types import (\n ABI,\n BlockIdentifier,\n CallOverride,\n EventData,\n TxParams,\n)\nfrom web3.utils import (\n get_abi_input_names,\n)\n\nif TYPE_CHECKING:\n from ens import AsyncENS # noqa: F401\n from web3 import AsyncWeb3 # noqa: F401\n\n\nclass AsyncContractEvent(BaseContractEvent):\n # mypy types\n w3: \"AsyncWeb3\"\n\n @combomethod\n async def get_logs(\n self,\n argument_filters: Optional[Dict[str, Any]] = None,\n fromBlock: Optional[BlockIdentifier] = None,\n toBlock: Optional[BlockIdentifier] = None,\n block_hash: Optional[HexBytes] = None,\n ) -> Awaitable[Iterable[EventData]]:\n \"\"\"Get events for this contract instance using eth_getLogs API.\n\n This is a stateless method, as opposed to createFilter.\n It can be safely called against nodes which do not provide\n eth_newFilter API, like Infura nodes.\n\n If there are many events,\n like ``Transfer`` events for a popular token,\n the Ethereum node might be overloaded and timeout\n on the underlying JSON-RPC call.\n\n Example - how to get all ERC-20 token transactions\n for the latest 10 blocks:\n\n .. code-block:: python\n\n from = max(mycontract.web3.eth.block_number - 10, 1)\n to = mycontract.web3.eth.block_number\n\n events = mycontract.events.Transfer.getLogs(fromBlock=from, toBlock=to)\n\n for e in events:\n print(e[\"args\"][\"from\"],\n e[\"args\"][\"to\"],\n e[\"args\"][\"value\"])\n\n The returned processed log values will look like:\n\n .. 
code-block:: python\n\n (\n AttributeDict({\n 'args': AttributeDict({}),\n 'event': 'LogNoArguments',\n 'logIndex': 0,\n 'transactionIndex': 0,\n 'transactionHash': HexBytes('...'),\n 'address': '0xF2E246BB76DF876Cef8b38ae84130F4F55De395b',\n 'blockHash': HexBytes('...'),\n 'blockNumber': 3\n }),\n AttributeDict(...),\n ...\n )\n\n See also: :func:`web3.middleware.filter.local_filter_middleware`.\n\n :param argument_filters: Filter by argument values. Indexed arguments are\n filtered by the node while non-indexed arguments are filtered by the library.\n :param fromBlock: block number or \"latest\", defaults to \"latest\"\n :param toBlock: block number or \"latest\". Defaults to \"latest\"\n :param block_hash: block hash. Cannot be set at the\n same time as fromBlock or toBlock\n :yield: Tuple of :class:`AttributeDict` instances\n \"\"\"\n event_abi = self._get_event_abi()\n\n # validate ``argument_filters`` if present\n if argument_filters is not None:\n event_arg_names = get_abi_input_names(event_abi)\n if not all(arg in event_arg_names for arg in argument_filters.keys()):\n raise Web3ValidationError(\n \"When filtering by argument names, all argument names must be \"\n \"present in the contract's event ABI.\"\n )\n\n _filter_params = self._get_event_filter_params(\n event_abi, argument_filters, fromBlock, toBlock, block_hash\n )\n # call JSON-RPC API\n logs = await self.w3.eth.get_logs(_filter_params)\n\n # convert raw binary data to Python proxy objects as described by ABI:\n all_event_logs = tuple(\n get_event_data(self.w3.codec, event_abi, entry) for entry in logs\n )\n filtered_logs = self._process_get_logs_argument_filters(\n event_abi,\n all_event_logs,\n argument_filters,\n )\n return cast(Awaitable[Iterable[EventData]], filtered_logs)\n\n @combomethod\n async def create_filter(\n self,\n *, # PEP 3102\n argument_filters: Optional[Dict[str, Any]] = None,\n fromBlock: Optional[BlockIdentifier] = None,\n toBlock: BlockIdentifier = \"latest\",\n address: Optional[ChecksumAddress] = None,\n topics: Optional[Sequence[Any]] = None,\n ) -> AsyncLogFilter:\n \"\"\"\n Create filter object that tracks logs emitted by this contract event.\n \"\"\"\n filter_builder = AsyncEventFilterBuilder(self._get_event_abi(), self.w3.codec)\n self._set_up_filter_builder(\n argument_filters,\n fromBlock,\n toBlock,\n address,\n topics,\n filter_builder,\n )\n log_filter = await filter_builder.deploy(self.w3)\n log_filter.log_entry_formatter = get_event_data(\n self.w3.codec, self._get_event_abi()\n )\n log_filter.builder = filter_builder\n\n return log_filter\n\n @combomethod\n def build_filter(self) -> AsyncEventFilterBuilder:\n builder = AsyncEventFilterBuilder(\n self._get_event_abi(),\n self.w3.codec,\n formatter=get_event_data(self.w3.codec, self._get_event_abi()),\n )\n builder.address = self.address\n return builder\n\n\nclass AsyncContractEvents(BaseContractEvents):\n def __init__(\n self, abi: ABI, w3: \"AsyncWeb3\", address: Optional[ChecksumAddress] = None\n ) -> None:\n super().__init__(abi, w3, AsyncContractEvent, address)\n\n\nclass AsyncContractFunction(BaseContractFunction):\n # mypy types\n w3: \"AsyncWeb3\"\n\n def __call__(self, *args: Any, **kwargs: Any) -> \"AsyncContractFunction\":\n clone = copy.copy(self)\n if args is None:\n clone.args = tuple()\n else:\n clone.args = args\n\n if kwargs is None:\n clone.kwargs = {}\n else:\n clone.kwargs = kwargs\n clone._set_function_info()\n return clone\n\n @classmethod\n def factory(cls, class_name: str, **kwargs: Any) -> Self:\n return 
PropertyCheckingFactory(class_name, (cls,), kwargs)(kwargs.get(\"abi\"))\n\n async def call(\n self,\n transaction: Optional[TxParams] = None,\n block_identifier: BlockIdentifier = None,\n state_override: Optional[CallOverride] = None,\n ccip_read_enabled: Optional[bool] = None,\n ) -> Any:\n \"\"\"\n Execute a contract function call using the `eth_call` interface.\n\n This method prepares a ``Caller`` object that exposes the contract\n functions and public variables as callable Python functions.\n\n Reading a public ``owner`` address variable example:\n\n .. code-block:: python\n\n ContractFactory = w3.eth.contract(\n abi=wallet_contract_definition[\"abi\"]\n )\n\n # Not a real contract address\n contract = ContractFactory(\"0x2f70d3d26829e412A602E83FE8EeBF80255AEeA5\")\n\n # Read \"owner\" public variable\n addr = contract.functions.owner().call()\n\n :param transaction: Dictionary of transaction info for web3 interface\n :param block_identifier TODO\n :param state_override TODO\n :param ccip_read_enabled TODO\n :return: ``Caller`` object that has contract public functions\n and variables exposed as Python methods\n \"\"\"\n call_transaction = self._get_call_txparams(transaction)\n\n block_id = await async_parse_block_identifier(self.w3, block_identifier)\n\n return await async_call_contract_function(\n self.w3,\n self.address,\n self._return_data_normalizers,\n self.function_identifier,\n call_transaction,\n block_id,\n self.contract_abi,\n self.abi,\n state_override,\n ccip_read_enabled,\n self.decode_tuples,\n *self.args,\n **self.kwargs,\n )\n\n async def transact(self, transaction: Optional[TxParams] = None) -> HexBytes:\n setup_transaction = self._transact(transaction)\n return await async_transact_with_contract_function(\n self.address,\n self.w3,\n self.function_identifier,\n setup_transaction,\n self.contract_abi,\n self.abi,\n *self.args,\n **self.kwargs,\n )\n\n async def estimate_gas(\n self,\n transaction: Optional[TxParams] = None,\n block_identifier: Optional[BlockIdentifier] = None,\n ) -> int:\n setup_transaction = self._estimate_gas(transaction)\n return await async_estimate_gas_for_function(\n self.address,\n self.w3,\n self.function_identifier,\n setup_transaction,\n self.contract_abi,\n self.abi,\n block_identifier,\n *self.args,\n **self.kwargs,\n )\n\n async def build_transaction(\n self, transaction: Optional[TxParams] = None\n ) -> TxParams:\n built_transaction = self._build_transaction(transaction)\n return await async_build_transaction_for_function(\n self.address,\n self.w3,\n self.function_identifier,\n built_transaction,\n self.contract_abi,\n self.abi,\n *self.args,\n **self.kwargs,\n )\n\n @staticmethod\n def get_fallback_function(\n abi: ABI,\n async_w3: \"AsyncWeb3\",\n address: Optional[ChecksumAddress] = None,\n ) -> \"AsyncContractFunction\":\n if abi and fallback_func_abi_exists(abi):\n return AsyncContractFunction.factory(\n \"fallback\",\n w3=async_w3,\n contract_abi=abi,\n address=address,\n function_identifier=FallbackFn,\n )()\n return cast(AsyncContractFunction, NonExistentFallbackFunction())\n\n @staticmethod\n def get_receive_function(\n abi: ABI,\n async_w3: \"AsyncWeb3\",\n address: Optional[ChecksumAddress] = None,\n ) -> \"AsyncContractFunction\":\n if abi and receive_func_abi_exists(abi):\n return AsyncContractFunction.factory(\n \"receive\",\n w3=async_w3,\n contract_abi=abi,\n address=address,\n function_identifier=ReceiveFn,\n )()\n return cast(AsyncContractFunction, NonExistentReceiveFunction())\n\n\nclass 
AsyncContractFunctions(BaseContractFunctions):\n def __init__(\n self,\n abi: ABI,\n w3: \"AsyncWeb3\",\n address: Optional[ChecksumAddress] = None,\n decode_tuples: Optional[bool] = False,\n ) -> None:\n super().__init__(abi, w3, AsyncContractFunction, address, decode_tuples)\n\n def __getattr__(self, function_name: str) -> \"AsyncContractFunction\":\n if self.abi is None:\n raise NoABIFound(\n \"There is no ABI found for this contract.\",\n )\n if \"_functions\" not in self.__dict__:\n raise NoABIFunctionsFound(\n \"The abi for this contract contains no function definitions. \",\n \"Are you sure you provided the correct contract abi?\",\n )\n elif function_name not in self.__dict__[\"_functions\"]:\n raise ABIFunctionNotFound(\n f\"The function '{function_name}' was not found in this contract's abi.\",\n \" Are you sure you provided the correct contract abi?\",\n )\n else:\n return super().__getattribute__(function_name)\n\n\nclass AsyncContract(BaseContract):\n functions: AsyncContractFunctions = None\n caller: \"AsyncContractCaller\" = None\n\n # mypy types\n w3: \"AsyncWeb3\"\n\n #: Instance of :class:`ContractEvents` presenting available Event ABIs\n events: AsyncContractEvents = None\n\n def __init__(self, address: Optional[ChecksumAddress] = None) -> None:\n \"\"\"Create a new smart contract proxy object.\n\n :param address: Contract address as 0x hex string\"\"\"\n\n if self.w3 is None:\n raise AttributeError(\n \"The `Contract` class has not been initialized. Please use the \"\n \"`web3.contract` interface to create your contract class.\"\n )\n\n if address:\n self.address = normalize_address_no_ens(address)\n\n if not self.address:\n raise TypeError(\n \"The address argument is required to instantiate a contract.\"\n )\n self.functions = AsyncContractFunctions(\n self.abi, self.w3, self.address, decode_tuples=self.decode_tuples\n )\n self.caller = AsyncContractCaller(\n self.abi, self.w3, self.address, decode_tuples=self.decode_tuples\n )\n self.events = AsyncContractEvents(self.abi, self.w3, self.address)\n self.fallback = AsyncContract.get_fallback_function(\n self.abi, self.w3, AsyncContractFunction, self.address\n )\n self.receive = AsyncContract.get_receive_function(\n self.abi, self.w3, AsyncContractFunction, self.address\n )\n\n @classmethod\n def factory(\n cls, w3: \"AsyncWeb3\", class_name: Optional[str] = None, **kwargs: Any\n ) -> Type[Self]:\n kwargs[\"w3\"] = w3\n\n normalizers = {\n \"abi\": normalize_abi,\n \"address\": normalize_address_no_ens,\n \"bytecode\": normalize_bytecode,\n \"bytecode_runtime\": normalize_bytecode,\n }\n\n contract = cast(\n Type[Self],\n PropertyCheckingFactory(\n class_name or cls.__name__,\n (cls,),\n kwargs,\n normalizers=normalizers,\n ),\n )\n contract.functions = AsyncContractFunctions(\n contract.abi, contract.w3, decode_tuples=contract.decode_tuples\n )\n contract.caller = AsyncContractCaller(\n contract.abi,\n contract.w3,\n contract.address,\n decode_tuples=contract.decode_tuples,\n )\n contract.events = AsyncContractEvents(contract.abi, contract.w3)\n contract.fallback = AsyncContract.get_fallback_function(\n contract.abi,\n contract.w3,\n AsyncContractFunction,\n )\n contract.receive = AsyncContract.get_receive_function(\n contract.abi,\n contract.w3,\n AsyncContractFunction,\n )\n return contract\n\n @classmethod\n def constructor(cls, *args: Any, **kwargs: Any) -> Self:\n \"\"\"\n :param args: The contract constructor arguments as positional arguments\n :param kwargs: The contract constructor arguments as keyword 
arguments\n :return: a contract constructor object\n \"\"\"\n if cls.bytecode is None:\n raise ValueError(\n \"Cannot call constructor on a contract that does not have \"\n \"'bytecode' associated with it\"\n )\n\n return AsyncContractConstructor(cls.w3, cls.abi, cls.bytecode, *args, **kwargs)\n\n @combomethod\n def find_functions_by_identifier(\n cls,\n contract_abi: ABI,\n w3: \"AsyncWeb3\",\n address: ChecksumAddress,\n callable_check: Callable[..., Any],\n ) -> List[\"AsyncContractFunction\"]:\n return cast(\n List[AsyncContractFunction],\n find_functions_by_identifier(\n contract_abi, w3, address, callable_check, AsyncContractFunction\n ),\n )\n\n @combomethod\n def get_function_by_identifier(\n cls, fns: Sequence[\"AsyncContractFunction\"], identifier: str\n ) -> \"AsyncContractFunction\":\n return get_function_by_identifier(fns, identifier)\n\n\nclass AsyncContractCaller(BaseContractCaller):\n # mypy types\n w3: \"AsyncWeb3\"\n\n def __init__(\n self,\n abi: ABI,\n w3: \"AsyncWeb3\",\n address: ChecksumAddress,\n transaction: Optional[TxParams] = None,\n block_identifier: BlockIdentifier = None,\n ccip_read_enabled: Optional[bool] = None,\n decode_tuples: Optional[bool] = False,\n ) -> None:\n super().__init__(abi, w3, address, decode_tuples=decode_tuples)\n\n if self.abi:\n if transaction is None:\n transaction = {}\n\n self._functions = filter_by_type(\"function\", self.abi)\n for func in self._functions:\n fn = AsyncContractFunction.factory(\n func[\"name\"],\n w3=self.w3,\n contract_abi=self.abi,\n address=self.address,\n function_identifier=func[\"name\"],\n decode_tuples=decode_tuples,\n )\n\n # TODO: The no_extra_call method gets around the fact that we can't call\n # the full async method from within a class's __init__ method. We need\n # to see if there's a way to account for all desired elif cases.\n block_id = parse_block_identifier_no_extra_call(\n self.w3, block_identifier\n )\n caller_method = partial(\n self.call_function,\n fn,\n transaction=transaction,\n block_identifier=block_id,\n ccip_read_enabled=ccip_read_enabled,\n )\n\n setattr(self, func[\"name\"], caller_method)\n\n def __call__(\n self,\n transaction: Optional[TxParams] = None,\n block_identifier: BlockIdentifier = None,\n ccip_read_enabled: Optional[bool] = None,\n ) -> \"AsyncContractCaller\":\n if transaction is None:\n transaction = {}\n return type(self)(\n self.abi,\n self.w3,\n self.address,\n transaction=transaction,\n block_identifier=block_identifier,\n ccip_read_enabled=ccip_read_enabled,\n decode_tuples=self.decode_tuples,\n )\n\n\nclass AsyncContractConstructor(BaseContractConstructor):\n # mypy types\n w3: \"AsyncWeb3\"\n\n @combomethod\n async def transact(self, transaction: Optional[TxParams] = None) -> HexBytes:\n return await self.w3.eth.send_transaction(self._get_transaction(transaction))\n\n @combomethod\n async def build_transaction(\n self, transaction: Optional[TxParams] = None\n ) -> TxParams:\n \"\"\"\n Build the transaction dictionary without sending\n \"\"\"\n built_transaction = self._build_transaction(transaction)\n return await async_fill_transaction_defaults(self.w3, built_transaction)\n\n @combomethod\n async def estimate_gas(\n self,\n transaction: Optional[TxParams] = None,\n block_identifier: Optional[BlockIdentifier] = None,\n ) -> int:\n transaction = self._estimate_gas(transaction)\n\n return await self.w3.eth.estimate_gas(\n transaction, block_identifier=block_identifier\n )\n",
"path": "web3/contract/async_contract.py"
},
{
"content": "import copy\nfrom typing import (\n TYPE_CHECKING,\n Any,\n Callable,\n Dict,\n Iterable,\n List,\n Optional,\n Sequence,\n Type,\n cast,\n)\n\nfrom eth_typing import (\n ChecksumAddress,\n)\nfrom eth_utils import (\n combomethod,\n)\nfrom eth_utils.toolz import (\n partial,\n)\nfrom hexbytes import (\n HexBytes,\n)\n\nfrom web3._utils.abi import (\n fallback_func_abi_exists,\n filter_by_type,\n receive_func_abi_exists,\n)\nfrom web3._utils.compat import (\n Self,\n)\nfrom web3._utils.contracts import (\n parse_block_identifier,\n)\nfrom web3._utils.datatypes import (\n PropertyCheckingFactory,\n)\nfrom web3._utils.events import (\n EventFilterBuilder,\n get_event_data,\n)\nfrom web3._utils.filters import (\n LogFilter,\n)\nfrom web3._utils.function_identifiers import (\n FallbackFn,\n ReceiveFn,\n)\nfrom web3._utils.normalizers import (\n normalize_abi,\n normalize_address,\n normalize_bytecode,\n)\nfrom web3._utils.transactions import (\n fill_transaction_defaults,\n)\nfrom web3.contract.base_contract import (\n BaseContract,\n BaseContractCaller,\n BaseContractConstructor,\n BaseContractEvent,\n BaseContractEvents,\n BaseContractFunction,\n BaseContractFunctions,\n NonExistentFallbackFunction,\n NonExistentReceiveFunction,\n)\nfrom web3.contract.utils import (\n build_transaction_for_function,\n call_contract_function,\n estimate_gas_for_function,\n find_functions_by_identifier,\n get_function_by_identifier,\n transact_with_contract_function,\n)\nfrom web3.exceptions import (\n ABIFunctionNotFound,\n NoABIFound,\n NoABIFunctionsFound,\n Web3ValidationError,\n)\nfrom web3.types import (\n ABI,\n BlockIdentifier,\n CallOverride,\n EventData,\n TxParams,\n)\nfrom web3.utils import (\n get_abi_input_names,\n)\n\nif TYPE_CHECKING:\n from ens import ENS # noqa: F401\n from web3 import Web3 # noqa: F401\n\n\nclass ContractEvent(BaseContractEvent):\n # mypy types\n w3: \"Web3\"\n\n @combomethod\n def get_logs(\n self,\n argument_filters: Optional[Dict[str, Any]] = None,\n fromBlock: Optional[BlockIdentifier] = None,\n toBlock: Optional[BlockIdentifier] = None,\n block_hash: Optional[HexBytes] = None,\n ) -> Iterable[EventData]:\n \"\"\"Get events for this contract instance using eth_getLogs API.\n\n This is a stateless method, as opposed to create_filter.\n It can be safely called against nodes which do not provide\n eth_newFilter API, like Infura nodes.\n\n If there are many events,\n like ``Transfer`` events for a popular token,\n the Ethereum node might be overloaded and timeout\n on the underlying JSON-RPC call.\n\n Example - how to get all ERC-20 token transactions\n for the latest 10 blocks:\n\n .. code-block:: python\n\n from = max(mycontract.web3.eth.block_number - 10, 1)\n to = mycontract.web3.eth.block_number\n\n events = mycontract.events.Transfer.get_logs(fromBlock=from, toBlock=to)\n\n for e in events:\n print(e[\"args\"][\"from\"],\n e[\"args\"][\"to\"],\n e[\"args\"][\"value\"])\n\n The returned processed log values will look like:\n\n .. code-block:: python\n\n (\n AttributeDict({\n 'args': AttributeDict({}),\n 'event': 'LogNoArguments',\n 'logIndex': 0,\n 'transactionIndex': 0,\n 'transactionHash': HexBytes('...'),\n 'address': '0xF2E246BB76DF876Cef8b38ae84130F4F55De395b',\n 'blockHash': HexBytes('...'),\n 'blockNumber': 3\n }),\n AttributeDict(...),\n ...\n )\n\n See also: :func:`web3.middleware.filter.local_filter_middleware`.\n\n :param argument_filters: Filter by argument values. 
Indexed arguments are\n filtered by the node while non-indexed arguments are filtered by the library.\n :param fromBlock: block number or \"latest\", defaults to \"latest\"\n :param toBlock: block number or \"latest\". Defaults to \"latest\"\n :param block_hash: block hash. block_hash cannot be set at the\n same time as fromBlock or toBlock\n :yield: Tuple of :class:`AttributeDict` instances\n \"\"\"\n event_abi = self._get_event_abi()\n\n # validate ``argument_filters`` if present\n if argument_filters is not None:\n event_arg_names = get_abi_input_names(event_abi)\n if not all(arg in event_arg_names for arg in argument_filters.keys()):\n raise Web3ValidationError(\n \"When filtering by argument names, all argument names must be \"\n \"present in the contract's event ABI.\"\n )\n\n _filter_params = self._get_event_filter_params(\n event_abi, argument_filters, fromBlock, toBlock, block_hash\n )\n # call JSON-RPC API\n logs = self.w3.eth.get_logs(_filter_params)\n\n # convert raw binary data to Python proxy objects as described by ABI:\n all_event_logs = tuple(\n get_event_data(self.w3.codec, event_abi, entry) for entry in logs\n )\n filtered_logs = self._process_get_logs_argument_filters(\n event_abi,\n all_event_logs,\n argument_filters,\n )\n return filtered_logs\n\n @combomethod\n def create_filter(\n self,\n *, # PEP 3102\n argument_filters: Optional[Dict[str, Any]] = None,\n fromBlock: Optional[BlockIdentifier] = None,\n toBlock: BlockIdentifier = \"latest\",\n address: Optional[ChecksumAddress] = None,\n topics: Optional[Sequence[Any]] = None,\n ) -> LogFilter:\n \"\"\"\n Create filter object that tracks logs emitted by this contract event.\n \"\"\"\n filter_builder = EventFilterBuilder(self._get_event_abi(), self.w3.codec)\n self._set_up_filter_builder(\n argument_filters,\n fromBlock,\n toBlock,\n address,\n topics,\n filter_builder,\n )\n log_filter = filter_builder.deploy(self.w3)\n log_filter.log_entry_formatter = get_event_data(\n self.w3.codec, self._get_event_abi()\n )\n log_filter.builder = filter_builder\n\n return log_filter\n\n @combomethod\n def build_filter(self) -> EventFilterBuilder:\n builder = EventFilterBuilder(\n self._get_event_abi(),\n self.w3.codec,\n formatter=get_event_data(self.w3.codec, self._get_event_abi()),\n )\n builder.address = self.address\n return builder\n\n\nclass ContractEvents(BaseContractEvents):\n def __init__(\n self, abi: ABI, w3: \"Web3\", address: Optional[ChecksumAddress] = None\n ) -> None:\n super().__init__(abi, w3, ContractEvent, address)\n\n\nclass ContractFunction(BaseContractFunction):\n # mypy types\n w3: \"Web3\"\n\n def __call__(self, *args: Any, **kwargs: Any) -> \"ContractFunction\":\n clone = copy.copy(self)\n if args is None:\n clone.args = tuple()\n else:\n clone.args = args\n\n if kwargs is None:\n clone.kwargs = {}\n else:\n clone.kwargs = kwargs\n clone._set_function_info()\n return clone\n\n @classmethod\n def factory(cls, class_name: str, **kwargs: Any) -> Self:\n return PropertyCheckingFactory(class_name, (cls,), kwargs)(kwargs.get(\"abi\"))\n\n def call(\n self,\n transaction: Optional[TxParams] = None,\n block_identifier: BlockIdentifier = None,\n state_override: Optional[CallOverride] = None,\n ccip_read_enabled: Optional[bool] = None,\n ) -> Any:\n \"\"\"\n Execute a contract function call using the `eth_call` interface.\n\n This method prepares a ``Caller`` object that exposes the contract\n functions and public variables as callable Python functions.\n\n Reading a public ``owner`` address variable example:\n\n 
.. code-block:: python\n\n ContractFactory = w3.eth.contract(\n abi=wallet_contract_definition[\"abi\"]\n )\n\n # Not a real contract address\n contract = ContractFactory(\"0x2f70d3d26829e412A602E83FE8EeBF80255AEeA5\")\n\n # Read \"owner\" public variable\n addr = contract.functions.owner().call()\n\n :param transaction: Dictionary of transaction info for web3 interface\n :param block_identifier: TODO\n :param state_override TODO\n :param ccip_read_enabled TODO\n :return: ``Caller`` object that has contract public functions\n and variables exposed as Python methods\n \"\"\"\n call_transaction = self._get_call_txparams(transaction)\n\n block_id = parse_block_identifier(self.w3, block_identifier)\n\n return call_contract_function(\n self.w3,\n self.address,\n self._return_data_normalizers,\n self.function_identifier,\n call_transaction,\n block_id,\n self.contract_abi,\n self.abi,\n state_override,\n ccip_read_enabled,\n self.decode_tuples,\n *self.args,\n **self.kwargs,\n )\n\n def transact(self, transaction: Optional[TxParams] = None) -> HexBytes:\n setup_transaction = self._transact(transaction)\n return transact_with_contract_function(\n self.address,\n self.w3,\n self.function_identifier,\n setup_transaction,\n self.contract_abi,\n self.abi,\n *self.args,\n **self.kwargs,\n )\n\n def estimate_gas(\n self,\n transaction: Optional[TxParams] = None,\n block_identifier: Optional[BlockIdentifier] = None,\n ) -> int:\n setup_transaction = self._estimate_gas(transaction)\n return estimate_gas_for_function(\n self.address,\n self.w3,\n self.function_identifier,\n setup_transaction,\n self.contract_abi,\n self.abi,\n block_identifier,\n *self.args,\n **self.kwargs,\n )\n\n def build_transaction(self, transaction: Optional[TxParams] = None) -> TxParams:\n built_transaction = self._build_transaction(transaction)\n return build_transaction_for_function(\n self.address,\n self.w3,\n self.function_identifier,\n built_transaction,\n self.contract_abi,\n self.abi,\n *self.args,\n **self.kwargs,\n )\n\n @staticmethod\n def get_fallback_function(\n abi: ABI,\n w3: \"Web3\",\n address: Optional[ChecksumAddress] = None,\n ) -> \"ContractFunction\":\n if abi and fallback_func_abi_exists(abi):\n return ContractFunction.factory(\n \"fallback\",\n w3=w3,\n contract_abi=abi,\n address=address,\n function_identifier=FallbackFn,\n )()\n return cast(ContractFunction, NonExistentFallbackFunction())\n\n @staticmethod\n def get_receive_function(\n abi: ABI,\n w3: \"Web3\",\n address: Optional[ChecksumAddress] = None,\n ) -> \"ContractFunction\":\n if abi and receive_func_abi_exists(abi):\n return ContractFunction.factory(\n \"receive\",\n w3=w3,\n contract_abi=abi,\n address=address,\n function_identifier=ReceiveFn,\n )()\n return cast(ContractFunction, NonExistentReceiveFunction())\n\n\nclass ContractFunctions(BaseContractFunctions):\n def __init__(\n self,\n abi: ABI,\n w3: \"Web3\",\n address: Optional[ChecksumAddress] = None,\n decode_tuples: Optional[bool] = False,\n ) -> None:\n super().__init__(abi, w3, ContractFunction, address, decode_tuples)\n\n def __getattr__(self, function_name: str) -> \"ContractFunction\":\n if self.abi is None:\n raise NoABIFound(\n \"There is no ABI found for this contract.\",\n )\n if \"_functions\" not in self.__dict__:\n raise NoABIFunctionsFound(\n \"The abi for this contract contains no function definitions. 
\",\n \"Are you sure you provided the correct contract abi?\",\n )\n elif function_name not in self.__dict__[\"_functions\"]:\n raise ABIFunctionNotFound(\n f\"The function '{function_name}' was not found in this contract's abi.\",\n \" Are you sure you provided the correct contract abi?\",\n )\n else:\n return super().__getattribute__(function_name)\n\n\nclass Contract(BaseContract):\n # mypy types\n w3: \"Web3\"\n functions: ContractFunctions = None\n caller: \"ContractCaller\" = None\n\n # Instance of :class:`ContractEvents` presenting available Event ABIs\n events: ContractEvents = None\n\n def __init__(self, address: Optional[ChecksumAddress] = None) -> None:\n \"\"\"Create a new smart contract proxy object.\n :param address: Contract address as 0x hex string\"\"\"\n _w3 = self.w3\n if _w3 is None:\n raise AttributeError(\n \"The `Contract` class has not been initialized. Please use the \"\n \"`web3.contract` interface to create your contract class.\"\n )\n\n if address:\n self.address = normalize_address(cast(\"ENS\", _w3.ens), address)\n\n if not self.address:\n raise TypeError(\n \"The address argument is required to instantiate a contract.\"\n )\n\n self.functions = ContractFunctions(\n self.abi, _w3, self.address, decode_tuples=self.decode_tuples\n )\n self.caller = ContractCaller(\n self.abi, _w3, self.address, decode_tuples=self.decode_tuples\n )\n self.events = ContractEvents(self.abi, _w3, self.address)\n self.fallback = Contract.get_fallback_function(\n self.abi,\n _w3,\n ContractFunction,\n self.address,\n )\n self.receive = Contract.get_receive_function(\n self.abi,\n _w3,\n ContractFunction,\n self.address,\n )\n\n @classmethod\n def factory(\n cls, w3: \"Web3\", class_name: Optional[str] = None, **kwargs: Any\n ) -> Type[Self]:\n kwargs[\"w3\"] = w3\n\n normalizers = {\n \"abi\": normalize_abi,\n \"address\": partial(normalize_address, w3.ens),\n \"bytecode\": normalize_bytecode,\n \"bytecode_runtime\": normalize_bytecode,\n }\n\n contract = cast(\n Type[Self],\n PropertyCheckingFactory(\n class_name or cls.__name__,\n (cls,),\n kwargs,\n normalizers=normalizers,\n ),\n )\n contract.functions = ContractFunctions(\n contract.abi, contract.w3, decode_tuples=contract.decode_tuples\n )\n contract.caller = ContractCaller(\n contract.abi,\n contract.w3,\n contract.address,\n decode_tuples=contract.decode_tuples,\n )\n contract.events = ContractEvents(contract.abi, contract.w3)\n contract.fallback = Contract.get_fallback_function(\n contract.abi,\n contract.w3,\n ContractFunction,\n )\n contract.receive = Contract.get_receive_function(\n contract.abi,\n contract.w3,\n ContractFunction,\n )\n\n return contract\n\n @classmethod\n def constructor(cls, *args: Any, **kwargs: Any) -> \"ContractConstructor\":\n \"\"\"\n :param args: The contract constructor arguments as positional arguments\n :param kwargs: The contract constructor arguments as keyword arguments\n :return: a contract constructor object\n \"\"\"\n if cls.bytecode is None:\n raise ValueError(\n \"Cannot call constructor on a contract that does not have \"\n \"'bytecode' associated with it\"\n )\n\n return ContractConstructor(cls.w3, cls.abi, cls.bytecode, *args, **kwargs)\n\n @combomethod\n def find_functions_by_identifier(\n cls,\n contract_abi: ABI,\n w3: \"Web3\",\n address: ChecksumAddress,\n callable_check: Callable[..., Any],\n ) -> List[\"ContractFunction\"]:\n return cast(\n List[\"ContractFunction\"],\n find_functions_by_identifier(\n contract_abi, w3, address, callable_check, ContractFunction\n ),\n )\n\n 
@combomethod\n def get_function_by_identifier(\n cls, fns: Sequence[\"ContractFunction\"], identifier: str\n ) -> \"ContractFunction\":\n return get_function_by_identifier(fns, identifier)\n\n\nclass ContractCaller(BaseContractCaller):\n # mypy types\n w3: \"Web3\"\n\n def __init__(\n self,\n abi: ABI,\n w3: \"Web3\",\n address: ChecksumAddress,\n transaction: Optional[TxParams] = None,\n block_identifier: BlockIdentifier = None,\n ccip_read_enabled: Optional[bool] = None,\n decode_tuples: Optional[bool] = False,\n ) -> None:\n super().__init__(abi, w3, address, decode_tuples=decode_tuples)\n\n if self.abi:\n if transaction is None:\n transaction = {}\n\n self._functions = filter_by_type(\"function\", self.abi)\n for func in self._functions:\n fn = ContractFunction.factory(\n func[\"name\"],\n w3=self.w3,\n contract_abi=self.abi,\n address=self.address,\n function_identifier=func[\"name\"],\n decode_tuples=decode_tuples,\n )\n\n block_id = parse_block_identifier(self.w3, block_identifier)\n caller_method = partial(\n self.call_function,\n fn,\n transaction=transaction,\n block_identifier=block_id,\n ccip_read_enabled=ccip_read_enabled,\n )\n\n setattr(self, func[\"name\"], caller_method)\n\n def __call__(\n self,\n transaction: Optional[TxParams] = None,\n block_identifier: BlockIdentifier = None,\n ccip_read_enabled: Optional[bool] = None,\n ) -> \"ContractCaller\":\n if transaction is None:\n transaction = {}\n\n return type(self)(\n self.abi,\n self.w3,\n self.address,\n transaction=transaction,\n block_identifier=block_identifier,\n ccip_read_enabled=ccip_read_enabled,\n decode_tuples=self.decode_tuples,\n )\n\n\nclass ContractConstructor(BaseContractConstructor):\n # mypy types\n w3: \"Web3\"\n\n @combomethod\n def transact(self, transaction: Optional[TxParams] = None) -> HexBytes:\n return self.w3.eth.send_transaction(self._get_transaction(transaction))\n\n @combomethod\n def build_transaction(self, transaction: Optional[TxParams] = None) -> TxParams:\n \"\"\"\n Build the transaction dictionary without sending\n \"\"\"\n built_transaction = self._build_transaction(transaction)\n return fill_transaction_defaults(self.w3, built_transaction)\n\n @combomethod\n def estimate_gas(\n self,\n transaction: Optional[TxParams] = None,\n block_identifier: Optional[BlockIdentifier] = None,\n ) -> int:\n transaction = self._estimate_gas(transaction)\n\n return self.w3.eth.estimate_gas(transaction, block_identifier=block_identifier)\n",
"path": "web3/contract/contract.py"
}
] | 8_0 | python | import sys
import unittest


class TestContractFunctionNameCollision(unittest.TestCase):
    def setUp(self):
        from web3 import Web3, EthereumTesterProvider

        # Set up a Web3 instance with EthereumTesterProvider
        self.w3 = Web3(EthereumTesterProvider())
        self.contract_abi = [
            {
                "inputs": [],
                "name": "w3",
                "outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
                "stateMutability": "nonpayable",
                "type": "function",
            },
            {
                "inputs": [],
                "name": "z",
                "outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
                "stateMutability": "nonpayable",
                "type": "function",
            },
        ]
        self.contract_bytecode = "0x608060405234801561000f575f80fd5b5060d98061001c5f395ff3fe6080604052348015600e575f80fd5b50600436106030575f3560e01c8063a044c987146034578063c5d7802e14604e575b5f80fd5b603a6068565b60405160459190608c565b60405180910390f35b60546070565b604051605f9190608c565b60405180910390f35b5f6001905090565b5f90565b5f8115159050919050565b6086816074565b82525050565b5f602082019050609d5f830184607f565b9291505056fea264697066735822122056b76f22006829335981c36eca76f8aa0c6cf66d23990263a18b17fa27ab3db064736f6c63430008170033"
        # Deploy the contract
        ContractFactory = self.w3.eth.contract(
            abi=self.contract_abi, bytecode=self.contract_bytecode
        )
        tx_hash = ContractFactory.constructor().transact()
        tx_receipt = self.w3.eth.wait_for_transaction_receipt(tx_hash)
        self.contract = self.w3.eth.contract(
            address=tx_receipt.contractAddress,
            abi=self.contract_abi,
        )

    def test_w3_function_call(self):
        # Call the 'w3' function of the contract
        result = self.contract.functions.w3().call()
        self.assertTrue(result)


def main():
    suite = unittest.TestSuite()
    suite.addTests(
        unittest.TestLoader().loadTestsFromTestCase(TestContractFunctionNameCollision)
    )
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == "__main__":
    main()
|
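Note on record 8_0: in the `ContractCaller` source above, the constructor stores a `w3` attribute and then `setattr`s one attribute per ABI function, so a contract function literally named `w3` (as in the test) collides with the internal handle. A minimal stand-alone sketch of that hazard — `Caller` is a hypothetical stand-in, not web3.py code:

# Each ABI entry becomes an instance attribute, so an entry named "w3"
# silently shadows the handle assigned in __init__.
class Caller:
    def __init__(self, w3, abi):
        self.w3 = w3  # internal handle to the Web3-like instance
        for entry in abi:
            setattr(self, entry["name"], lambda name=entry["name"]: f"call {name}")

caller = Caller(w3="<Web3 instance>", abi=[{"name": "w3"}, {"name": "z"}])
print(caller.w3)  # now the bound contract call, not the Web3 instance
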
https://github.com/teamqurrent/web3.py | Update the `datastructures.py` file by implementing a `tupleize_lists_nested` function that converts lists to tuples throughout any mapping input, thereby making these objects hashable. In addition, modify the `__hash__` method of the `AttributeDict` class to apply `tupleize_lists_nested` before hashing, so that nested data structures are supported. | 0c0e0de | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 1259fcfa | diff --git a/newsfragments/2908.bugfix.rst b/newsfragments/2908.bugfix.rst
new file mode 100644
--- /dev/null
+++ b/newsfragments/2908.bugfix.rst
@@ -0,0 +1 @@
+fix AttributeDicts unhashable if they contain lists recursively tupleizing them
diff --git a/tests/core/datastructures/test_tuplelize_nested_lists.py b/tests/core/datastructures/test_tuplelize_nested_lists.py
new file mode 100644
--- /dev/null
+++ b/tests/core/datastructures/test_tuplelize_nested_lists.py
@@ -0,0 +1,122 @@
+import pytest
+import re
+
+from web3.datastructures import (
+ AttributeDict,
+ tupleize_lists_nested,
+)
+
+
+@pytest.mark.parametrize(
+ "input,expected",
+ (
+ (
+ {
+ "mylst": [1, 2, 3, [4, 5, [6, 7], 8], 9, 10],
+ "nested": {"mylst": [1, 2, 3, [1], [2, 3]]},
+ },
+ AttributeDict(
+ {
+ "mylst": (1, 2, 3, (4, 5, (6, 7), 8), 9, 10),
+ "nested": AttributeDict({"mylst": (1, 2, 3, (1,), (2, 3))}),
+ }
+ ),
+ ),
+ (
+ {
+ "mylst": [1, 2, 3, [5, 4, [6, 7], 8], 9, 10],
+ "nested": {"mylst": [1, 2, 3, [1], [2, 3]]},
+ },
+ AttributeDict(
+ {
+ "nested": AttributeDict({"mylst": (1, 2, 3, (1,), (2, 3))}),
+ "mylst": (1, 2, 3, (5, 4, (6, 7), 8), 9, 10),
+ }
+ ),
+ ),
+ (
+ AttributeDict(
+ {
+ "mylst": [1, 2, 3, [4, 5, [6, 7], 8], 9, 10],
+ "nested": AttributeDict({"mylst": [1, 2, 3, [1], [2, 3]]}),
+ }
+ ),
+ AttributeDict(
+ {
+ "mylst": (1, 2, 3, (4, 5, (6, 7), 8), 9, 10),
+ "nested": AttributeDict({"mylst": (1, 2, 3, (1,), (2, 3))}),
+ }
+ ),
+ ),
+ ),
+)
+def test_tupleization_and_hashing(input, expected):
+ assert tupleize_lists_nested(input) == expected
+ assert hash(AttributeDict(input)) == hash(expected)
+
+
+@pytest.mark.parametrize(
+ "input, error",
+ (
+ (
+ AttributeDict(
+ {
+ "myset": set({1, 2, 3}),
+ "nested": AttributeDict({"mylst": (1, 2, 3, (1,), (2, 3))}),
+ }
+ ),
+ {
+ "expected_exception": TypeError,
+ "match": "Found unhashable type 'set': {(1, 2, 3)}",
+ },
+ ),
+ (
+ AttributeDict(
+ {
+ "mybytearray": bytearray((1, 2, 3)),
+ "nested": AttributeDict({"mylst": [1, 2, 3, [1], [2, 3]]}),
+ }
+ ),
+ {
+ "expected_exception": TypeError,
+ "match": re.escape(
+ "Found unhashable type 'bytearray': bytearray(b'\\x01\\x02\\x03')"
+ ),
+ },
+ ),
+ ),
+)
+def test_tupleization_and_hashing_error(input, error):
+ with pytest.raises(**error):
+ assert hash(input)
+
+
+@pytest.mark.parametrize(
+ "input, error",
+ (
+ (
+ AttributeDict(
+ {
+ "mylst": (1, 2, 3, (4, 5, (6, 7), 8), 9, 10),
+ "nested": AttributeDict({"mylst": (1, 2, 3, (1,), (2, 3))}),
+ }
+ ),
+ None,
+ ),
+ (
+ AttributeDict(
+ {
+ "mylst": [1, 2, 3, [4, 5, [6, 7], 8], 9, 10],
+ "nested": AttributeDict({"mylst": [1, 2, 3, [1], [2, 3]]}),
+ }
+ ),
+ {"expected_exception": TypeError, "match": "unhashable type: 'list'"},
+ ),
+ ),
+)
+def test_AttributeDict_hashing_backwards_compatibility(input, error):
+ if error:
+ with pytest.raises(**error):
+ assert hash(tuple(sorted(input.items()))) == hash(input)
+ else:
+ assert hash(tuple(sorted(input.items()))) == hash(input)
diff --git a/web3/datastructures.py b/web3/datastructures.py
--- a/web3/datastructures.py
+++ b/web3/datastructures.py
@@ -114,7 +114,7 @@ class AttributeDict(ReadableAttributeDict[TKey, TValue], Hashable):
raise TypeError("This data is immutable -- create a copy instead of modifying")
def __hash__(self) -> int:
- return hash(tuple(sorted(self.items())))
+ return hash(tuple(sorted(tupleize_lists_nested(self).items())))
def __eq__(self, other: Any) -> bool:
if isinstance(other, Mapping):
@@ -123,6 +123,29 @@ class AttributeDict(ReadableAttributeDict[TKey, TValue], Hashable):
return False
+def tupleize_lists_nested(d: Mapping[TKey, TValue]) -> AttributeDict[TKey, TValue]:
+ """
+ Unhashable types inside dicts will throw an error if attempted to be hashed.
+ This method converts lists to tuples, rendering them hashable.
+ Other unhashable types found will raise a TypeError
+ """
+
+ def _to_tuple(lst: List[Any]) -> Any:
+ return tuple(_to_tuple(i) if isinstance(i, list) else i for i in lst)
+
+ ret = dict()
+ for k, v in d.items():
+ if isinstance(v, List):
+ ret[k] = _to_tuple(v)
+ elif isinstance(v, Mapping):
+ ret[k] = tupleize_lists_nested(v)
+ elif not isinstance(v, Hashable):
+ raise TypeError(f"Found unhashable type '{type(v).__name__}': {v}")
+ else:
+ ret[k] = v
+ return AttributeDict(ret)
+
+
class NamedElementOnion(Mapping[TKey, TValue]):
"""
Add layers to an onion-shaped structure. Optionally, inject to a specific layer.
| [
{
"content": "from collections import (\n OrderedDict,\n)\nfrom collections.abc import (\n Hashable,\n)\nfrom typing import (\n Any,\n Callable,\n Dict,\n Iterator,\n List,\n Mapping,\n MutableMapping,\n Optional,\n Sequence,\n Type,\n TypeVar,\n Union,\n cast,\n)\n\nfrom eth_utils import (\n is_integer,\n)\n\nfrom web3._utils.formatters import (\n recursive_map,\n)\n\n# Hashable must be immutable:\n# \"the implementation of hashable collections requires that a\n# key's hash value is immutable\"\n# https://docs.python.org/3/reference/datamodel.html#object.__hash__\n\nT = TypeVar(\"T\")\nTKey = TypeVar(\"TKey\", bound=Hashable)\nTValue = TypeVar(\"TValue\")\n\n\nclass ReadableAttributeDict(Mapping[TKey, TValue]):\n \"\"\"\n The read attributes for the AttributeDict types\n \"\"\"\n\n def __init__(\n self, dictionary: Dict[TKey, TValue], *args: Any, **kwargs: Any\n ) -> None:\n # type ignored on 46/50 b/c dict() expects str index type not TKey\n self.__dict__ = dict(dictionary) # type: ignore\n self.__dict__.update(dict(*args, **kwargs))\n\n def __getitem__(self, key: TKey) -> TValue:\n return self.__dict__[key] # type: ignore\n\n def __iter__(self) -> Iterator[Any]:\n return iter(self.__dict__)\n\n def __len__(self) -> int:\n return len(self.__dict__)\n\n def __repr__(self) -> str:\n return self.__class__.__name__ + f\"({self.__dict__!r})\"\n\n def _repr_pretty_(self, builder: Any, cycle: bool) -> None:\n \"\"\"\n Custom pretty output for the IPython console\n https://ipython.readthedocs.io/en/stable/api/generated/IPython.lib.pretty.html#extending # noqa: E501\n \"\"\"\n builder.text(self.__class__.__name__ + \"(\")\n if cycle:\n builder.text(\"<cycle>\")\n else:\n builder.pretty(self.__dict__)\n builder.text(\")\")\n\n @classmethod\n def _apply_if_mapping(cls: Type[T], value: TValue) -> Union[T, TValue]:\n if isinstance(value, Mapping):\n # error: Too many arguments for \"object\"\n return cls(value) # type: ignore\n else:\n return value\n\n @classmethod\n def recursive(cls, value: TValue) -> \"ReadableAttributeDict[TKey, TValue]\":\n return recursive_map(cls._apply_if_mapping, value)\n\n\nclass MutableAttributeDict(\n MutableMapping[TKey, TValue], ReadableAttributeDict[TKey, TValue]\n):\n def __setitem__(self, key: Any, val: Any) -> None:\n self.__dict__[key] = val\n\n def __delitem__(self, key: Any) -> None:\n del self.__dict__[key]\n\n\nclass AttributeDict(ReadableAttributeDict[TKey, TValue], Hashable):\n \"\"\"\n This provides superficial immutability, someone could hack around it\n \"\"\"\n\n def __setattr__(self, attr: str, val: TValue) -> None:\n if attr == \"__dict__\":\n super().__setattr__(attr, val)\n else:\n raise TypeError(\n \"This data is immutable -- create a copy instead of modifying\"\n )\n\n def __delattr__(self, key: str) -> None:\n raise TypeError(\"This data is immutable -- create a copy instead of modifying\")\n\n def __hash__(self) -> int:\n return hash(tuple(sorted(self.items())))\n\n def __eq__(self, other: Any) -> bool:\n if isinstance(other, Mapping):\n return self.__dict__ == dict(other)\n else:\n return False\n\n\nclass NamedElementOnion(Mapping[TKey, TValue]):\n \"\"\"\n Add layers to an onion-shaped structure. 
Optionally, inject to a specific layer.\n This structure is iterable, where the outermost layer is first, and innermost\n is last.\n \"\"\"\n\n def __init__(\n self,\n init_elements: Sequence[Any],\n valid_element: Callable[..., bool] = callable,\n ) -> None:\n self._queue: \"OrderedDict[Any, Any]\" = OrderedDict()\n for element in reversed(init_elements):\n if valid_element(element):\n self.add(element)\n else:\n self.add(*element)\n\n def add(self, element: TValue, name: Optional[TKey] = None) -> None:\n if name is None:\n name = cast(TKey, element)\n\n if name in self._queue:\n if name is element:\n raise ValueError(\"You can't add the same un-named instance twice\")\n else:\n raise ValueError(\n \"You can't add the same name again, use replace instead\"\n )\n\n self._queue[name] = element\n\n def inject(\n self, element: TValue, name: Optional[TKey] = None, layer: Optional[int] = None\n ) -> None:\n \"\"\"\n Inject a named element to an arbitrary layer in the onion.\n\n The current implementation only supports insertion at the innermost layer,\n or at the outermost layer. Note that inserting to the outermost is equivalent\n to calling :meth:`add` .\n \"\"\"\n if not is_integer(layer):\n raise TypeError(\"The layer for insertion must be an int.\")\n elif layer != 0 and layer != len(self._queue):\n raise NotImplementedError(\n f\"You can only insert to the beginning or end of a {type(self)}, \"\n f\"currently. You tried to insert to {layer}, but only 0 and \"\n f\"{len(self._queue)} are permitted. \"\n )\n\n self.add(element, name=name)\n\n if layer == 0:\n if name is None:\n name = cast(TKey, element)\n self._queue.move_to_end(name, last=False)\n elif layer == len(self._queue):\n return\n else:\n raise AssertionError(\n \"Impossible to reach: earlier validation raises an error\"\n )\n\n def clear(self) -> None:\n self._queue.clear()\n\n def replace(self, old: TKey, new: TKey) -> TValue:\n if old not in self._queue:\n raise ValueError(\n \"You can't replace unless one already exists, use add instead\"\n )\n to_be_replaced = self._queue[old]\n if to_be_replaced is old:\n # re-insert with new name in old slot\n self._replace_with_new_name(old, new)\n else:\n self._queue[old] = new\n return to_be_replaced\n\n def remove(self, old: TKey) -> None:\n if old not in self._queue:\n raise ValueError(\"You can only remove something that has been added\")\n del self._queue[old]\n\n @property\n def middlewares(self) -> Sequence[Any]:\n \"\"\"\n Returns middlewares in the appropriate order to be imported into a new Web3\n instance (reversed _queue order) as a list of (middleware, name) tuples.\n \"\"\"\n return [(val, key) for key, val in reversed(self._queue.items())]\n\n def _replace_with_new_name(self, old: TKey, new: TKey) -> None:\n self._queue[new] = new\n found_old = False\n for key in list(self._queue.keys()):\n if not found_old:\n if key == old:\n found_old = True\n continue\n elif key != new:\n self._queue.move_to_end(key)\n del self._queue[old]\n\n def __iter__(self) -> Iterator[TKey]:\n elements = self._queue.values()\n if not isinstance(elements, Sequence):\n # type ignored b/c elements is set as _OrderedDictValuesView[Any] on 210\n elements = list(elements) # type: ignore\n return iter(reversed(elements))\n\n def __add__(self, other: Any) -> \"NamedElementOnion[TKey, TValue]\":\n if not isinstance(other, NamedElementOnion):\n raise NotImplementedError(\n \"You can only combine with another NamedElementOnion\"\n )\n combined = self._queue.copy()\n combined.update(other._queue)\n return 
NamedElementOnion(cast(List[Any], combined.items()))\n\n def __contains__(self, element: Any) -> bool:\n return element in self._queue\n\n def __getitem__(self, element: TKey) -> TValue:\n return self._queue[element]\n\n def __len__(self) -> int:\n return len(self._queue)\n\n def __reversed__(self) -> Iterator[TValue]:\n elements = cast(List[Any], self._queue.values())\n if not isinstance(elements, Sequence):\n elements = list(elements)\n return iter(elements)\n",
"path": "web3/datastructures.py"
}
] | 8_1 | python | import sys
import unittest
import re
class TestTupleizationAndHashing(unittest.TestCase):
def setUp(self):
from web3.datastructures import (
AttributeDict
)
self.data = [
(
{
"mylst": [1, 2, 3, [4, 5, [6, 7], 8], 9, 10],
"nested": {"mylst": [1, 2, 3, [1], [2, 3]]},
},
AttributeDict(
{
"mylst": (1, 2, 3, (4, 5, (6, 7), 8), 9, 10),
"nested": AttributeDict({"mylst": (1, 2, 3, (1,), (2, 3))}),
}
),
),
(
{
"mylst": [1, 2, 3, [5, 4, [6, 7], 8], 9, 10],
"nested": {"mylst": [1, 2, 3, [1], [2, 3]]},
},
AttributeDict(
{
"nested": AttributeDict({"mylst": (1, 2, 3, (1,), (2, 3))}),
"mylst": (1, 2, 3, (5, 4, (6, 7), 8), 9, 10),
}
),
),
(
AttributeDict(
{
"mylst": [1, 2, 3, [4, 5, [6, 7], 8], 9, 10],
"nested": AttributeDict({"mylst": [1, 2, 3, [1], [2, 3]]}),
}
),
AttributeDict(
{
"mylst": (1, 2, 3, (4, 5, (6, 7), 8), 9, 10),
"nested": AttributeDict({"mylst": (1, 2, 3, (1,), (2, 3))}),
}
),
),
]
self.error_data = [
(
AttributeDict(
{
"myset": set({1, 2, 3}),
"nested": AttributeDict({"mylst": (1, 2, 3, (1,), (2, 3))}),
}
),
TypeError,
"unhashable type: 'set'",
),
(
AttributeDict(
{
"mybytearray": bytearray((1, 2, 3)),
"nested": AttributeDict({"mylst": [1, 2, 3, [1], [2, 3]]}),
}
),
TypeError,
re.escape("unhashable type: 'bytearray'"),
),
]
def test_tupleization_and_hashing(self):
from web3.datastructures import (
AttributeDict,
tupleize_lists_nested,
)
for input, expected in self.data:
self.assertEqual(tupleize_lists_nested(input), expected)
self.assertEqual(hash(AttributeDict(input)), hash(expected))
def test_errors(self):
for input, exc, msg in self.error_data:
with self.assertRaises(exc, msg=msg):
hash(input)
def test_AttributeDict_hashing_backwards_compatibility(self):
from web3.datastructures import (
AttributeDict
)
input = AttributeDict(
{
"mylst": (1, 2, 3, (4, 5, (6, 7), 8), 9, 10),
"nested": AttributeDict({"mylst": (1, 2, 3, (1,), (2, 3))}),
}
)
self.assertEqual(hash(tuple(sorted(input.items()))), hash(input))
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestTupleizationAndHashing))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main() |
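For reference, a condensed usage sketch of the behavior record 8_1 expects from `tupleize_lists_nested`, distilled from the solution patch and its tests:

from web3.datastructures import AttributeDict, tupleize_lists_nested

d = AttributeDict({"mylst": [1, 2, [3, 4]], "nested": {"mylst": [[5], 6]}})
# lists become tuples recursively; nested mappings become AttributeDicts
assert tupleize_lists_nested(d) == AttributeDict(
    {"mylst": (1, 2, (3, 4)), "nested": AttributeDict({"mylst": ((5,), 6)})}
)
hash(d)  # succeeds after the fix: __hash__ tupleizes lists before hashing
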
https://github.com/teamqurrent/web3.py | Remove an unnecessary constant, `WHOLE_CONFUSABLES`, from the file `ens/_normalization.py`. This constant was not being used in the codebase, and its removal is part of code cleanup and optimization. | 32aca5a | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 1ff10a30 | diff --git a/ens/_normalization.py b/ens/_normalization.py
--- a/ens/_normalization.py
+++ b/ens/_normalization.py
@@ -215,7 +215,6 @@ def _construct_whole_confusable_map() -> Dict[int, Set[str]]:
WHOLE_CONFUSABLE_MAP = _construct_whole_confusable_map()
VALID_CODEPOINTS = _extract_valid_codepoints()
MAX_LEN_EMOJI_PATTERN = max(len(e) for e in NORMALIZATION_SPEC["emoji"])
-WHOLE_CONFUSABLES = NORMALIZATION_SPEC["wholes"]
NSM_MAX = NORMALIZATION_SPEC["nsm_max"]
| [
{
"content": "from enum import (\n Enum,\n)\nimport json\nimport os\nfrom sys import (\n version_info,\n)\nfrom typing import (\n Any,\n Dict,\n List,\n Optional,\n Set,\n Tuple,\n Union,\n)\n\nfrom pyunormalize import (\n NFC,\n NFD,\n)\n\nfrom .exceptions import (\n InvalidName,\n)\n\n# TODO: remove once web3 supports python>=3.8\nif version_info >= (3, 8):\n from typing import (\n Literal,\n )\nelse:\n from typing_extensions import ( # type: ignore\n Literal,\n )\n\n\n# -- setup -- #\n\n\ndef _json_list_mapping_to_dict(\n f: Dict[str, Any],\n list_mapped_key: str,\n) -> Dict[str, Any]:\n \"\"\"\n Takes a `[key, [value]]` mapping from the original ENS spec json files and turns it\n into a `{key: value}` mapping.\n \"\"\"\n f[list_mapped_key] = {k: v for k, v in f[list_mapped_key]}\n return f\n\n\n# get the normalization spec json files downloaded from links in ENSIP-15\n# https://docs.ens.domains/ens-improvement-proposals/ensip-15-normalization-standard\nspecs_dir_path = os.path.abspath(os.path.join(os.path.dirname(__file__), \"specs\"))\nwith open(os.path.join(specs_dir_path, \"normalization_spec.json\")) as spec:\n f = json.load(spec)\n\n NORMALIZATION_SPEC = _json_list_mapping_to_dict(f, \"mapped\")\n # clean `FE0F` (65039) from entries since it's optional\n for e in NORMALIZATION_SPEC[\"emoji\"]:\n if 65039 in e:\n for i in range(e.count(65039)):\n e.remove(65039)\n\nwith open(os.path.join(specs_dir_path, \"nf.json\")) as nf:\n f = json.load(nf)\n NF = _json_list_mapping_to_dict(f, \"decomp\")\n\n\n# --- Classes -- #\n\n\nclass TokenType(Enum):\n EMOJI = \"emoji\"\n TEXT = \"text\"\n\n\nclass Token:\n type: Literal[TokenType.TEXT, TokenType.EMOJI]\n _original_text: str\n _original_codepoints: List[int]\n _normalized_codepoints: Optional[List[int]] = None\n\n restricted: bool = False\n\n def __init__(self, codepoints: List[int]) -> None:\n self._original_codepoints = codepoints\n self._original_text = \"\".join(chr(cp) for cp in codepoints)\n\n @property\n def codepoints(self) -> List[int]:\n return (\n self._normalized_codepoints\n if self._normalized_codepoints\n else self._original_codepoints\n )\n\n @property\n def text(self) -> str:\n return _codepoints_to_text(self.codepoints)\n\n\nclass EmojiToken(Token):\n type: Literal[TokenType.EMOJI] = TokenType.EMOJI\n\n\nclass TextToken(Token):\n type: Literal[TokenType.TEXT] = TokenType.TEXT\n\n\nclass Label:\n type: str\n tokens: List[Token]\n\n def __init__(\n self,\n type: str = None,\n tokens: List[Token] = None,\n ) -> None:\n self.type = type\n self.tokens = tokens\n\n @property\n def text(self) -> str:\n if not self.tokens:\n return \"\"\n\n return \"\".join(token.text for token in self.tokens)\n\n\nclass ENSNormalizedName:\n labels: List[Label]\n\n def __init__(self, normalized_labels: List[Label]) -> None:\n self.labels = normalized_labels\n\n @property\n def as_text(self) -> str:\n return \".\".join(label.text for label in self.labels)\n\n\n# -----\n\nGROUP_COMBINED_VALID_CPS = []\nfor d in NORMALIZATION_SPEC[\"groups\"]:\n GROUP_COMBINED_VALID_CPS.extend(d[\"primary\"])\n GROUP_COMBINED_VALID_CPS.extend(d[\"secondary\"])\n\nVALID_BY_GROUPS = {\n d[\"name\"]: set(d[\"primary\"] + d[\"secondary\"]) for d in NORMALIZATION_SPEC[\"groups\"]\n}\n\n\ndef _extract_valid_codepoints() -> Set[int]:\n all_valid = set()\n for _name, valid_cps in VALID_BY_GROUPS.items():\n all_valid.update(valid_cps)\n all_valid.update(map(ord, NFD(\"\".join(map(chr, all_valid)))))\n return all_valid\n\n\ndef _construct_whole_confusable_map() -> Dict[int, 
Set[str]]:\n \"\"\"\n Create a mapping, per confusable, that contains all the groups in the cp's whole\n confusable excluding the confusable extent of the cp itself - as per the spec at\n https://docs.ens.domains/ens-improvement-proposals/ensip-15-normalization-standard\n \"\"\"\n whole_map: Dict[int, Set[str]] = {}\n for whole in NORMALIZATION_SPEC[\"wholes\"]:\n whole_confusables: Set[int] = set(whole[\"valid\"] + whole[\"confused\"])\n confusable_extents: List[Tuple[Set[int], Set[str]]] = []\n\n for confusable_cp in whole_confusables:\n # create confusable extents for all whole confusables\n groups: Set[str] = set()\n for gn, gv in VALID_BY_GROUPS.items():\n if confusable_cp in gv:\n groups.add(gn)\n\n if len(confusable_extents) == 0:\n confusable_extents.append(({confusable_cp}, groups))\n else:\n extent_exists = False\n for entry in confusable_extents:\n if any(g in entry[1] for g in groups):\n extent_exists = True\n entry[0].update({confusable_cp})\n entry[1].update(groups)\n break\n\n if not extent_exists:\n confusable_extents.append(({confusable_cp}, groups))\n\n for confusable_cp in whole_confusables:\n confusable_cp_extent_groups: Set[str] = set()\n\n if confusable_cp in whole[\"confused\"]:\n whole_map[confusable_cp] = set()\n for ce in confusable_extents:\n if confusable_cp in ce[0]:\n confusable_cp_extent_groups.update(ce[1])\n else:\n whole_map[confusable_cp].update(ce[1])\n\n # remove the groups from confusable_cp's confusable extent\n whole_map[confusable_cp] = whole_map[confusable_cp].difference(\n confusable_cp_extent_groups\n )\n\n return whole_map\n\n\nWHOLE_CONFUSABLE_MAP = _construct_whole_confusable_map()\nVALID_CODEPOINTS = _extract_valid_codepoints()\nMAX_LEN_EMOJI_PATTERN = max(len(e) for e in NORMALIZATION_SPEC[\"emoji\"])\nWHOLE_CONFUSABLES = NORMALIZATION_SPEC[\"wholes\"]\nNSM_MAX = NORMALIZATION_SPEC[\"nsm_max\"]\n\n\ndef _is_fenced(cp: int) -> bool:\n return cp in [fenced[0] for fenced in NORMALIZATION_SPEC[\"fenced\"]]\n\n\ndef _codepoints_to_text(cps: Union[List[List[int]], List[int]]) -> str:\n return \"\".join(\n chr(cp) if isinstance(cp, int) else _codepoints_to_text(cp) for cp in cps\n )\n\n\ndef _validate_tokens_and_get_label_type(tokens: List[Token]) -> str:\n \"\"\"\n Validate tokens and return the label type.\n\n :param List[Token] tokens: the tokens to validate\n :raises InvalidName: if any of the tokens are invalid\n \"\"\"\n\n if all(token.type == TokenType.EMOJI for token in tokens):\n return \"emoji\"\n\n label_text = \"\".join(token.text for token in tokens)\n concat_text_tokens_as_str = \"\".join(\n t.text for t in tokens if t.type == TokenType.TEXT\n )\n all_token_cps = [cp for t in tokens for cp in t.codepoints]\n\n if len(tokens) == 1 and tokens[0].type == TokenType.TEXT:\n # if single text token\n encoded = concat_text_tokens_as_str.encode()\n try:\n encoded.decode(\"ascii\") # if label is ascii\n\n if \"_\" in concat_text_tokens_as_str[concat_text_tokens_as_str.count(\"_\") :]:\n raise InvalidName(\n \"Underscores '_' may only occur at the start of a label: \"\n f\"'{label_text}'\"\n )\n elif concat_text_tokens_as_str[2:4] == \"--\":\n raise InvalidName(\n \"A label's third and fourth characters cannot be hyphens '-': \"\n f\"'{label_text}'\"\n )\n return \"ascii\"\n except UnicodeDecodeError:\n pass\n\n if 95 in all_token_cps[all_token_cps.count(95) :]:\n raise InvalidName(\n f\"Underscores '_' may only occur at the start of a label: '{label_text}'\"\n )\n\n if _is_fenced(all_token_cps[0]) or _is_fenced(all_token_cps[-1]):\n raise 
InvalidName(\n f\"Label cannot start or end with a fenced codepoint: '{label_text}'\"\n )\n\n for cp_index, cp in enumerate(all_token_cps):\n if cp_index == len(all_token_cps) - 1:\n break\n next_cp = all_token_cps[cp_index + 1]\n if _is_fenced(cp) and _is_fenced(next_cp):\n raise InvalidName(\n f\"Label cannot contain two fenced codepoints in a row: '{label_text}'\"\n )\n\n if any(\n t.codepoints[0] in NORMALIZATION_SPEC[\"cm\"]\n for t in tokens\n if t.type == TokenType.TEXT\n ):\n raise InvalidName(\n \"At least one text token in label starts with a \"\n f\"combining mark: '{label_text}'\"\n )\n\n # find first group that contains all chars in label\n text_token_cps_set = {\n cp\n for token in tokens\n if token.type == TokenType.TEXT\n for cp in token.codepoints\n }\n\n chars_group_name = None\n for group_name, group_cps in VALID_BY_GROUPS.items():\n if text_token_cps_set.issubset(group_cps):\n chars_group_name = group_name\n break\n\n if not chars_group_name:\n raise InvalidName(\n f\"Label contains codepoints from multiple groups: '{label_text}'\"\n )\n\n # apply NFD and check contiguous NSM sequences\n for group in NORMALIZATION_SPEC[\"groups\"]:\n if group[\"name\"] == chars_group_name:\n if \"cm\" not in group:\n nfd_cps = [\n ord(nfd_c) for c in concat_text_tokens_as_str for nfd_c in NFD(c)\n ]\n\n next_index = -1\n for cp_i, cp in enumerate(nfd_cps):\n if cp_i <= next_index:\n continue\n\n if cp in NORMALIZATION_SPEC[\"nsm\"]:\n if cp_i == len(nfd_cps) - 1:\n break\n\n contiguous_nsm_cps = [cp]\n next_index = cp_i + 1\n next_cp = nfd_cps[next_index]\n while next_cp in NORMALIZATION_SPEC[\"nsm\"]:\n contiguous_nsm_cps.append(next_cp)\n if len(contiguous_nsm_cps) > NSM_MAX:\n raise InvalidName(\n \"Contiguous NSM sequence for label greater than NSM\"\n f\" max of {NSM_MAX}: '{label_text}'\"\n )\n next_index += 1\n if next_index == len(nfd_cps):\n break\n next_cp = nfd_cps[next_index]\n\n if not len(contiguous_nsm_cps) == len(set(contiguous_nsm_cps)):\n raise InvalidName(\n \"Contiguous NSM sequence for label contains duplicate \"\n f\"codepoints: '{label_text}'\"\n )\n break\n\n # check wholes\n # start with set of all groups with confusables\n retained_groups = set(VALID_BY_GROUPS.keys())\n confused_chars = set()\n buffer = set()\n\n for char_cp in text_token_cps_set:\n groups_excluding_ce = WHOLE_CONFUSABLE_MAP.get(char_cp)\n\n if groups_excluding_ce and len(groups_excluding_ce) > 0:\n if len(retained_groups) == 0:\n break\n else:\n retained_groups = retained_groups.intersection(groups_excluding_ce)\n confused_chars.add(char_cp)\n\n elif GROUP_COMBINED_VALID_CPS.count(char_cp) == 1:\n return chars_group_name\n\n else:\n buffer.add(char_cp)\n\n if len(confused_chars) > 0:\n for retained_group_name in retained_groups:\n if all(cp in VALID_BY_GROUPS[retained_group_name] for cp in buffer):\n # Though the spec doesn't mention this explicitly, if the buffer is\n # empty, the label is confusable. This allows for using ``all()`` here\n # since that yields ``True`` on empty sets.\n # e.g. 
``all(cp in group_cps for cp in set())`` is ``True``\n # for any ``group_cps``.\n if len(buffer) == 0:\n msg = (\n f\"All characters in label are confusable: \"\n f\"'{label_text}' ({chars_group_name} / \"\n )\n msg += (\n f\"{[rgn for rgn in retained_groups]})\"\n if len(retained_groups) > 1\n else f\"{retained_group_name})\"\n )\n else:\n msg = (\n f\"Label is confusable: '{label_text}' \"\n f\"({chars_group_name} / {retained_group_name})\"\n )\n raise InvalidName(msg)\n\n return chars_group_name\n\n\ndef _build_and_validate_label_from_tokens(tokens: List[Token]) -> Label:\n for token in tokens:\n if token.type == TokenType.TEXT:\n # apply NFC normalization to text tokens\n chars = [chr(cp) for cp in token._original_codepoints]\n nfc = NFC(chars)\n token._normalized_codepoints = [ord(c) for c in nfc]\n\n label_type = _validate_tokens_and_get_label_type(tokens)\n\n label = Label()\n label.type = label_type\n label.tokens = tokens\n return label\n\n\ndef _buffer_codepoints_to_chars(buffer: Union[List[int], List[List[int]]]) -> str:\n return \"\".join(\n \"\".join(chr(c) for c in char) if isinstance(char, list) else chr(char)\n for char in buffer\n )\n\n\n# -----\n\n\ndef normalize_name_ensip15(name: str) -> ENSNormalizedName:\n \"\"\"\n Normalize an ENS name according to ENSIP-15\n https://docs.ens.domains/ens-improvement-proposals/ensip-15-normalization-standard\n\n :param str name: the dot-separated ENS name\n :raises InvalidName: if ``name`` has invalid syntax\n \"\"\"\n\n if not name:\n raise InvalidName(\"Name cannot be empty\")\n elif isinstance(name, (bytes, bytearray)):\n name = name.decode(\"utf-8\")\n\n raw_labels = name.split(\".\")\n\n if any(len(label) == 0 for label in raw_labels):\n raise InvalidName(\"Labels cannot be empty\")\n\n normalized_labels = []\n\n for label_str in raw_labels:\n # _input takes the label and breaks it into a list of unicode code points\n # e.g. 
\"xyz👨🏻\" -> [120, 121, 122, 128104, 127995]\n _input = [ord(c) for c in label_str]\n buffer: List[int] = []\n tokens: List[Token] = []\n\n while len(_input) > 0:\n emoji_codepoint = None\n end_index = 1\n while end_index <= len(_input):\n current_emoji_sequence = _input[:end_index]\n\n if len(current_emoji_sequence) > MAX_LEN_EMOJI_PATTERN:\n # if we've reached the max length of all known emoji patterns\n break\n\n # remove 0xFE0F (65039)\n elif 65039 in current_emoji_sequence:\n current_emoji_sequence.remove(65039)\n _input.remove(65039)\n end_index -= 1 # reset end_index after removing 0xFE0F\n\n if current_emoji_sequence in NORMALIZATION_SPEC[\"emoji\"]:\n emoji_codepoint = current_emoji_sequence\n end_index += 1\n\n if emoji_codepoint:\n if len(buffer) > 0:\n # emit `Text` token with values in buffer\n tokens.append(TextToken(buffer))\n buffer = [] # clear the buffer\n\n # emit `Emoji` token with values in emoji_codepoint\n tokens.append(EmojiToken(emoji_codepoint))\n _input = _input[len(emoji_codepoint) :]\n\n else:\n leading_codepoint = _input.pop(0)\n\n if leading_codepoint in NORMALIZATION_SPEC[\"ignored\"]:\n pass\n\n elif leading_codepoint in NORMALIZATION_SPEC[\"mapped\"]:\n mapped = NORMALIZATION_SPEC[\"mapped\"][leading_codepoint]\n for cp in mapped:\n buffer.append(cp)\n\n else:\n if leading_codepoint in VALID_CODEPOINTS:\n buffer.append(leading_codepoint)\n else:\n raise InvalidName(\n f\"Invalid character: '{chr(leading_codepoint)}' | \"\n f\"codepoint {leading_codepoint} ({hex(leading_codepoint)})\"\n )\n\n if len(buffer) > 0 and len(_input) == 0:\n tokens.append(TextToken(buffer))\n\n # create a `Label` instance from tokens\n # - Apply NFC to each `Text` token\n # - Run tokens through \"Validation\" section of ENSIP-15\n normalized_label = _build_and_validate_label_from_tokens(tokens)\n normalized_labels.append(normalized_label)\n\n # - join labels back together after normalization\n return ENSNormalizedName(normalized_labels)\n",
"path": "ens/_normalization.py"
}
] | 8_10 | python | import unittest
import sys
class TestNormalizationConstantRemoval(unittest.TestCase):
def test_whole_confusables_removal(self):
from ens import _normalization
# test if the constant has been removed
self.assertFalse(hasattr(_normalization, 'WHOLE_CONFUSABLES'), "WHOLE_CONFUSABLES should not exist in _normalization module")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestNormalizationConstantRemoval))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
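A quick pre-removal check for record 8_10 — assuming a plain textual scan of the repository is sufficient to confirm the constant is unused:

from pathlib import Path

hits = [
    str(path)
    for path in Path(".").rglob("*.py")
    if "WHOLE_CONFUSABLES" in path.read_text(encoding="utf-8")
]
print(hits)  # expect only ens/_normalization.py before the cleanup
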
https://github.com/teamqurrent/web3.py | Prevent a circular import by reordering the imports in `web3/providers/__init__.py`: move the `.persistent` import below the `.websocket` import. | 33d186b | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | a17ff88f | diff --git a/web3/providers/__init__.py b/web3/providers/__init__.py
--- a/web3/providers/__init__.py
+++ b/web3/providers/__init__.py
@@ -11,9 +11,6 @@ from .base import (
from .ipc import (
IPCProvider,
)
-from .persistent import (
- PersistentConnectionProvider,
-)
from .rpc import (
HTTPProvider,
)
@@ -21,6 +18,9 @@ from .websocket import (
WebsocketProvider,
WebsocketProviderV2,
)
+from .persistent import (
+ PersistentConnectionProvider,
+)
from .auto import (
AutoProvider,
)
| [
{
"content": "from .async_base import (\n AsyncBaseProvider,\n)\nfrom .async_rpc import (\n AsyncHTTPProvider,\n)\nfrom .base import (\n BaseProvider,\n JSONBaseProvider,\n)\nfrom .ipc import (\n IPCProvider,\n)\nfrom .persistent import (\n PersistentConnectionProvider,\n)\nfrom .rpc import (\n HTTPProvider,\n)\nfrom .websocket import (\n WebsocketProvider,\n WebsocketProviderV2,\n)\nfrom .auto import (\n AutoProvider,\n)\n",
"path": "web3/providers/__init__.py"
}
] | 8_11 | python | import unittest
class TestUtilityMethods(unittest.TestCase):
def test_either_set_is_a_subset_with_percentage(self):
import web3._utils.utility_methods as utility_methods
set_a = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
set_b = {1, 2, 3, 4, 5, 6, 7, 8, 9}
# 90% of set_a is in set_b
self.assertTrue(utility_methods.either_set_is_a_subset(set_a, set_b, percentage=90))
def test_transaction_result_formatters_includes_yParity(self):
from web3._utils.method_formatters import TRANSACTION_RESULT_FORMATTERS
self.assertIn('yParity', TRANSACTION_RESULT_FORMATTERS)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestUtilityMethods))
runner = unittest.TextTestRunner()
import sys
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main() |
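A runnable repro of the cycle record 8_11 breaks, using hypothetical module names that mirror the shape of `web3.providers` (built in a temp dir so the script works anywhere):

import pathlib
import subprocess
import sys
import tempfile

root = pathlib.Path(tempfile.mkdtemp())
ws = root / "pkg" / "websocket"
ws.mkdir(parents=True)
(root / "pkg" / "persistent.py").write_text(
    "from pkg.websocket.request_processor import RequestProcessor\n"
    "class PersistentConnectionProvider: pass\n"
)
(ws / "request_processor.py").write_text("class RequestProcessor: pass\n")
(ws / "__init__.py").write_text("from .websocket_v2 import WebsocketProviderV2\n")
(ws / "websocket_v2.py").write_text(
    "from pkg.persistent import PersistentConnectionProvider\n"
    "class WebsocketProviderV2(PersistentConnectionProvider): pass\n"
)
orders = {
    "persistent first": "from .persistent import PersistentConnectionProvider\n"
                        "from .websocket import WebsocketProviderV2\n",
    "websocket first": "from .websocket import WebsocketProviderV2\n"
                       "from .persistent import PersistentConnectionProvider\n",
}
for label, init in orders.items():
    (root / "pkg" / "__init__.py").write_text(init)
    rc = subprocess.run([sys.executable, "-c", "import pkg"],
                        cwd=root, capture_output=True).returncode
    # "persistent first" re-enters pkg.persistent mid-import -> ImportError;
    # "websocket first" (the fix) imports cleanly.
    print(label, "->", "ImportError" if rc else "ok")
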
https://github.com/teamqurrent/web3.py | Improve the response-caching mechanism in the `WebsocketProviderV2` class: ensure that all undesired responses are cached correctly and that a cached response is returned immediately, without calling recv() again. Also implement a timeout for the make_request method so that if a response is not received within the specified time frame, an exception is raised indicating a timeout. | ef022ef | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | f55177b3 | diff --git a/tests/core/providers/test_wsv2_provider.py b/tests/core/providers/test_wsv2_provider.py
--- a/tests/core/providers/test_wsv2_provider.py
+++ b/tests/core/providers/test_wsv2_provider.py
@@ -6,6 +6,9 @@ from eth_utils import (
to_bytes,
)
+from web3.exceptions import (
+ TimeExhausted,
+)
from web3.providers.websocket import (
WebsocketProviderV2,
)
@@ -14,24 +17,28 @@ from web3.types import (
)
+def _mock_ws(provider):
+ # move to top of file when python 3.7 is no longer supported in web3.py
+ from unittest.mock import (
+ AsyncMock,
+ )
+
+ provider._ws = AsyncMock()
+
+
@pytest.mark.asyncio
@pytest.mark.skipif(
# TODO: remove when python 3.7 is no longer supported in web3.py
# python 3.7 is already sunset so this feels like a reasonable tradeoff
sys.version_info < (3, 8),
- reason="Mock args behave differently in python 3.7 but test should still pass.",
+ reason="Uses AsyncMock, not supported by python 3.7",
)
async def test_async_make_request_caches_all_undesired_responses_and_returns_desired():
- # move to top of file when python 3.7 is no longer supported in web3.py
- from unittest.mock import (
- AsyncMock,
- )
-
provider = WebsocketProviderV2("ws://mocked")
method_under_test = provider.make_request
- provider._ws = AsyncMock()
+ _mock_ws(provider)
undesired_responses_count = 10
ws_recv_responses = [
to_bytes(
@@ -90,3 +97,25 @@ async def test_async_make_request_returns_cached_response_with_no_recv_if_cached
assert len(provider._request_processor._raw_response_cache) == 0
assert not provider._ws.recv.called # type: ignore
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+ # TODO: remove when python 3.7 is no longer supported in web3.py
+ # python 3.7 is already sunset so this feels like a reasonable tradeoff
+ sys.version_info < (3, 8),
+ reason="Uses AsyncMock, not supported by python 3.7",
+)
+async def test_async_make_request_times_out_of_while_loop_looking_for_response():
+ provider = WebsocketProviderV2("ws://mocked", call_timeout=0.1)
+
+ method_under_test = provider.make_request
+
+ _mock_ws(provider)
+ provider._ws.recv.side_effect = lambda *args, **kwargs: b'{"jsonrpc": "2.0"}'
+
+ with pytest.raises(
+ TimeExhausted,
+ match="Timed out waiting for response with request id `0` after 0.1 seconds.",
+ ):
+ await method_under_test(RPCEndpoint("some_method"), ["desired_params"])
diff --git a/web3/providers/persistent.py b/web3/providers/persistent.py
--- a/web3/providers/persistent.py
+++ b/web3/providers/persistent.py
@@ -34,7 +34,7 @@ class PersistentConnectionProvider(AsyncJSONBaseProvider, ABC):
self,
endpoint_uri: str,
request_cache_size: int = 100,
- call_timeout: int = DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,
+ call_timeout: float = DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,
) -> None:
super().__init__()
self.endpoint_uri = endpoint_uri
diff --git a/web3/providers/websocket/websocket_v2.py b/web3/providers/websocket/websocket_v2.py
--- a/web3/providers/websocket/websocket_v2.py
+++ b/web3/providers/websocket/websocket_v2.py
@@ -27,9 +27,11 @@ from web3._utils.caching import (
)
from web3.exceptions import (
ProviderConnectionError,
+ TimeExhausted,
Web3ValidationError,
)
from web3.providers.persistent import (
+ DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,
PersistentConnectionProvider,
)
from web3.types import (
@@ -64,7 +66,7 @@ class WebsocketProviderV2(PersistentConnectionProvider):
self,
endpoint_uri: Optional[Union[URI, str]] = None,
websocket_kwargs: Optional[Dict[str, Any]] = None,
- call_timeout: Optional[int] = None,
+ call_timeout: Optional[float] = DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,
) -> None:
self.endpoint_uri = URI(endpoint_uri)
if self.endpoint_uri is None:
@@ -167,21 +169,43 @@ class WebsocketProviderV2(PersistentConnectionProvider):
return response
async def _get_response_for_request_id(self, request_id: RPCId) -> RPCResponse:
- response_id = None
- response = None
- while response_id != request_id:
- response = await self._ws_recv()
- response_id = response.get("id")
+ async def _match_response_id_to_request_id() -> RPCResponse:
+ response_id = None
+ response = None
+ while response_id != request_id:
+ response = await self._ws_recv()
+ response_id = response.get("id")
+
+ if response_id == request_id:
+ break
+ else:
+ # cache all responses that are not the desired response
+ await self._request_processor.cache_raw_response(
+ response,
+ )
+ await asyncio.sleep(0.1)
+
+ return response
- if response_id == request_id:
- break
- else:
- # cache all responses that are not the desired response
- await self._request_processor.cache_raw_response(
- response,
- )
-
- return response
+ try:
+ # Enters a while loop, looking for a response id match to the request id.
+ # If the provider does not give responses with matching ids, this will
+ # hang forever. The JSON-RPC spec requires that providers respond with
+ # the same id that was sent in the request, but we need to handle these
+ # "bad" cases somewhat gracefully.
+ timeout = (
+ self.call_timeout
+ if self.call_timeout and self.call_timeout <= 20
+ else 20
+ )
+ return await asyncio.wait_for(_match_response_id_to_request_id(), timeout)
+ except asyncio.TimeoutError:
+ raise TimeExhausted(
+ f"Timed out waiting for response with request id `{request_id}` after "
+ f"{self.call_timeout} seconds. This is likely due to the provider not "
+ "returning a response with the same id that was sent in the request, "
+ "which is required by the JSON-RPC spec."
+ )
async def _ws_recv(self) -> RPCResponse:
return json.loads(
| [
{
"content": "import json\nimport pytest\nimport sys\n\nfrom eth_utils import (\n to_bytes,\n)\n\nfrom web3.providers.websocket import (\n WebsocketProviderV2,\n)\nfrom web3.types import (\n RPCEndpoint,\n)\n\n\n@pytest.mark.asyncio\n@pytest.mark.skipif(\n # TODO: remove when python 3.7 is no longer supported in web3.py\n # python 3.7 is already sunset so this feels like a reasonable tradeoff\n sys.version_info < (3, 8),\n reason=\"Mock args behave differently in python 3.7 but test should still pass.\",\n)\nasync def test_async_make_request_caches_all_undesired_responses_and_returns_desired():\n # move to top of file when python 3.7 is no longer supported in web3.py\n from unittest.mock import (\n AsyncMock,\n )\n\n provider = WebsocketProviderV2(\"ws://mocked\")\n\n method_under_test = provider.make_request\n\n provider._ws = AsyncMock()\n undesired_responses_count = 10\n ws_recv_responses = [\n to_bytes(\n text=json.dumps(\n {\n \"jsonrpc\": \"2.0\",\n \"method\": \"eth_subscription\",\n \"params\": {\"subscription\": \"0x1\", \"result\": f\"0x{i}\"},\n }\n )\n )\n for i in range(0, undesired_responses_count)\n ]\n # The first request we make should have an id of `0`, expect the response to match\n # that id. Append it as the last response in the list.\n ws_recv_responses.append(b'{\"jsonrpc\": \"2.0\", \"id\":0, \"result\": \"0x1337\"}')\n provider._ws.recv.side_effect = ws_recv_responses\n\n response = await method_under_test(RPCEndpoint(\"some_method\"), [\"desired_params\"])\n assert response == json.loads(ws_recv_responses.pop()) # pop the expected response\n\n assert (\n len(provider._request_processor._raw_response_cache)\n == len(ws_recv_responses)\n == undesired_responses_count\n )\n\n for (\n _cache_key,\n cached_response,\n ) in provider._request_processor._raw_response_cache.items():\n # assert all cached responses are in the list of responses we received\n assert to_bytes(text=json.dumps(cached_response)) in ws_recv_responses\n\n\n@pytest.mark.asyncio\n@pytest.mark.skipif(\n # TODO: remove when python 3.7 is no longer supported in web3.py\n # python 3.7 is already sunset so this feels like a reasonable tradeoff\n sys.version_info < (3, 8),\n reason=\"Uses AsyncMock, not supported by python 3.7\",\n)\nasync def test_async_make_request_returns_cached_response_with_no_recv_if_cached():\n provider = WebsocketProviderV2(\"ws://mocked\")\n\n method_under_test = provider.make_request\n\n _mock_ws(provider)\n\n # cache the response, so we should get it immediately & should never call `recv()`\n desired_response = {\"jsonrpc\": \"2.0\", \"id\": 0, \"result\": \"0x1337\"}\n await provider._request_processor.cache_raw_response(desired_response)\n\n response = await method_under_test(RPCEndpoint(\"some_method\"), [\"desired_params\"])\n assert response == desired_response\n\n assert len(provider._request_processor._raw_response_cache) == 0\n assert not provider._ws.recv.called # type: ignore\n",
"path": "tests/core/providers/test_wsv2_provider.py"
},
{
"content": "from abc import (\n ABC,\n)\nimport logging\nfrom typing import (\n Optional,\n)\n\nfrom websockets.legacy.client import (\n WebSocketClientProtocol,\n)\n\nfrom web3.providers.async_base import (\n AsyncJSONBaseProvider,\n)\nfrom web3.providers.websocket.request_processor import (\n RequestProcessor,\n)\nfrom web3.types import (\n RPCResponse,\n)\n\nDEFAULT_PERSISTENT_CONNECTION_TIMEOUT = 20\n\n\nclass PersistentConnectionProvider(AsyncJSONBaseProvider, ABC):\n logger = logging.getLogger(\"web3.providers.PersistentConnectionProvider\")\n has_persistent_connection = True\n\n _ws: Optional[WebSocketClientProtocol] = None\n _request_processor: RequestProcessor\n\n def __init__(\n self,\n endpoint_uri: str,\n request_cache_size: int = 100,\n call_timeout: int = DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,\n ) -> None:\n super().__init__()\n self.endpoint_uri = endpoint_uri\n self._request_processor = RequestProcessor(\n self,\n request_info_cache_size=request_cache_size,\n )\n self.call_timeout = call_timeout\n\n async def connect(self) -> None:\n raise NotImplementedError(\"Must be implemented by subclasses\")\n\n async def disconnect(self) -> None:\n raise NotImplementedError(\"Must be implemented by subclasses\")\n\n async def _ws_recv(self) -> RPCResponse:\n raise NotImplementedError(\"Must be implemented by subclasses\")\n",
"path": "web3/providers/persistent.py"
},
{
"content": "import asyncio\nimport json\nimport logging\nimport os\nfrom typing import (\n Any,\n Dict,\n Optional,\n Union,\n)\n\nfrom eth_typing import (\n URI,\n)\nfrom toolz import (\n merge,\n)\nfrom websockets.client import (\n connect,\n)\nfrom websockets.exceptions import (\n WebSocketException,\n)\n\nfrom web3._utils.caching import (\n generate_cache_key,\n)\nfrom web3.exceptions import (\n ProviderConnectionError,\n Web3ValidationError,\n)\nfrom web3.providers.persistent import (\n PersistentConnectionProvider,\n)\nfrom web3.types import (\n RPCEndpoint,\n RPCId,\n RPCResponse,\n)\n\nDEFAULT_PING_INTERVAL = 30 # 30 seconds\nDEFAULT_PING_TIMEOUT = 300 # 5 minutes\n\nVALID_WEBSOCKET_URI_PREFIXES = {\"ws://\", \"wss://\"}\nRESTRICTED_WEBSOCKET_KWARGS = {\"uri\", \"loop\"}\nDEFAULT_WEBSOCKET_KWARGS = {\n # set how long to wait between pings from the server\n \"ping_interval\": DEFAULT_PING_INTERVAL,\n # set how long to wait without a pong response before closing the connection\n \"ping_timeout\": DEFAULT_PING_TIMEOUT,\n}\n\n\ndef get_default_endpoint() -> URI:\n return URI(os.environ.get(\"WEB3_WS_PROVIDER_URI\", \"ws://127.0.0.1:8546\"))\n\n\nclass WebsocketProviderV2(PersistentConnectionProvider):\n logger = logging.getLogger(\"web3.providers.WebsocketProviderV2\")\n is_async: bool = True\n _max_connection_retries: int = 5\n\n def __init__(\n self,\n endpoint_uri: Optional[Union[URI, str]] = None,\n websocket_kwargs: Optional[Dict[str, Any]] = None,\n call_timeout: Optional[int] = None,\n ) -> None:\n self.endpoint_uri = URI(endpoint_uri)\n if self.endpoint_uri is None:\n self.endpoint_uri = get_default_endpoint()\n\n if not any(\n self.endpoint_uri.startswith(prefix)\n for prefix in VALID_WEBSOCKET_URI_PREFIXES\n ):\n raise Web3ValidationError(\n f\"Websocket endpoint uri must begin with 'ws://' or 'wss://': \"\n f\"{self.endpoint_uri}\"\n )\n\n if websocket_kwargs is not None:\n found_restricted_keys = set(websocket_kwargs).intersection(\n RESTRICTED_WEBSOCKET_KWARGS\n )\n if found_restricted_keys:\n raise Web3ValidationError(\n f\"Found restricted keys for websocket_kwargs: \"\n f\"{found_restricted_keys}.\"\n )\n\n self.websocket_kwargs = merge(DEFAULT_WEBSOCKET_KWARGS, websocket_kwargs or {})\n\n super().__init__(endpoint_uri, call_timeout=call_timeout)\n\n def __str__(self) -> str:\n return f\"Websocket connection: {self.endpoint_uri}\"\n\n async def is_connected(self, show_traceback: bool = False) -> bool:\n if not self._ws:\n return False\n\n try:\n await self._ws.pong()\n return True\n\n except WebSocketException as e:\n if show_traceback:\n raise ProviderConnectionError(\n f\"Error connecting to endpoint: '{self.endpoint_uri}'\"\n ) from e\n return False\n\n async def connect(self) -> None:\n _connection_attempts = 0\n _backoff_rate_change = 1.75\n _backoff_time = 1.75\n\n while _connection_attempts != self._max_connection_retries:\n try:\n _connection_attempts += 1\n self._ws = await connect(self.endpoint_uri, **self.websocket_kwargs)\n break\n except WebSocketException as e:\n if _connection_attempts == self._max_connection_retries:\n raise ProviderConnectionError(\n f\"Could not connect to endpoint: {self.endpoint_uri}. \"\n f\"Retries exceeded max of {self._max_connection_retries}.\"\n ) from e\n self.logger.info(\n f\"Could not connect to endpoint: {self.endpoint_uri}. 
Retrying in \"\n f\"{round(_backoff_time, 1)} seconds.\",\n exc_info=True,\n )\n await asyncio.sleep(_backoff_time)\n _backoff_time *= _backoff_rate_change\n\n async def disconnect(self) -> None:\n await self._ws.close()\n self._ws = None\n\n # clear the request information cache after disconnecting\n self._request_processor.clear_caches()\n self.logger.debug(\n f'Successfully disconnected from endpoint: \"{self.endpoint_uri}\" '\n \"and the request processor transient caches were cleared.\"\n )\n\n async def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:\n request_data = self.encode_rpc_request(method, params)\n\n if self._ws is None:\n await self.connect()\n\n await asyncio.wait_for(self._ws.send(request_data), timeout=self.call_timeout)\n\n current_request_id = json.loads(request_data)[\"id\"]\n request_cache_key = generate_cache_key(current_request_id)\n\n if request_cache_key in self._request_processor._raw_response_cache:\n # if response is already cached, pop it from cache\n response = await self._request_processor.pop_raw_response(request_cache_key)\n else:\n # else, wait for the desired response, caching all others along the way\n response = await self._get_response_for_request_id(current_request_id)\n\n return response\n\n async def _get_response_for_request_id(self, request_id: RPCId) -> RPCResponse:\n response_id = None\n response = None\n while response_id != request_id:\n response = await self._ws_recv()\n response_id = response.get(\"id\")\n\n if response_id == request_id:\n break\n else:\n # cache all responses that are not the desired response\n await self._request_processor.cache_raw_response(\n response,\n )\n\n return response\n\n async def _ws_recv(self) -> RPCResponse:\n return json.loads(\n await asyncio.wait_for(self._ws.recv(), timeout=self.call_timeout)\n )\n",
"path": "web3/providers/websocket/websocket_v2.py"
}
] | 8_12 | python | import asyncio
import sys
import pytest
def _mock_ws(provider):
from unittest.mock import AsyncMock
provider._ws = AsyncMock()
@pytest.mark.asyncio
@pytest.mark.skipif(
sys.version_info < (3, 8),
reason="Uses AsyncMock, not supported by python 3.7",
)
async def test_async_make_request_times_out_of_while_loop_looking_for_response():
from web3.exceptions import (
TimeExhausted,
)
from web3.providers.websocket import (
WebsocketProviderV2,
)
from web3.types import (
RPCEndpoint,
)
timeout = 0.001
provider = WebsocketProviderV2("ws://mocked", call_timeout=timeout)
method_under_test = provider.make_request
_mock_ws(provider)
provider._ws.recv = lambda *args, **kwargs: asyncio.sleep(1)
with pytest.raises(
TimeExhausted,
match=r"Timed out waiting for response with request id `0` after "
rf"{timeout} seconds",
):
await method_under_test(RPCEndpoint("some_method"), ["desired_params"])
def main():
import pytest
# Run the pytest tests programmatically
exit_code = pytest.main(["-v", __file__])
# Exit with the appropriate status code
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
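A condensed sketch of the timeout pattern the 8_12 patch wraps around the response-matching loop (simplified; web3 raises its own `TimeExhausted` where this raises `TimeoutError`):

import asyncio

async def wait_for_response(recv, request_id, call_timeout=20.0):
    async def match():
        while True:
            response = await recv()
            if response.get("id") == request_id:
                return response
            # the real provider caches the undesired response here
            await asyncio.sleep(0.1)

    try:
        return await asyncio.wait_for(match(), timeout=call_timeout)
    except asyncio.TimeoutError:
        raise TimeoutError(
            f"Timed out waiting for response with request id `{request_id}` "
            f"after {call_timeout} seconds."
        )

async def demo():
    async def recv():  # stub that never returns a matching id
        return {"jsonrpc": "2.0"}
    await wait_for_response(recv, request_id=0, call_timeout=0.1)

# asyncio.run(demo())  # raises TimeoutError after ~0.1 seconds
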
https://github.com/teamqurrent/web3.py | Modify the `disconnect` method of `WebsocketProviderV2` so that it no longer writes to the log when catching exceptions during message-listener cancellation. | fe96e1e | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 9ae0e13 | diff --git a/web3/providers/websocket/websocket_v2.py b/web3/providers/websocket/websocket_v2.py
--- a/web3/providers/websocket/websocket_v2.py
+++ b/web3/providers/websocket/websocket_v2.py
@@ -154,12 +154,8 @@ class WebsocketProviderV2(PersistentConnectionProvider):
try:
self._message_listener_task.cancel()
await self._message_listener_task
- except (asyncio.CancelledError, StopAsyncIteration) as e:
- self.logger.info(
- "Websocket message listener background task caught and ignored an "
- f"exception during cancellation: {e}"
- )
-
+ except (asyncio.CancelledError, StopAsyncIteration):
+ pass
self._request_processor.clear_caches()
@async_handle_request_caching
| [
{
"content": "import asyncio\nimport json\nimport logging\nimport os\nfrom typing import (\n Any,\n Dict,\n Optional,\n Union,\n)\n\nfrom eth_typing import (\n URI,\n)\nfrom toolz import (\n merge,\n)\nfrom websockets.client import (\n connect,\n)\nfrom websockets.exceptions import (\n WebSocketException,\n)\n\nfrom web3._utils.caching import (\n async_handle_request_caching,\n generate_cache_key,\n)\nfrom web3.exceptions import (\n ProviderConnectionError,\n TimeExhausted,\n Web3ValidationError,\n)\nfrom web3.providers.persistent import (\n DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,\n PersistentConnectionProvider,\n)\nfrom web3.types import (\n RPCEndpoint,\n RPCId,\n RPCResponse,\n)\n\nDEFAULT_PING_INTERVAL = 30 # 30 seconds\nDEFAULT_PING_TIMEOUT = 300 # 5 minutes\n\nVALID_WEBSOCKET_URI_PREFIXES = {\"ws://\", \"wss://\"}\nRESTRICTED_WEBSOCKET_KWARGS = {\"uri\", \"loop\"}\nDEFAULT_WEBSOCKET_KWARGS = {\n # set how long to wait between pings from the server\n \"ping_interval\": DEFAULT_PING_INTERVAL,\n # set how long to wait without a pong response before closing the connection\n \"ping_timeout\": DEFAULT_PING_TIMEOUT,\n}\n\n\ndef get_default_endpoint() -> URI:\n return URI(os.environ.get(\"WEB3_WS_PROVIDER_URI\", \"ws://127.0.0.1:8546\"))\n\n\nclass WebsocketProviderV2(PersistentConnectionProvider):\n logger = logging.getLogger(\"web3.providers.WebsocketProviderV2\")\n is_async: bool = True\n _max_connection_retries: int = 5\n\n def __init__(\n self,\n endpoint_uri: Optional[Union[URI, str]] = None,\n websocket_kwargs: Optional[Dict[str, Any]] = None,\n request_timeout: Optional[float] = DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,\n raise_listener_task_exceptions: bool = False,\n ) -> None:\n self.endpoint_uri = URI(endpoint_uri)\n if self.endpoint_uri is None:\n self.endpoint_uri = get_default_endpoint()\n\n if not any(\n self.endpoint_uri.startswith(prefix)\n for prefix in VALID_WEBSOCKET_URI_PREFIXES\n ):\n raise Web3ValidationError(\n \"Websocket endpoint uri must begin with 'ws://' or 'wss://': \"\n f\"{self.endpoint_uri}\"\n )\n\n if websocket_kwargs is not None:\n found_restricted_keys = set(websocket_kwargs).intersection(\n RESTRICTED_WEBSOCKET_KWARGS\n )\n if found_restricted_keys:\n raise Web3ValidationError(\n \"Found restricted keys for websocket_kwargs: \"\n f\"{found_restricted_keys}.\"\n )\n\n self.websocket_kwargs = merge(DEFAULT_WEBSOCKET_KWARGS, websocket_kwargs or {})\n self.raise_listener_task_exceptions = raise_listener_task_exceptions\n\n super().__init__(endpoint_uri, request_timeout=request_timeout)\n\n def __str__(self) -> str:\n return f\"Websocket connection: {self.endpoint_uri}\"\n\n async def is_connected(self, show_traceback: bool = False) -> bool:\n if not self._ws:\n return False\n\n try:\n await self._ws.pong()\n return True\n\n except WebSocketException as e:\n if show_traceback:\n raise ProviderConnectionError(\n f\"Error connecting to endpoint: '{self.endpoint_uri}'\"\n ) from e\n return False\n\n async def connect(self) -> None:\n _connection_attempts = 0\n _backoff_rate_change = 1.75\n _backoff_time = 1.75\n\n while _connection_attempts != self._max_connection_retries:\n try:\n _connection_attempts += 1\n self._ws = await connect(self.endpoint_uri, **self.websocket_kwargs)\n self._message_listener_task = asyncio.create_task(\n self._ws_message_listener()\n )\n break\n except WebSocketException as e:\n if _connection_attempts == self._max_connection_retries:\n raise ProviderConnectionError(\n f\"Could not connect to endpoint: {self.endpoint_uri}. 
\"\n f\"Retries exceeded max of {self._max_connection_retries}.\"\n ) from e\n self.logger.info(\n f\"Could not connect to endpoint: {self.endpoint_uri}. Retrying in \"\n f\"{round(_backoff_time, 1)} seconds.\",\n exc_info=True,\n )\n await asyncio.sleep(_backoff_time)\n _backoff_time *= _backoff_rate_change\n\n async def disconnect(self) -> None:\n if self._ws is not None and not self._ws.closed:\n await self._ws.close()\n self._ws = None\n self.logger.debug(\n f'Successfully disconnected from endpoint: \"{self.endpoint_uri}'\n )\n\n try:\n self._message_listener_task.cancel()\n await self._message_listener_task\n except (asyncio.CancelledError, StopAsyncIteration) as e:\n self.logger.info(\n \"Websocket message listener background task caught and ignored an \"\n f\"exception during cancellation: {e}\"\n )\n\n self._request_processor.clear_caches()\n\n @async_handle_request_caching\n async def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:\n request_data = self.encode_rpc_request(method, params)\n\n if self._ws is None:\n raise ProviderConnectionError(\n \"Connection to websocket has not been initiated for the provider.\"\n )\n\n await asyncio.wait_for(\n self._ws.send(request_data), timeout=self.request_timeout\n )\n\n current_request_id = json.loads(request_data)[\"id\"]\n response = await self._get_response_for_request_id(current_request_id)\n\n return response\n\n async def _get_response_for_request_id(self, request_id: RPCId) -> RPCResponse:\n async def _match_response_id_to_request_id() -> RPCResponse:\n request_cache_key = generate_cache_key(request_id)\n\n while True:\n # sleep(0) here seems to be the most efficient way to yield control\n # back to the event loop while waiting for the response to be in the\n # queue.\n await asyncio.sleep(0)\n\n if request_cache_key in self._request_processor._request_response_cache:\n self.logger.debug(\n f\"Popping response for id {request_id} from cache.\"\n )\n popped_response = self._request_processor.pop_raw_response(\n cache_key=request_cache_key,\n )\n return popped_response\n\n try:\n # Add the request timeout around the while loop that checks the request\n # cache and tried to recv(). If the request is neither in the cache, nor\n # received within the request_timeout, raise ``TimeExhausted``.\n return await asyncio.wait_for(\n _match_response_id_to_request_id(), self.request_timeout\n )\n except asyncio.TimeoutError:\n raise TimeExhausted(\n f\"Timed out waiting for response with request id `{request_id}` after \"\n f\"{self.request_timeout} second(s). This may be due to the provider \"\n \"not returning a response with the same id that was sent in the \"\n \"request or an exception raised during the request was caught and \"\n \"allowed to continue.\"\n )\n\n async def _ws_message_listener(self) -> None:\n self.logger.info(\n \"Websocket listener background task started. 
Storing all messages in \"\n \"appropriate request processor queues / caches to be processed.\"\n )\n try:\n async for raw_message in self._ws:\n # sleep(0) here seems to be the most efficient way to yield control\n # back to the event loop to share the loop with other tasks.\n await asyncio.sleep(0)\n\n response = json.loads(raw_message)\n subscription = response.get(\"method\") == \"eth_subscription\"\n await self._request_processor.cache_raw_response(\n response, subscription=subscription\n )\n except Exception as e:\n if self.raise_listener_task_exceptions:\n # If ``True``, raise; else, error log & keep task alive\n raise e\n\n self.logger.error(\n \"Exception caught in listener, error logging and keeping listener \"\n f\"background task alive.\\n error={e}\"\n )\n",
"path": "web3/providers/websocket/websocket_v2.py"
}
] | 8_13 | python | import sys
from _ast import AsyncFunctionDef
import inspect
import ast
from typing import Any

class DisconnectAnalyzer(ast.NodeVisitor):
    def __init__(self):
        self.has_pass = False
        self.has_logger_info = False

    def visit_Pass(self, node):
        self.has_pass = True

    def visit_Attribute(self, node):
        if isinstance(node.value, ast.Attribute):
            if node.value.attr == 'logger' and node.attr == 'info':
                self.has_logger_info = True
        self.generic_visit(node)

    def analyze(self, node):
        self.visit(node)
        return self.has_pass and not self.has_logger_info

class WebSocketProviderV2Analyzer(ast.NodeVisitor):
    def __init__(self):
        self.verified = False

    def visit_AsyncFunctionDef(self, node: AsyncFunctionDef) -> Any:
        if node.name == "disconnect":
            self.verified = DisconnectAnalyzer().analyze(node)

    def analyze(self, node) -> bool:
        self.visit(node)
        return self.verified

def main():
    from web3.providers.websocket.websocket_v2 import WebsocketProviderV2
    class_source = inspect.getsource(WebsocketProviderV2)
    tree = ast.parse(class_source)
    visitor = WebSocketProviderV2Analyzer()
    visitor.visit(tree)
    if visitor.analyze(tree):
        sys.exit(0)
    else:
        sys.exit(1)

if __name__ == '__main__':
    main()
|
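For context, the AST checks in the test above accept only a `disconnect` implementation whose cancellation handler is a bare `pass` with no `self.logger.info` call. Below is a minimal sketch of that shape, inferred from the test rather than taken from the repository's actual solution patch:

    # Sketch only -- inferred from the test's AST checks, not the real patch.
    async def disconnect(self) -> None:
        if self._ws is not None and not self._ws.closed:
            await self._ws.close()
            self._ws = None
        try:
            self._message_listener_task.cancel()
            await self._message_listener_task
        except (asyncio.CancelledError, StopAsyncIteration):
            pass  # swallow cancellation quietly instead of logging it
        self._request_processor.clear_caches()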
https://github.com/teamqurrent/web3.py | The pluggy dependency is no longer needed. Remove pluggy from `setup.py` and verify that it no longer appears anywhere in the file. | be76120 | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 4ee4c865 | diff --git a/newsfragments/2992.internal.rst b/newsfragments/2992.internal.rst
new file mode 100644
--- /dev/null
+++ b/newsfragments/2992.internal.rst
@@ -0,0 +1 @@
+Removed `pluggy` from dev requirements
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -37,7 +37,6 @@ extras_require = {
"tox>=3.18.0",
"tqdm>4.32",
"twine>=1.13",
- "pluggy==0.13.1",
"when-changed>=0.3.0",
"build>=0.9.0",
],
| [
{
"content": "#!/usr/bin/env python\nfrom setuptools import (\n find_packages,\n setup,\n)\n\nextras_require = {\n \"tester\": [\n \"eth-tester[py-evm]==v0.9.0-b.1\",\n \"py-geth>=3.11.0\",\n ],\n \"linter\": [\n \"black>=22.1.0\",\n \"flake8==3.8.3\",\n \"isort>=5.11.0\",\n \"mypy==0.910\",\n \"types-setuptools>=57.4.4\",\n \"types-requests>=2.26.1\",\n \"types-protobuf==3.19.13\",\n ],\n \"docs\": [\n \"sphinx>=5.3.0\",\n \"sphinx_rtd_theme>=1.0.0\",\n \"towncrier>=21,<22\",\n ],\n \"dev\": [\n \"bumpversion\",\n \"flaky>=3.7.0\",\n \"hypothesis>=3.31.2\",\n \"importlib-metadata<5.0;python_version<'3.8'\",\n \"pytest>=7.0.0\",\n \"pytest-asyncio>=0.18.1\",\n \"pytest-mock>=1.10\",\n \"pytest-watch>=4.2\",\n \"pytest-xdist>=1.29\",\n \"setuptools>=38.6.0\",\n \"tox>=3.18.0\",\n \"tqdm>4.32\",\n \"twine>=1.13\",\n \"pluggy==0.13.1\",\n \"when-changed>=0.3.0\",\n \"build>=0.9.0\",\n ],\n \"ipfs\": [\n \"ipfshttpclient==0.8.0a2\",\n ],\n}\n\nextras_require[\"dev\"] = (\n extras_require[\"tester\"]\n + extras_require[\"linter\"]\n + extras_require[\"docs\"]\n + extras_require[\"ipfs\"]\n + extras_require[\"dev\"]\n)\n\nwith open(\"./README.md\") as readme:\n long_description = readme.read()\n\nsetup(\n name=\"web3\",\n # *IMPORTANT*: Don't manually change the version here. Use the 'bumpversion' utility.\n version=\"6.4.0\",\n description=\"\"\"web3.py\"\"\",\n long_description_content_type=\"text/markdown\",\n long_description=long_description,\n author=\"Piper Merriam\",\n author_email=\"pipermerriam@gmail.com\",\n url=\"https://github.com/ethereum/web3.py\",\n include_package_data=True,\n install_requires=[\n \"aiohttp>=3.7.4.post0\",\n \"eth-abi>=4.0.0\",\n \"eth-account>=0.8.0\",\n \"eth-hash[pycryptodome]>=0.5.1\",\n \"eth-typing>=3.0.0\",\n \"eth-utils>=2.1.0\",\n \"hexbytes>=0.1.0\",\n \"jsonschema>=4.0.0\",\n \"lru-dict>=1.1.6\",\n \"protobuf>=4.21.6\",\n \"pywin32>=223;platform_system=='Windows'\",\n \"requests>=2.16.0\",\n # remove typing_extensions after python_requires>=3.8, see web3._utils.compat\n \"typing-extensions>=3.7.4.1,<5;python_version<'3.8'\",\n \"websockets>=10.0.0\",\n ],\n python_requires=\">=3.7.2\",\n extras_require=extras_require,\n py_modules=[\"web3\", \"ens\", \"ethpm\"],\n entry_points={\"pytest11\": [\"pytest_ethereum = web3.tools.pytest_ethereum.plugins\"]},\n license=\"MIT\",\n zip_safe=False,\n keywords=\"ethereum\",\n packages=find_packages(exclude=[\"tests\", \"tests.*\"]),\n package_data={\"web3\": [\"py.typed\"]},\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Natural Language :: English\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n ],\n)\n",
"path": "setup.py"
}
] | 8_2 | python | import unittest
import sys

class TestSetupPy(unittest.TestCase):
    def test_pluggy_not_in_setup_py(self):
        with open('setup.py', 'r') as file:
            setup_py_contents = file.read()
        self.assertNotIn('pluggy', setup_py_contents)

def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestSetupPy))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)

if __name__ == '__main__':
    main()
|
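For reference, the fix above is a one-line deletion of the `"pluggy==0.13.1"` pin from the `dev` extras in `setup.py`; a quick sanity check in the spirit of the test (a sketch, not part of the dataset row) is:

    # Confirm the pin is gone after editing setup.py.
    with open("setup.py") as f:
        assert "pluggy" not in f.read(), "pluggy should no longer appear in setup.py"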
https://github.com/teamqurrent/web3.py | The middlewares `request_parameter_normalizer` and `pythonic` should no longer be included in the default stack. Remove them from the list of default middlewares returned in `manager.py`. | 674e342 | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 070e2288 | diff --git a/tests/core/manager/test_default_middlewares.py b/tests/core/manager/test_default_middlewares.py
--- a/tests/core/manager/test_default_middlewares.py
+++ b/tests/core/manager/test_default_middlewares.py
@@ -11,19 +11,15 @@ from web3.middleware import (
buffered_gas_estimate_middleware,
gas_price_strategy_middleware,
name_to_address_middleware,
- pythonic_middleware,
- request_parameter_normalizer,
validation_middleware,
)
def test_default_sync_middlwares(w3):
expected_middlewares = [
- (request_parameter_normalizer, "request_param_normalizer"),
(gas_price_strategy_middleware, "gas_price_strategy"),
(name_to_address_middleware(w3), "name_to_address"),
(attrdict_middleware, "attrdict"),
- (pythonic_middleware, "pythonic"),
(validation_middleware, "validation"),
(abi_middleware, "abi"),
(buffered_gas_estimate_middleware, "gas_estimate"),
diff --git a/tests/core/providers/test_http_provider.py b/tests/core/providers/test_http_provider.py
--- a/tests/core/providers/test_http_provider.py
+++ b/tests/core/providers/test_http_provider.py
@@ -13,9 +13,29 @@ from web3 import (
from web3._utils import (
request,
)
+from web3.eth import (
+ Eth,
+)
from web3.exceptions import (
ProviderConnectionError,
)
+from web3.geth import (
+ Geth,
+ GethAdmin,
+ GethPersonal,
+ GethTxPool,
+)
+from web3.middleware import (
+ abi_middleware,
+ attrdict_middleware,
+ buffered_gas_estimate_middleware,
+ gas_price_strategy_middleware,
+ name_to_address_middleware,
+ validation_middleware,
+)
+from web3.net import (
+ Net,
+)
from web3.providers import (
HTTPProvider,
)
@@ -39,6 +59,42 @@ def test_init_kwargs():
assert w3.manager.provider == provider
+def test_web3_with_http_provider_has_default_middlewares_and_modules() -> None:
+ adapter = HTTPAdapter(pool_connections=20, pool_maxsize=20)
+ session = Session()
+ session.mount("http://", adapter)
+ session.mount("https://", adapter)
+
+ provider = HTTPProvider(endpoint_uri=URI, session=session)
+ w3 = Web3(provider)
+
+ # assert default modules
+ assert isinstance(w3.eth, Eth)
+ assert isinstance(w3.net, Net)
+ assert isinstance(w3.geth, Geth)
+ assert isinstance(w3.geth.admin, GethAdmin)
+ assert isinstance(w3.geth.personal, GethPersonal)
+ assert isinstance(w3.geth.txpool, GethTxPool)
+
+ # assert default middleware
+
+ # the following length check should fail and will need to be added to once more
+ # async middlewares are added to the defaults
+ assert len(w3.middleware_onion.middlewares) == 6
+
+ assert (
+ w3.middleware_onion.get("gas_price_strategy") == gas_price_strategy_middleware
+ )
+ assert (
+ w3.middleware_onion.get("name_to_address").__name__
+ == name_to_address_middleware(w3).__name__
+ )
+ assert w3.middleware_onion.get("attrdict") == attrdict_middleware
+ assert w3.middleware_onion.get("validation") == validation_middleware
+ assert w3.middleware_onion.get("gas_estimate") == buffered_gas_estimate_middleware
+ assert w3.middleware_onion.get("abi") == abi_middleware
+
+
def test_user_provided_session():
adapter = HTTPAdapter(pool_connections=20, pool_maxsize=20)
session = Session()
diff --git a/web3/manager.py b/web3/manager.py
--- a/web3/manager.py
+++ b/web3/manager.py
@@ -35,8 +35,6 @@ from web3.middleware import (
buffered_gas_estimate_middleware,
gas_price_strategy_middleware,
name_to_address_middleware,
- pythonic_middleware,
- request_parameter_normalizer,
validation_middleware,
)
from web3.providers import (
@@ -139,13 +137,11 @@ class RequestManager:
Leaving ens unspecified will prevent the middleware from resolving names.
"""
return [
- (request_parameter_normalizer, "request_param_normalizer"), # Delete
(gas_price_strategy_middleware, "gas_price_strategy"),
(name_to_address_middleware(w3), "name_to_address"), # Add Async
(attrdict_middleware, "attrdict"),
- (pythonic_middleware, "pythonic"), # Delete
(validation_middleware, "validation"),
- (abi_middleware, "abi"), # Delete
+ (abi_middleware, "abi"),
(buffered_gas_estimate_middleware, "gas_estimate"),
]
| [
{
"content": "from web3.manager import (\n RequestManager,\n)\nfrom web3.middleware import (\n abi_middleware,\n async_attrdict_middleware,\n async_buffered_gas_estimate_middleware,\n async_gas_price_strategy_middleware,\n async_validation_middleware,\n attrdict_middleware,\n buffered_gas_estimate_middleware,\n gas_price_strategy_middleware,\n name_to_address_middleware,\n pythonic_middleware,\n request_parameter_normalizer,\n validation_middleware,\n)\n\n\ndef test_default_sync_middlwares(w3):\n expected_middlewares = [\n (request_parameter_normalizer, \"request_param_normalizer\"),\n (gas_price_strategy_middleware, \"gas_price_strategy\"),\n (name_to_address_middleware(w3), \"name_to_address\"),\n (attrdict_middleware, \"attrdict\"),\n (pythonic_middleware, \"pythonic\"),\n (validation_middleware, \"validation\"),\n (abi_middleware, \"abi\"),\n (buffered_gas_estimate_middleware, \"gas_estimate\"),\n ]\n\n default_middlewares = RequestManager.default_middlewares(w3)\n\n for x in range(len(default_middlewares)):\n assert default_middlewares[x][0].__name__ == expected_middlewares[x][0].__name__\n assert default_middlewares[x][1] == expected_middlewares[x][1]\n\n\ndef test_default_async_middlwares():\n expected_middlewares = [\n (async_gas_price_strategy_middleware, \"gas_price_strategy\"),\n (async_attrdict_middleware, \"attrdict\"),\n (async_validation_middleware, \"validation\"),\n (async_buffered_gas_estimate_middleware, \"gas_estimate\"),\n ]\n\n default_middlewares = RequestManager.async_default_middlewares()\n\n for x in range(len(default_middlewares)):\n assert default_middlewares[x][0].__name__ == expected_middlewares[x][0].__name__\n assert default_middlewares[x][1] == expected_middlewares[x][1]\n",
"path": "tests/core/manager/test_default_middlewares.py"
},
{
"content": "import pytest\n\nfrom requests import (\n Session,\n)\nfrom requests.adapters import (\n HTTPAdapter,\n)\n\nfrom web3 import (\n Web3,\n)\nfrom web3._utils import (\n request,\n)\nfrom web3.exceptions import (\n ProviderConnectionError,\n)\nfrom web3.providers import (\n HTTPProvider,\n)\n\nURI = \"http://mynode.local:8545\"\n\n\ndef test_no_args():\n provider = HTTPProvider()\n w3 = Web3(provider)\n assert w3.manager.provider == provider\n assert not w3.manager.provider.is_async\n assert not w3.is_connected()\n with pytest.raises(ProviderConnectionError):\n w3.is_connected(show_traceback=True)\n\n\ndef test_init_kwargs():\n provider = HTTPProvider(endpoint_uri=URI, request_kwargs={\"timeout\": 60})\n w3 = Web3(provider)\n assert w3.manager.provider == provider\n\n\ndef test_user_provided_session():\n adapter = HTTPAdapter(pool_connections=20, pool_maxsize=20)\n session = Session()\n session.mount(\"http://\", adapter)\n session.mount(\"https://\", adapter)\n\n provider = HTTPProvider(endpoint_uri=URI, session=session)\n w3 = Web3(provider)\n assert w3.manager.provider == provider\n\n session = request.cache_and_return_session(URI)\n adapter = session.get_adapter(URI)\n assert isinstance(adapter, HTTPAdapter)\n assert adapter._pool_connections == 20\n assert adapter._pool_maxsize == 20\n",
"path": "tests/core/providers/test_http_provider.py"
},
{
"content": "import logging\nfrom typing import (\n TYPE_CHECKING,\n Any,\n Callable,\n List,\n Optional,\n Sequence,\n Tuple,\n Union,\n cast,\n)\n\nfrom eth_utils.toolz import (\n pipe,\n)\nfrom hexbytes import (\n HexBytes,\n)\n\nfrom web3.datastructures import (\n NamedElementOnion,\n)\nfrom web3.exceptions import (\n BadResponseFormat,\n MethodUnavailable,\n)\nfrom web3.middleware import (\n abi_middleware,\n async_attrdict_middleware,\n async_buffered_gas_estimate_middleware,\n async_gas_price_strategy_middleware,\n async_validation_middleware,\n attrdict_middleware,\n buffered_gas_estimate_middleware,\n gas_price_strategy_middleware,\n name_to_address_middleware,\n pythonic_middleware,\n request_parameter_normalizer,\n validation_middleware,\n)\nfrom web3.providers import (\n AutoProvider,\n)\nfrom web3.types import (\n AsyncMiddleware,\n AsyncMiddlewareOnion,\n Middleware,\n MiddlewareOnion,\n RPCEndpoint,\n RPCResponse,\n)\n\nif TYPE_CHECKING:\n from web3 import ( # noqa: F401\n AsyncWeb3,\n Web3,\n )\n from web3.providers import ( # noqa: F401\n AsyncBaseProvider,\n BaseProvider,\n )\n\n\nNULL_RESPONSES = [None, HexBytes(\"0x\"), \"0x\"]\n\n\ndef apply_error_formatters(\n error_formatters: Callable[..., Any],\n response: RPCResponse,\n) -> RPCResponse:\n if error_formatters:\n formatted_resp = pipe(response, error_formatters)\n return formatted_resp\n else:\n return response\n\n\ndef apply_null_result_formatters(\n null_result_formatters: Callable[..., Any],\n response: RPCResponse,\n params: Optional[Any] = None,\n) -> RPCResponse:\n if null_result_formatters:\n formatted_resp = pipe(params, null_result_formatters)\n return formatted_resp\n else:\n return response\n\n\nclass RequestManager:\n logger = logging.getLogger(\"web3.RequestManager\")\n\n middleware_onion: Union[\n MiddlewareOnion, AsyncMiddlewareOnion, NamedElementOnion[None, None]\n ]\n\n def __init__(\n self,\n w3: Union[\"AsyncWeb3\", \"Web3\"],\n provider: Optional[Union[\"BaseProvider\", \"AsyncBaseProvider\"]] = None,\n middlewares: Optional[\n Union[\n Sequence[Tuple[Middleware, str]], Sequence[Tuple[AsyncMiddleware, str]]\n ]\n ] = None,\n ) -> None:\n self.w3 = w3\n\n if provider is None:\n self.provider = AutoProvider()\n else:\n self.provider = provider\n\n if middlewares is None:\n middlewares = (\n self.async_default_middlewares()\n if self.provider.is_async\n else self.default_middlewares(cast(\"Web3\", w3))\n )\n\n self.middleware_onion = NamedElementOnion(middlewares)\n\n w3: Union[\"AsyncWeb3\", \"Web3\"] = None\n _provider = None\n\n @property\n def provider(self) -> Union[\"BaseProvider\", \"AsyncBaseProvider\"]:\n return self._provider\n\n @provider.setter\n def provider(self, provider: Union[\"BaseProvider\", \"AsyncBaseProvider\"]) -> None:\n self._provider = provider\n\n @staticmethod\n def default_middlewares(w3: \"Web3\") -> List[Tuple[Middleware, str]]:\n \"\"\"\n List the default middlewares for the request manager.\n Leaving ens unspecified will prevent the middleware from resolving names.\n \"\"\"\n return [\n (request_parameter_normalizer, \"request_param_normalizer\"), # Delete\n (gas_price_strategy_middleware, \"gas_price_strategy\"),\n (name_to_address_middleware(w3), \"name_to_address\"), # Add Async\n (attrdict_middleware, \"attrdict\"),\n (pythonic_middleware, \"pythonic\"), # Delete\n (validation_middleware, \"validation\"),\n (abi_middleware, \"abi\"), # Delete\n (buffered_gas_estimate_middleware, \"gas_estimate\"),\n ]\n\n @staticmethod\n def async_default_middlewares() -> 
List[Tuple[AsyncMiddleware, str]]:\n \"\"\"\n List the default async middlewares for the request manager.\n \"\"\"\n return [\n (async_gas_price_strategy_middleware, \"gas_price_strategy\"),\n (async_attrdict_middleware, \"attrdict\"),\n (async_validation_middleware, \"validation\"),\n (async_buffered_gas_estimate_middleware, \"gas_estimate\"),\n ]\n\n #\n # Provider requests and response\n #\n def _make_request(\n self, method: Union[RPCEndpoint, Callable[..., RPCEndpoint]], params: Any\n ) -> RPCResponse:\n provider = cast(\"BaseProvider\", self.provider)\n request_func = provider.request_func(\n cast(\"Web3\", self.w3), cast(MiddlewareOnion, self.middleware_onion)\n )\n self.logger.debug(f\"Making request. Method: {method}\")\n return request_func(method, params)\n\n async def _coro_make_request(\n self, method: Union[RPCEndpoint, Callable[..., RPCEndpoint]], params: Any\n ) -> RPCResponse:\n provider = cast(\"AsyncBaseProvider\", self.provider)\n request_func = await provider.request_func(\n cast(\"AsyncWeb3\", self.w3),\n cast(AsyncMiddlewareOnion, self.middleware_onion),\n )\n self.logger.debug(f\"Making request. Method: {method}\")\n\n return await request_func(method, params)\n\n @staticmethod\n def formatted_response(\n response: RPCResponse,\n params: Any,\n error_formatters: Optional[Callable[..., Any]] = None,\n null_result_formatters: Optional[Callable[..., Any]] = None,\n ) -> Any:\n if \"error\" in response:\n apply_error_formatters(error_formatters, response)\n\n # guard against eth-tester case - eth-tester returns a string\n # with no code, so can't parse what the error is.\n if isinstance(response[\"error\"], dict):\n resp_code = response[\"error\"].get(\"code\")\n if resp_code == -32601:\n raise MethodUnavailable(response[\"error\"])\n raise ValueError(response[\"error\"])\n # NULL_RESPONSES includes None, so return False here as the default\n # so we don't apply the null_result_formatters if there is no 'result' key\n elif response.get(\"result\", False) in NULL_RESPONSES:\n # null_result_formatters raise either a BlockNotFound\n # or a TransactionNotFound error, depending on the method called\n apply_null_result_formatters(null_result_formatters, response, params)\n return response[\"result\"]\n elif response.get(\"result\") is not None:\n return response[\"result\"]\n else:\n raise BadResponseFormat(\n \"The response was in an unexpected format and unable to be parsed. \"\n f\"The raw response is: {response}\"\n )\n\n def request_blocking(\n self,\n method: Union[RPCEndpoint, Callable[..., RPCEndpoint]],\n params: Any,\n error_formatters: Optional[Callable[..., Any]] = None,\n null_result_formatters: Optional[Callable[..., Any]] = None,\n ) -> Any:\n \"\"\"\n Make a synchronous request using the provider\n \"\"\"\n response = self._make_request(method, params)\n return self.formatted_response(\n response, params, error_formatters, null_result_formatters\n )\n\n async def coro_request(\n self,\n method: Union[RPCEndpoint, Callable[..., RPCEndpoint]],\n params: Any,\n error_formatters: Optional[Callable[..., Any]] = None,\n null_result_formatters: Optional[Callable[..., Any]] = None,\n ) -> Any:\n \"\"\"\n Couroutine for making a request using the provider\n \"\"\"\n response = await self._coro_make_request(method, params)\n return self.formatted_response(\n response, params, error_formatters, null_result_formatters\n )\n",
"path": "web3/manager.py"
}
] | 8_3 | python | import unittest
import sys

class TestMiddlewareRemoval(unittest.TestCase):
    def test_removed_middlewares_not_in_default_stack(self):
        from web3 import Web3
        from web3.middleware import (
            pythonic_middleware,
            request_parameter_normalizer,
        )
        w3 = Web3()
        middleware_stack = w3.middleware_onion.middlewares
        # Check that the removed middlewares are absent from the default
        # middleware stack by comparing the actual middleware function references
        middleware_functions = [middleware[0] for middleware in middleware_stack]
        self.assertNotIn(pythonic_middleware, middleware_functions)
        self.assertNotIn(request_parameter_normalizer, middleware_functions)

def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestMiddlewareRemoval))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)

if __name__ == '__main__':
    main()
|
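After the patch above, `RequestManager.default_middlewares` returns exactly six entries; excerpted from the solution patch for clarity:

    return [
        (gas_price_strategy_middleware, "gas_price_strategy"),
        (name_to_address_middleware(w3), "name_to_address"),  # Add Async
        (attrdict_middleware, "attrdict"),
        (validation_middleware, "validation"),
        (abi_middleware, "abi"),
        (buffered_gas_estimate_middleware, "gas_estimate"),
    ]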
https://github.com/teamqurrent/web3.py | Make the formatting middleware in `formatting.py` more robust by ensuring it correctly handles responses whose result is None. This prevents errors or unexpected behavior when a result formatter would otherwise be applied to a None value. | 0a1da2d | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 12f3702 | diff --git a/newsfragments/2546.bugfix.rst b/newsfragments/2546.bugfix.rst
new file mode 100644
--- /dev/null
+++ b/newsfragments/2546.bugfix.rst
@@ -0,0 +1 @@
+Handle `None` in the formatting middleware
\ No newline at end of file
diff --git a/tests/core/middleware/test_formatting_middleware.py b/tests/core/middleware/test_formatting_middleware.py
new file mode 100644
--- /dev/null
+++ b/tests/core/middleware/test_formatting_middleware.py
@@ -0,0 +1,126 @@
+import pytest
+from unittest.mock import (
+ Mock,
+)
+
+from web3 import (
+ Web3,
+)
+from web3.middleware import (
+ construct_error_generator_middleware,
+ construct_formatting_middleware,
+ construct_result_generator_middleware,
+)
+from web3.providers.base import (
+ BaseProvider,
+)
+from web3.types import (
+ RPCEndpoint,
+)
+
+
+class DummyProvider(BaseProvider):
+ def make_request(self, method, params):
+ raise NotImplementedError(f"Cannot make request for {method}:{params}")
+
+
+@pytest.fixture
+def w3():
+ return Web3(provider=DummyProvider(), middlewares=[])
+
+
+def test_formatting_middleware(w3):
+ # No formatters by default
+ w3.middleware_onion.add(construct_formatting_middleware())
+ w3.middleware_onion.add(
+ construct_result_generator_middleware(
+ {
+ "test_endpoint": lambda method, params: "done",
+ }
+ )
+ )
+
+ expected = "done"
+ actual = w3.manager.request_blocking("test_endpoint", [])
+ assert actual == expected
+
+
+def test_formatting_middleware_no_method(w3):
+ w3.middleware_onion.add(construct_formatting_middleware())
+
+ # Formatting middleware requires an endpoint
+ with pytest.raises(NotImplementedError):
+ w3.manager.request_blocking("test_endpoint", [])
+
+
+def test_formatting_middleware_request_formatters(w3):
+ callable_mock = Mock()
+ w3.middleware_onion.add(
+ construct_result_generator_middleware(
+ {RPCEndpoint("test_endpoint"): lambda method, params: "done"}
+ )
+ )
+
+ w3.middleware_onion.add(
+ construct_formatting_middleware(
+ request_formatters={RPCEndpoint("test_endpoint"): callable_mock}
+ )
+ )
+
+ expected = "done"
+ actual = w3.manager.request_blocking("test_endpoint", ["param1"])
+
+ callable_mock.assert_called_once_with(["param1"])
+ assert actual == expected
+
+
+def test_formatting_middleware_result_formatters(w3):
+ w3.middleware_onion.add(
+ construct_result_generator_middleware(
+ {RPCEndpoint("test_endpoint"): lambda method, params: "done"}
+ )
+ )
+ w3.middleware_onion.add(
+ construct_formatting_middleware(
+ result_formatters={RPCEndpoint("test_endpoint"): lambda x: f"STATUS:{x}"}
+ )
+ )
+
+ expected = "STATUS:done"
+ actual = w3.manager.request_blocking("test_endpoint", [])
+ assert actual == expected
+
+
+def test_formatting_middleware_result_formatters_for_none(w3):
+ w3.middleware_onion.add(
+ construct_result_generator_middleware(
+ {RPCEndpoint("test_endpoint"): lambda method, params: None}
+ )
+ )
+ w3.middleware_onion.add(
+ construct_formatting_middleware(
+ result_formatters={RPCEndpoint("test_endpoint"): lambda x: hex(x)}
+ )
+ )
+
+ expected = None
+ actual = w3.manager.request_blocking("test_endpoint", [])
+ assert actual == expected
+
+
+def test_formatting_middleware_error_formatters(w3):
+ w3.middleware_onion.add(
+ construct_error_generator_middleware(
+ {RPCEndpoint("test_endpoint"): lambda method, params: "error"}
+ )
+ )
+ w3.middleware_onion.add(
+ construct_formatting_middleware(
+ result_formatters={RPCEndpoint("test_endpoint"): lambda x: f"STATUS:{x}"}
+ )
+ )
+
+ expected = "error"
+ with pytest.raises(ValueError) as err:
+ w3.manager.request_blocking("test_endpoint", [])
+ assert str(err.value) == expected
diff --git a/web3/middleware/formatting.py b/web3/middleware/formatting.py
--- a/web3/middleware/formatting.py
+++ b/web3/middleware/formatting.py
@@ -50,7 +50,11 @@ def _apply_response_formatters(
response, response_type, method_response_formatter(appropriate_response)
)
- if "result" in response and method in result_formatters:
+ if (
+ "result" in response
+ and response["result"] is not None
+ and method in result_formatters
+ ):
return _format_response("result", result_formatters[method])
elif "error" in response and method in error_formatters:
return _format_response("error", error_formatters[method])
| [
{
"content": "from typing import (\n TYPE_CHECKING,\n Any,\n Callable,\n Coroutine,\n Optional,\n)\n\nfrom eth_utils.toolz import (\n assoc,\n merge,\n)\n\nfrom web3.types import (\n AsyncMiddleware,\n AsyncMiddlewareCoroutine,\n Formatters,\n FormattersDict,\n Literal,\n Middleware,\n RPCEndpoint,\n RPCResponse,\n)\n\nif TYPE_CHECKING:\n from web3 import ( # noqa: F401\n AsyncWeb3,\n Web3,\n )\n\nFORMATTER_DEFAULTS: FormattersDict = {\n \"request_formatters\": {},\n \"result_formatters\": {},\n \"error_formatters\": {},\n}\n\n\ndef _apply_response_formatters(\n method: RPCEndpoint,\n response: RPCResponse,\n result_formatters: Formatters,\n error_formatters: Formatters,\n) -> RPCResponse:\n def _format_response(\n response_type: Literal[\"result\", \"error\"],\n method_response_formatter: Callable[..., Any],\n ) -> RPCResponse:\n appropriate_response = response[response_type]\n return assoc(\n response, response_type, method_response_formatter(appropriate_response)\n )\n\n if \"result\" in response and method in result_formatters:\n return _format_response(\"result\", result_formatters[method])\n elif \"error\" in response and method in error_formatters:\n return _format_response(\"error\", error_formatters[method])\n else:\n return response\n\n\n# --- sync -- #\n\n\ndef construct_formatting_middleware(\n request_formatters: Optional[Formatters] = None,\n result_formatters: Optional[Formatters] = None,\n error_formatters: Optional[Formatters] = None,\n) -> Middleware:\n def ignore_web3_in_standard_formatters(\n _w3: \"Web3\",\n _method: RPCEndpoint,\n ) -> FormattersDict:\n return dict(\n request_formatters=request_formatters or {},\n result_formatters=result_formatters or {},\n error_formatters=error_formatters or {},\n )\n\n return construct_web3_formatting_middleware(ignore_web3_in_standard_formatters)\n\n\ndef construct_web3_formatting_middleware(\n web3_formatters_builder: Callable[[\"Web3\", RPCEndpoint], FormattersDict],\n) -> Middleware:\n def formatter_middleware(\n make_request: Callable[[RPCEndpoint, Any], Any],\n w3: \"Web3\",\n ) -> Callable[[RPCEndpoint, Any], RPCResponse]:\n def middleware(method: RPCEndpoint, params: Any) -> RPCResponse:\n formatters = merge(\n FORMATTER_DEFAULTS,\n web3_formatters_builder(w3, method),\n )\n request_formatters = formatters.pop(\"request_formatters\")\n\n if method in request_formatters:\n formatter = request_formatters[method]\n params = formatter(params)\n response = make_request(method, params)\n\n return _apply_response_formatters(\n method=method, response=response, **formatters\n )\n\n return middleware\n\n return formatter_middleware\n\n\n# --- async --- #\n\n\nasync def async_construct_formatting_middleware(\n request_formatters: Optional[Formatters] = None,\n result_formatters: Optional[Formatters] = None,\n error_formatters: Optional[Formatters] = None,\n) -> AsyncMiddleware:\n async def ignore_web3_in_standard_formatters(\n _async_w3: \"AsyncWeb3\",\n _method: RPCEndpoint,\n ) -> FormattersDict:\n return dict(\n request_formatters=request_formatters or {},\n result_formatters=result_formatters or {},\n error_formatters=error_formatters or {},\n )\n\n return await async_construct_web3_formatting_middleware(\n ignore_web3_in_standard_formatters\n )\n\n\nasync def async_construct_web3_formatting_middleware(\n async_web3_formatters_builder: Callable[\n [\"AsyncWeb3\", RPCEndpoint], Coroutine[Any, Any, FormattersDict]\n ]\n) -> Callable[\n [Callable[[RPCEndpoint, Any], Any], \"AsyncWeb3\"],\n Coroutine[Any, Any, 
AsyncMiddlewareCoroutine],\n]:\n async def formatter_middleware(\n make_request: Callable[[RPCEndpoint, Any], Any],\n w3: \"AsyncWeb3\",\n ) -> AsyncMiddlewareCoroutine:\n async def middleware(method: RPCEndpoint, params: Any) -> RPCResponse:\n formatters = merge(\n FORMATTER_DEFAULTS,\n await async_web3_formatters_builder(w3, method),\n )\n request_formatters = formatters.pop(\"request_formatters\")\n\n if method in request_formatters:\n formatter = request_formatters[method]\n params = formatter(params)\n response = await make_request(method, params)\n\n return _apply_response_formatters(\n method=method, response=response, **formatters\n )\n\n return middleware\n\n return formatter_middleware\n",
"path": "web3/middleware/formatting.py"
}
] | 8_4 | python | import unittest
import sys

class TestFormattingMiddleware(unittest.TestCase):
    def test_formatting_middleware_handles_none(self):
        from web3 import Web3
        from web3.middleware import (
            construct_formatting_middleware,
            construct_result_generator_middleware,
        )
        from web3.types import RPCEndpoint
        w3 = Web3()
        w3.middleware_onion.add(
            construct_result_generator_middleware(
                {RPCEndpoint("test_endpoint"): lambda method, params: None}
            )
        )
        w3.middleware_onion.add(
            construct_formatting_middleware(
                result_formatters={RPCEndpoint("test_endpoint"): lambda x: hex(x)}
            )
        )
        expected = None
        actual = w3.manager.request_blocking("test_endpoint", [])
        self.assertEqual(actual, expected)

def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestFormattingMiddleware))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)

if __name__ == '__main__':
    main()
|
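The core of the fix above is one extra condition in `_apply_response_formatters`: a result formatter is applied only when the response carries a non-None result, so `hex(None)`-style failures can no longer occur. Excerpted from the solution patch:

    if (
        "result" in response
        and response["result"] is not None
        and method in result_formatters
    ):
        return _format_response("result", result_formatters[method])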
https://github.com/teamqurrent/web3.py | Ensure that the time-based gas price strategy returns the default gas price when the chain has no transactions to sample. Modify `time_based_gas_price_strategy` in `time_based.py` to return w3.eth.gas_price when the latest block number is 0, which indicates an empty transaction list. | 66f2391 | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 5dca5243 | diff --git a/newsfragments/1149.bugfix.rst b/newsfragments/1149.bugfix.rst
new file mode 100644
--- /dev/null
+++ b/newsfragments/1149.bugfix.rst
@@ -0,0 +1 @@
+Return `w3.eth.gas_price` when calculating time based gas price strategy for an empty chain.
\ No newline at end of file
diff --git a/tests/core/gas-strategies/test_time_based_gas_price_strategy.py b/tests/core/gas-strategies/test_time_based_gas_price_strategy.py
--- a/tests/core/gas-strategies/test_time_based_gas_price_strategy.py
+++ b/tests/core/gas-strategies/test_time_based_gas_price_strategy.py
@@ -171,6 +171,46 @@ def test_time_based_gas_price_strategy(strategy_params, expected):
assert actual == expected
+def _get_initial_block(method, params):
+ return {
+ "hash": constants.HASH_ZERO,
+ "number": 0,
+ "parentHash": None,
+ "transactions": [],
+ "miner": "0x" + "Aa" * 20,
+ "timestamp": 0,
+ }
+
+
+def _get_gas_price(method, params):
+ return 4321
+
+
+def test_time_based_gas_price_strategy_without_transactions():
+ fixture_middleware = construct_result_generator_middleware(
+ {
+ "eth_getBlockByHash": _get_initial_block,
+ "eth_getBlockByNumber": _get_initial_block,
+ "eth_gasPrice": _get_gas_price,
+ }
+ )
+
+ w3 = Web3(
+ provider=BaseProvider(),
+ middlewares=[fixture_middleware],
+ )
+
+ time_based_gas_price_strategy = construct_time_based_gas_price_strategy(
+ max_wait_seconds=80,
+ sample_size=5,
+ probability=50,
+ weighted=True,
+ )
+ w3.eth.set_gas_price_strategy(time_based_gas_price_strategy)
+ actual = w3.eth.generate_gas_price()
+ assert actual == w3.eth.gas_price
+
+
@pytest.mark.parametrize(
"strategy_params_zero,expected_exception_message",
(
diff --git a/web3/gas_strategies/time_based.py b/web3/gas_strategies/time_based.py
--- a/web3/gas_strategies/time_based.py
+++ b/web3/gas_strategies/time_based.py
@@ -216,6 +216,10 @@ def construct_time_based_gas_price_strategy(
"""
def time_based_gas_price_strategy(w3: Web3, transaction_params: TxParams) -> Wei:
+ # return gas price when no transactions available to sample
+ if w3.eth.get_block("latest")["number"] == 0:
+ return w3.eth.gas_price
+
if weighted:
avg_block_time = _get_weighted_avg_block_time(w3, sample_size=sample_size)
else:
| [
{
"content": "import pytest\n\nfrom web3 import (\n Web3,\n constants,\n)\nfrom web3.exceptions import (\n Web3ValidationError,\n)\nfrom web3.gas_strategies.time_based import (\n construct_time_based_gas_price_strategy,\n)\nfrom web3.middleware import (\n construct_result_generator_middleware,\n)\nfrom web3.providers.base import (\n BaseProvider,\n)\n\n\ndef _get_block_by_something(method, params):\n block_identifier = params[0]\n if block_identifier == \"latest\" or block_identifier == \"0x5\":\n return {\n \"hash\": \"0x0000000000000000000000000000000000000000000000000000000000000005\", # noqa: E501\n \"number\": 5,\n \"parentHash\": \"0x0000000000000000000000000000000000000000000000000000000000000004\", # noqa: E501\n \"transactions\": [\n {\"gasPrice\": 70},\n {\"gasPrice\": 60},\n {\"gasPrice\": 60},\n {\"gasPrice\": 60},\n {\"gasPrice\": 15},\n {\"gasPrice\": 5},\n {\"gasPrice\": 50},\n ],\n \"miner\": \"0x\" + \"AA\" * 20,\n \"timestamp\": 120,\n }\n elif (\n block_identifier\n == \"0x0000000000000000000000000000000000000000000000000000000000000004\"\n or block_identifier == \"0x4\"\n ):\n return {\n \"hash\": \"0x0000000000000000000000000000000000000000000000000000000000000004\", # noqa: E501\n \"number\": 4,\n \"parentHash\": \"0x0000000000000000000000000000000000000000000000000000000000000003\", # noqa: E501\n \"transactions\": [\n {\"gasPrice\": 100},\n {\"gasPrice\": 80},\n {\"gasPrice\": 60},\n ],\n \"miner\": \"0x\" + \"BB\" * 20,\n \"timestamp\": 90,\n }\n elif (\n block_identifier\n == \"0x0000000000000000000000000000000000000000000000000000000000000003\"\n or block_identifier == \"0x3\"\n ):\n return {\n \"hash\": \"0x0000000000000000000000000000000000000000000000000000000000000003\", # noqa: E501\n \"number\": 3,\n \"parentHash\": \"0x0000000000000000000000000000000000000000000000000000000000000002\", # noqa: E501\n \"transactions\": [\n {\"gasPrice\": 100},\n ],\n \"miner\": \"0x\" + \"Cc\" * 20,\n \"timestamp\": 60,\n }\n elif (\n block_identifier\n == \"0x0000000000000000000000000000000000000000000000000000000000000002\"\n or block_identifier == \"0x2\"\n ):\n return {\n \"hash\": \"0x0000000000000000000000000000000000000000000000000000000000000002\", # noqa: E501\n \"number\": 2,\n \"parentHash\": \"0x0000000000000000000000000000000000000000000000000000000000000001\", # noqa: E501\n \"transactions\": [],\n \"miner\": \"0x\" + \"Bb\" * 20,\n \"timestamp\": 30,\n }\n elif (\n block_identifier\n == \"0x0000000000000000000000000000000000000000000000000000000000000001\"\n or block_identifier == \"0x1\"\n ):\n return {\n \"hash\": \"0x0000000000000000000000000000000000000000000000000000000000000001\", # noqa: E501\n \"number\": 1,\n \"parentHash\": constants.HASH_ZERO,\n \"transactions\": [\n {\"gasPrice\": 30},\n {\"gasPrice\": 35},\n {\"gasPrice\": 65},\n ],\n \"miner\": \"0x\" + \"Aa\" * 20,\n \"timestamp\": 15,\n }\n elif block_identifier == \"0x0\":\n return {\n \"hash\": constants.HASH_ZERO,\n \"number\": 0,\n \"parentHash\": None,\n \"transactions\": [\n {\"gasPrice\": 30},\n {\"gasPrice\": 50},\n {\"gasPrice\": 60},\n {\"gasPrice\": 30},\n {\"gasPrice\": 50},\n {\"gasPrice\": 60},\n {\"gasPrice\": 30},\n {\"gasPrice\": 50},\n {\"gasPrice\": 60},\n {\"gasPrice\": 30},\n {\"gasPrice\": 54},\n {\"gasPrice\": 10000000000000000000000},\n ],\n \"miner\": \"0x\" + \"Aa\" * 20,\n \"timestamp\": 0,\n }\n else:\n assert False\n\n\n@pytest.mark.parametrize(\n \"strategy_params,expected\",\n (\n # 80 second wait times\n (dict(max_wait_seconds=80, sample_size=5, probability=98), 
70),\n (dict(max_wait_seconds=80, sample_size=5, probability=90), 25),\n (dict(max_wait_seconds=80, sample_size=5, probability=50), 11),\n # 60 second wait times\n (dict(max_wait_seconds=60, sample_size=5, probability=98), 92),\n (dict(max_wait_seconds=60, sample_size=5, probability=90), 49),\n (dict(max_wait_seconds=60, sample_size=5, probability=50), 11),\n # 40 second wait times\n (dict(max_wait_seconds=40, sample_size=5, probability=98), 100),\n (dict(max_wait_seconds=40, sample_size=5, probability=90), 81),\n (dict(max_wait_seconds=40, sample_size=5, probability=50), 11),\n # 20 second wait times\n (dict(max_wait_seconds=20, sample_size=5, probability=98), 100),\n (dict(max_wait_seconds=20, sample_size=5, probability=90), 100),\n (dict(max_wait_seconds=20, sample_size=5, probability=50), 36),\n # 80 second wait times, weighted\n (dict(max_wait_seconds=80, sample_size=5, probability=98, weighted=True), 92),\n (dict(max_wait_seconds=80, sample_size=5, probability=90, weighted=True), 49),\n (dict(max_wait_seconds=80, sample_size=5, probability=50, weighted=True), 11),\n ),\n)\ndef test_time_based_gas_price_strategy(strategy_params, expected):\n fixture_middleware = construct_result_generator_middleware(\n {\n \"eth_getBlockByHash\": _get_block_by_something,\n \"eth_getBlockByNumber\": _get_block_by_something,\n }\n )\n\n w3 = Web3(\n provider=BaseProvider(),\n middlewares=[fixture_middleware],\n )\n\n time_based_gas_price_strategy = construct_time_based_gas_price_strategy(\n **strategy_params,\n )\n w3.eth.set_gas_price_strategy(time_based_gas_price_strategy)\n actual = w3.eth.generate_gas_price()\n assert actual == expected\n\n\n@pytest.mark.parametrize(\n \"strategy_params_zero,expected_exception_message\",\n (\n # 120 second wait times, 0 sample_size\n (\n dict(max_wait_seconds=80, sample_size=0, probability=98),\n \"Constrained sample size is 0\",\n ),\n (\n dict(max_wait_seconds=80, sample_size=0, probability=90),\n \"Constrained sample size is 0\",\n ),\n (\n dict(max_wait_seconds=80, sample_size=0, probability=50),\n \"Constrained sample size is 0\",\n ),\n # 60 second wait times, 0 sample_size\n (\n dict(max_wait_seconds=60, sample_size=0, probability=98),\n \"Constrained sample size is 0\",\n ),\n (\n dict(max_wait_seconds=60, sample_size=0, probability=90),\n \"Constrained sample size is 0\",\n ),\n (\n dict(max_wait_seconds=60, sample_size=0, probability=50),\n \"Constrained sample size is 0\",\n ),\n # 40 second wait times, 0 sample_size\n (\n dict(max_wait_seconds=40, sample_size=0, probability=98),\n \"Constrained sample size is 0\",\n ),\n (\n dict(max_wait_seconds=40, sample_size=0, probability=90),\n \"Constrained sample size is 0\",\n ),\n (\n dict(max_wait_seconds=40, sample_size=0, probability=50),\n \"Constrained sample size is 0\",\n ),\n # 20 second wait times, 0 sample_size\n (\n dict(max_wait_seconds=20, sample_size=0, probability=98),\n \"Constrained sample size is 0\",\n ),\n (\n dict(max_wait_seconds=20, sample_size=0, probability=90),\n \"Constrained sample size is 0\",\n ),\n (\n dict(max_wait_seconds=20, sample_size=0, probability=50),\n \"Constrained sample size is 0\",\n ),\n ),\n)\ndef test_time_based_gas_price_strategy_zero_sample(\n strategy_params_zero, expected_exception_message\n):\n with pytest.raises(Web3ValidationError) as excinfo:\n fixture_middleware = construct_result_generator_middleware(\n {\n \"eth_getBlockByHash\": _get_block_by_something,\n \"eth_getBlockByNumber\": _get_block_by_something,\n }\n )\n\n w3 = Web3(\n 
provider=BaseProvider(),\n middlewares=[fixture_middleware],\n )\n time_based_gas_price_strategy_zero = construct_time_based_gas_price_strategy(\n **strategy_params_zero,\n )\n w3.eth.set_gas_price_strategy(time_based_gas_price_strategy_zero)\n w3.eth.generate_gas_price()\n assert str(excinfo.value) == expected_exception_message\n",
"path": "tests/core/gas-strategies/test_time_based_gas_price_strategy.py"
},
{
"content": "import collections\nimport math\nimport operator\nfrom typing import (\n Iterable,\n Sequence,\n Tuple,\n)\n\nfrom eth_typing import (\n ChecksumAddress,\n)\nfrom eth_utils import (\n to_tuple,\n)\nfrom eth_utils.toolz import (\n curry,\n groupby,\n sliding_window,\n)\nfrom hexbytes import (\n HexBytes,\n)\n\nfrom web3 import (\n Web3,\n)\nfrom web3._utils.math import (\n percentile,\n)\nfrom web3.exceptions import (\n InsufficientData,\n Web3ValidationError,\n)\nfrom web3.types import (\n BlockNumber,\n GasPriceStrategy,\n TxParams,\n Wei,\n)\n\nMinerData = collections.namedtuple(\n \"MinerData\", [\"miner\", \"num_blocks\", \"min_gas_price\", \"low_percentile_gas_price\"]\n)\nProbability = collections.namedtuple(\"Probability\", [\"gas_price\", \"prob\"])\n\n\ndef _get_avg_block_time(w3: Web3, sample_size: int) -> float:\n latest = w3.eth.get_block(\"latest\")\n\n constrained_sample_size = min(sample_size, latest[\"number\"])\n if constrained_sample_size == 0:\n raise Web3ValidationError(\"Constrained sample size is 0\")\n\n oldest = w3.eth.get_block(BlockNumber(latest[\"number\"] - constrained_sample_size))\n return (latest[\"timestamp\"] - oldest[\"timestamp\"]) / constrained_sample_size\n\n\ndef _get_weighted_avg_block_time(w3: Web3, sample_size: int) -> float:\n latest_block_number = w3.eth.get_block(\"latest\")[\"number\"]\n constrained_sample_size = min(sample_size, latest_block_number)\n if constrained_sample_size == 0:\n raise Web3ValidationError(\"Constrained sample size is 0\")\n oldest_block = w3.eth.get_block(\n BlockNumber(latest_block_number - constrained_sample_size)\n )\n oldest_block_number = oldest_block[\"number\"]\n prev_timestamp = oldest_block[\"timestamp\"]\n weighted_sum = 0.0\n sum_of_weights = 0.0\n for i in range(oldest_block_number + 1, latest_block_number + 1):\n curr_timestamp = w3.eth.get_block(BlockNumber(i))[\"timestamp\"]\n time = curr_timestamp - prev_timestamp\n weight = (i - oldest_block_number) / constrained_sample_size\n weighted_sum += time * weight\n sum_of_weights += weight\n prev_timestamp = curr_timestamp\n return weighted_sum / sum_of_weights\n\n\ndef _get_raw_miner_data(\n w3: Web3, sample_size: int\n) -> Iterable[Tuple[ChecksumAddress, HexBytes, Wei]]:\n latest = w3.eth.get_block(\"latest\", full_transactions=True)\n\n for transaction in latest[\"transactions\"]:\n # type ignored b/c actual transaction is TxData not HexBytes\n yield (latest[\"miner\"], latest[\"hash\"], transaction[\"gasPrice\"]) # type: ignore\n\n block = latest\n\n for _ in range(sample_size - 1):\n if block[\"number\"] == 0:\n break\n\n # we intentionally trace backwards using parent hashes rather than\n # block numbers to make caching the data easier to implement.\n block = w3.eth.get_block(block[\"parentHash\"], full_transactions=True)\n for transaction in block[\"transactions\"]:\n # type ignored b/c actual transaction is TxData not HexBytes\n yield (block[\"miner\"], block[\"hash\"], transaction[\"gasPrice\"]) # type: ignore # noqa: E501\n\n\ndef _aggregate_miner_data(\n raw_data: Iterable[Tuple[ChecksumAddress, HexBytes, Wei]]\n) -> Iterable[MinerData]:\n data_by_miner = groupby(0, raw_data)\n\n for miner, miner_data in data_by_miner.items():\n _, block_hashes, gas_prices = map(set, zip(*miner_data))\n try:\n # types ignored b/c mypy has trouble inferring gas_prices: Sequence[Wei]\n price_percentile = percentile(gas_prices, percentile=20) # type: ignore\n except InsufficientData:\n price_percentile = min(gas_prices) # type: ignore\n yield MinerData(\n 
miner,\n len(set(block_hashes)),\n min(gas_prices), # type: ignore\n price_percentile,\n )\n\n\n@to_tuple\ndef _compute_probabilities(\n miner_data: Iterable[MinerData], wait_blocks: int, sample_size: int\n) -> Iterable[Probability]:\n \"\"\"\n Computes the probabilities that a txn will be accepted at each of the gas\n prices accepted by the miners.\n \"\"\"\n miner_data_by_price = tuple(\n sorted(\n miner_data,\n key=operator.attrgetter(\"low_percentile_gas_price\"),\n reverse=True,\n )\n )\n for idx in range(len(miner_data_by_price)):\n low_percentile_gas_price = miner_data_by_price[idx].low_percentile_gas_price\n num_blocks_accepting_price = sum(\n m.num_blocks for m in miner_data_by_price[idx:]\n )\n inv_prob_per_block = (sample_size - num_blocks_accepting_price) / sample_size\n probability_accepted = 1 - inv_prob_per_block**wait_blocks\n yield Probability(low_percentile_gas_price, probability_accepted)\n\n\ndef _compute_gas_price(\n probabilities: Sequence[Probability], desired_probability: float\n) -> Wei:\n \"\"\"\n Given a sorted range of ``Probability`` named-tuples returns a gas price\n computed based on where the ``desired_probability`` would fall within the\n range.\n\n :param probabilities: An iterable of `Probability` named-tuples\n sorted in reverse order.\n :param desired_probability: An floating point representation of the desired\n probability. (e.g. ``85% -> 0.85``)\n \"\"\"\n first = probabilities[0]\n last = probabilities[-1]\n\n if desired_probability >= first.prob:\n return Wei(int(first.gas_price))\n elif desired_probability <= last.prob:\n return Wei(int(last.gas_price))\n\n for left, right in sliding_window(2, probabilities):\n if desired_probability < right.prob:\n continue\n elif desired_probability > left.prob:\n # This code block should never be reachable as it would indicate\n # that we already passed by the probability window in which our\n # `desired_probability` is located.\n raise Exception(\"Invariant\")\n\n adj_prob = desired_probability - right.prob\n window_size = left.prob - right.prob\n position = adj_prob / window_size\n gas_window_size = left.gas_price - right.gas_price\n gas_price = int(math.ceil(right.gas_price + gas_window_size * position))\n return Wei(gas_price)\n else:\n # The initial `if/else` clause in this function handles the case where\n # the `desired_probability` is either above or below the min/max\n # probability found in the `probabilities`.\n #\n # With these two cases handled, the only way this code block should be\n # reachable would be if the `probabilities` were not sorted correctly.\n # Otherwise, the `desired_probability` **must** fall between two of the\n # values in the `probabilities``.\n raise Exception(\"Invariant\")\n\n\n@curry\ndef construct_time_based_gas_price_strategy(\n max_wait_seconds: int,\n sample_size: int = 120,\n probability: int = 98,\n weighted: bool = False,\n) -> GasPriceStrategy:\n \"\"\"\n A gas pricing strategy that uses recently mined block data to derive a gas\n price for which a transaction is likely to be mined within X seconds with\n probability P. If the weighted kwarg is True, more recent block\n times will be more heavily weighted.\n\n :param max_wait_seconds: The desired maximum number of seconds the\n transaction should take to mine.\n :param sample_size: The number of recent blocks to sample\n :param probability: An integer representation of the desired probability\n that the transaction will be mined within ``max_wait_seconds``. 
0 means 0%\n and 100 means 100%.\n \"\"\"\n\n def time_based_gas_price_strategy(w3: Web3, transaction_params: TxParams) -> Wei:\n if weighted:\n avg_block_time = _get_weighted_avg_block_time(w3, sample_size=sample_size)\n else:\n avg_block_time = _get_avg_block_time(w3, sample_size=sample_size)\n\n wait_blocks = int(math.ceil(max_wait_seconds / avg_block_time))\n raw_miner_data = _get_raw_miner_data(w3, sample_size=sample_size)\n miner_data = _aggregate_miner_data(raw_miner_data)\n\n probabilities = _compute_probabilities(\n miner_data,\n wait_blocks=wait_blocks,\n sample_size=sample_size,\n )\n\n gas_price = _compute_gas_price(probabilities, probability / 100)\n return gas_price\n\n return time_based_gas_price_strategy\n\n\n# fast: mine within 1 minute\nfast_gas_price_strategy = construct_time_based_gas_price_strategy(\n max_wait_seconds=60,\n sample_size=120,\n)\n# medium: mine within 10 minutes\nmedium_gas_price_strategy = construct_time_based_gas_price_strategy(\n max_wait_seconds=600,\n sample_size=120,\n)\n# slow: mine within 1 hour (60 minutes)\nslow_gas_price_strategy = construct_time_based_gas_price_strategy(\n max_wait_seconds=60 * 60,\n sample_size=120,\n)\n# glacial: mine within the next 24 hours.\nglacial_gas_price_strategy = construct_time_based_gas_price_strategy(\n max_wait_seconds=24 * 60 * 60,\n sample_size=720,\n)\n",
"path": "web3/gas_strategies/time_based.py"
}
] | 8_5 | python | import unittest
import sys

class TestTimeBasedGasPriceStrategy(unittest.TestCase):
    def _get_initial_block(self, method, params):
        return {
            "hash": "0x" + "00" * 32,
            "number": 0,
            "parentHash": None,
            "transactions": [],
            "miner": "0x" + "Aa" * 20,
            "timestamp": 0,
        }

    def _get_gas_price(self, method, params):
        return 4321

    def test_time_based_gas_price_strategy_without_transactions(self):
        from web3 import Web3
        from web3.middleware import construct_result_generator_middleware
        from web3.gas_strategies.time_based import construct_time_based_gas_price_strategy
        from web3.providers.base import BaseProvider
        fixture_middleware = construct_result_generator_middleware({
            "eth_getBlockByHash": self._get_initial_block,
            "eth_getBlockByNumber": self._get_initial_block,
            "eth_gasPrice": self._get_gas_price,
        })
        w3 = Web3(
            provider=BaseProvider(),
            middlewares=[fixture_middleware],
        )
        time_based_gas_price_strategy = construct_time_based_gas_price_strategy(
            max_wait_seconds=80,
            sample_size=5,
            probability=50,
            weighted=True,
        )
        w3.eth.set_gas_price_strategy(time_based_gas_price_strategy)
        actual = w3.eth.generate_gas_price()
        self.assertEqual(actual, w3.eth.gas_price)

    def test_gas_price_strategy_without_transactions(self):
        from web3 import Web3
        from web3.middleware import construct_result_generator_middleware
        from web3.gas_strategies.time_based import construct_time_based_gas_price_strategy
        from web3.providers.base import BaseProvider
        # Mock middleware to simulate an empty chain
        fixture_middleware = construct_result_generator_middleware({
            "eth_getBlockByHash": lambda method, params: {"number": 0},
            "eth_getBlockByNumber": lambda method, params: {"number": 0},
            "eth_gasPrice": lambda method, params: 4321,
        })
        w3 = Web3(
            provider=BaseProvider(),
            middlewares=[fixture_middleware],
        )
        # Set up the time-based gas price strategy
        time_based_gas_price_strategy = construct_time_based_gas_price_strategy(
            max_wait_seconds=80,
            sample_size=5,
            probability=50,
            weighted=True,
        )
        w3.eth.set_gas_price_strategy(time_based_gas_price_strategy)
        # Test the gas price strategy
        expected_gas_price = w3.eth.gas_price
        actual_gas_price = w3.eth.generate_gas_price()
        self.assertEqual(actual_gas_price, expected_gas_price)

def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestTimeBasedGasPriceStrategy))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)

if __name__ == '__main__':
    main()
|
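The fix above reduces to an early return at the top of `time_based_gas_price_strategy`: a latest block number of 0 means there are no mined transactions to sample, so the strategy falls back to the node's gas price instead of failing while computing block times. Excerpted from the solution patch:

    def time_based_gas_price_strategy(w3: Web3, transaction_params: TxParams) -> Wei:
        # return gas price when no transactions available to sample
        if w3.eth.get_block("latest")["number"] == 0:
            return w3.eth.gas_price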
https://github.com/teamqurrent/web3.py | Clean up `web3/_utils/async_transactions.py` by removing the now-redundant `async_handle_offchain_lookup` function, which handled offchain lookups in transactions, together with the imports and dependencies used only by it. | e48a480 | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 63b85fa9 | diff --git a/web3/_utils/async_transactions.py b/web3/_utils/async_transactions.py
--- a/web3/_utils/async_transactions.py
+++ b/web3/_utils/async_transactions.py
@@ -1,16 +1,10 @@
from typing import (
TYPE_CHECKING,
- Any,
- Dict,
Optional,
cast,
)
-from eth_abi import (
- abi,
-)
from eth_typing import (
- URI,
ChecksumAddress,
)
from eth_utils.toolz import (
@@ -21,27 +15,15 @@ from hexbytes import (
HexBytes,
)
-from web3._utils.request import (
- async_get_json_from_client_response,
- async_get_response_from_get_request,
- async_get_response_from_post_request,
-)
from web3._utils.transactions import (
prepare_replacement_transaction,
)
-from web3._utils.type_conversion import (
- to_bytes_if_hex,
- to_hex_if_bytes,
-)
from web3._utils.utility_methods import (
any_in_dict,
)
from web3.constants import (
DYNAMIC_FEE_TXN_PARAMS,
)
-from web3.exceptions import (
- Web3ValidationError,
-)
from web3.types import (
BlockIdentifier,
TxData,
@@ -168,73 +150,6 @@ async def async_fill_transaction_defaults(
return merge(defaults, transaction)
-async def async_handle_offchain_lookup(
- offchain_lookup_payload: Dict[str, Any],
- transaction: TxParams,
-) -> bytes:
- formatted_sender = to_hex_if_bytes(offchain_lookup_payload["sender"]).lower()
- formatted_data = to_hex_if_bytes(offchain_lookup_payload["callData"]).lower()
-
- if formatted_sender != to_hex_if_bytes(transaction["to"]).lower():
- raise Web3ValidationError(
- "Cannot handle OffchainLookup raised inside nested call. Returned `sender` "
- "value does not equal `to` address in transaction."
- )
-
- for url in offchain_lookup_payload["urls"]:
- formatted_url = URI(
- str(url)
- .replace("{sender}", str(formatted_sender))
- .replace("{data}", str(formatted_data))
- )
-
- try:
- if "{data}" in url and "{sender}" in url:
- response = await async_get_response_from_get_request(formatted_url)
- elif "{sender}" in url:
- response = await async_get_response_from_post_request(
- formatted_url,
- data={"data": formatted_data, "sender": formatted_sender},
- )
- else:
- raise Web3ValidationError("url not formatted properly.")
- except Exception:
- continue # try next url if timeout or issues making the request
-
- if (
- 400 <= response.status <= 499
- ): # if request returns 400 error, raise exception
- response.raise_for_status()
- if not 200 <= response.status <= 299: # if not 400 error, try next url
- continue
-
- result = await async_get_json_from_client_response(response)
-
- if "data" not in result.keys():
- raise Web3ValidationError(
- "Improperly formatted response for offchain lookup HTTP request "
- "- missing 'data' field."
- )
-
- encoded_data_with_function_selector = b"".join(
- [
- # 4-byte callback function selector
- to_bytes_if_hex(offchain_lookup_payload["callbackFunction"]),
- # encode the `data` from the result and the `extraData` as bytes
- abi.encode(
- ["bytes", "bytes"],
- [
- to_bytes_if_hex(result["data"]),
- to_bytes_if_hex(offchain_lookup_payload["extraData"]),
- ],
- ),
- ]
- )
-
- return encoded_data_with_function_selector
- raise Exception("Offchain lookup failed for supplied urls.")
-
-
async def async_get_required_transaction(
async_w3: "AsyncWeb3", transaction_hash: _Hash32
) -> TxData:
| [
{
"content": "from typing import (\n TYPE_CHECKING,\n Any,\n Dict,\n Optional,\n cast,\n)\n\nfrom eth_abi import (\n abi,\n)\nfrom eth_typing import (\n URI,\n ChecksumAddress,\n)\nfrom eth_utils.toolz import (\n assoc,\n merge,\n)\nfrom hexbytes import (\n HexBytes,\n)\n\nfrom web3._utils.request import (\n async_get_json_from_client_response,\n async_get_response_from_get_request,\n async_get_response_from_post_request,\n)\nfrom web3._utils.transactions import (\n prepare_replacement_transaction,\n)\nfrom web3._utils.type_conversion import (\n to_bytes_if_hex,\n to_hex_if_bytes,\n)\nfrom web3._utils.utility_methods import (\n any_in_dict,\n)\nfrom web3.constants import (\n DYNAMIC_FEE_TXN_PARAMS,\n)\nfrom web3.exceptions import (\n Web3ValidationError,\n)\nfrom web3.types import (\n BlockIdentifier,\n TxData,\n TxParams,\n Wei,\n _Hash32,\n)\n\nif TYPE_CHECKING:\n from web3.eth import AsyncEth # noqa: F401\n from web3.main import ( # noqa: F401\n AsyncWeb3,\n )\n\n\nasync def _estimate_gas(async_w3: \"AsyncWeb3\", tx: TxParams) -> int:\n return await async_w3.eth.estimate_gas(tx)\n\n\nasync def _max_fee_per_gas(async_w3: \"AsyncWeb3\", _tx: TxParams) -> Wei:\n block = await async_w3.eth.get_block(\"latest\")\n max_priority_fee = await async_w3.eth.max_priority_fee\n return Wei(max_priority_fee + (2 * block[\"baseFeePerGas\"]))\n\n\nasync def _max_priority_fee_gas(async_w3: \"AsyncWeb3\", _tx: TxParams) -> Wei:\n return await async_w3.eth.max_priority_fee\n\n\nasync def _chain_id(async_w3: \"AsyncWeb3\", _tx: TxParams) -> int:\n return await async_w3.eth.chain_id\n\n\nTRANSACTION_DEFAULTS = {\n \"value\": 0,\n \"data\": b\"\",\n \"gas\": _estimate_gas,\n \"gasPrice\": lambda async_w3, tx: async_w3.eth.generate_gas_price(tx),\n \"maxFeePerGas\": _max_fee_per_gas,\n \"maxPriorityFeePerGas\": _max_priority_fee_gas,\n \"chainId\": _chain_id,\n}\n\n\nasync def get_block_gas_limit(\n web3_eth: \"AsyncEth\", block_identifier: Optional[BlockIdentifier] = None\n) -> int:\n if block_identifier is None:\n block_identifier = await web3_eth.block_number\n block = await web3_eth.get_block(block_identifier)\n return block[\"gasLimit\"]\n\n\nasync def get_buffered_gas_estimate(\n async_w3: \"AsyncWeb3\", transaction: TxParams, gas_buffer: int = 100000\n) -> int:\n gas_estimate_transaction = cast(TxParams, dict(**transaction))\n\n gas_estimate = await async_w3.eth.estimate_gas(gas_estimate_transaction)\n\n gas_limit = await get_block_gas_limit(async_w3.eth)\n\n if gas_estimate > gas_limit:\n raise ValueError(\n \"Contract does not appear to be deployable within the \"\n f\"current network gas limits. Estimated: {gas_estimate}. 
\"\n f\"Current gas limit: {gas_limit}\"\n )\n\n return min(gas_limit, gas_estimate + gas_buffer)\n\n\nasync def async_fill_nonce(async_w3: \"AsyncWeb3\", transaction: TxParams) -> TxParams:\n if \"from\" in transaction and \"nonce\" not in transaction:\n tx_count = await async_w3.eth.get_transaction_count(\n cast(ChecksumAddress, transaction[\"from\"]),\n block_identifier=\"pending\",\n )\n return assoc(transaction, \"nonce\", tx_count)\n return transaction\n\n\nasync def async_fill_transaction_defaults(\n async_w3: \"AsyncWeb3\", transaction: TxParams\n) -> TxParams:\n \"\"\"\n if async_w3 is None, fill as much as possible while offline\n \"\"\"\n strategy_based_gas_price = async_w3.eth.generate_gas_price(transaction)\n\n is_dynamic_fee_transaction = strategy_based_gas_price is None and (\n \"gasPrice\" not in transaction # default to dynamic fee transaction\n or any_in_dict(DYNAMIC_FEE_TXN_PARAMS, transaction)\n )\n\n defaults = {}\n for key, default_getter in TRANSACTION_DEFAULTS.items():\n if key not in transaction:\n if (\n is_dynamic_fee_transaction\n and key == \"gasPrice\"\n or not is_dynamic_fee_transaction\n and key in DYNAMIC_FEE_TXN_PARAMS\n ):\n # do not set default max fees if legacy txn or\n # gas price if dynamic fee txn\n continue\n\n if callable(default_getter):\n if async_w3 is None:\n raise ValueError(\n f\"You must specify a '{key}' value in the transaction\"\n )\n if key == \"gasPrice\":\n # `generate_gas_price()` is on the `BaseEth` class and does not\n # need to be awaited\n default_val = default_getter(async_w3, transaction)\n else:\n default_val = await default_getter(async_w3, transaction)\n else:\n default_val = default_getter\n\n defaults[key] = default_val\n return merge(defaults, transaction)\n\n\nasync def async_handle_offchain_lookup(\n offchain_lookup_payload: Dict[str, Any],\n transaction: TxParams,\n) -> bytes:\n formatted_sender = to_hex_if_bytes(offchain_lookup_payload[\"sender\"]).lower()\n formatted_data = to_hex_if_bytes(offchain_lookup_payload[\"callData\"]).lower()\n\n if formatted_sender != to_hex_if_bytes(transaction[\"to\"]).lower():\n raise Web3ValidationError(\n \"Cannot handle OffchainLookup raised inside nested call. 
Returned `sender` \"\n \"value does not equal `to` address in transaction.\"\n )\n\n for url in offchain_lookup_payload[\"urls\"]:\n formatted_url = URI(\n str(url)\n .replace(\"{sender}\", str(formatted_sender))\n .replace(\"{data}\", str(formatted_data))\n )\n\n try:\n if \"{data}\" in url and \"{sender}\" in url:\n response = await async_get_response_from_get_request(formatted_url)\n elif \"{sender}\" in url:\n response = await async_get_response_from_post_request(\n formatted_url,\n data={\"data\": formatted_data, \"sender\": formatted_sender},\n )\n else:\n raise Web3ValidationError(\"url not formatted properly.\")\n except Exception:\n continue # try next url if timeout or issues making the request\n\n if (\n 400 <= response.status <= 499\n ): # if request returns 400 error, raise exception\n response.raise_for_status()\n if not 200 <= response.status <= 299: # if not 400 error, try next url\n continue\n\n result = await async_get_json_from_client_response(response)\n\n if \"data\" not in result.keys():\n raise Web3ValidationError(\n \"Improperly formatted response for offchain lookup HTTP request \"\n \"- missing 'data' field.\"\n )\n\n encoded_data_with_function_selector = b\"\".join(\n [\n # 4-byte callback function selector\n to_bytes_if_hex(offchain_lookup_payload[\"callbackFunction\"]),\n # encode the `data` from the result and the `extraData` as bytes\n abi.encode(\n [\"bytes\", \"bytes\"],\n [\n to_bytes_if_hex(result[\"data\"]),\n to_bytes_if_hex(offchain_lookup_payload[\"extraData\"]),\n ],\n ),\n ]\n )\n\n return encoded_data_with_function_selector\n raise Exception(\"Offchain lookup failed for supplied urls.\")\n\n\nasync def async_get_required_transaction(\n async_w3: \"AsyncWeb3\", transaction_hash: _Hash32\n) -> TxData:\n current_transaction = await async_w3.eth.get_transaction(transaction_hash)\n if not current_transaction:\n raise ValueError(\n f\"Supplied transaction with hash {transaction_hash!r} does not exist\"\n )\n return current_transaction\n\n\nasync def async_replace_transaction(\n async_w3: \"AsyncWeb3\", current_transaction: TxData, new_transaction: TxParams\n) -> HexBytes:\n new_transaction = prepare_replacement_transaction(\n async_w3, current_transaction, new_transaction\n )\n return await async_w3.eth.send_transaction(new_transaction)\n",
"path": "web3/_utils/async_transactions.py"
}
] | 8_6 | python | import unittest
import sys
class TestAsyncTransactionsRemoval(unittest.TestCase):
def test_offchain_lookup_method_removed(self):
from web3._utils import async_transactions
# Test will pass if the 'async_handle_offchain_lookup' method is not present
self.assertFalse(hasattr(async_transactions, 'async_handle_offchain_lookup'))
def test_removed_dependencies(self):
from web3._utils import async_transactions
# Check for the removal of dependencies used by the removed method
removed_dependencies = [
'async_get_json_from_client_response',
'async_get_response_from_get_request',
'async_get_response_from_post_request',
'to_bytes_if_hex',
'to_hex_if_bytes',
'Web3ValidationError'
]
for dep in removed_dependencies:
self.assertFalse(hasattr(async_transactions, dep), f"Dependency {dep} should be removed")
def test_removed_imports(self):
from web3._utils import async_transactions
# Check for the removal of specific imports
removed_imports = [
'eth_abi',
'eth_typing',
'web3._utils.request',
'web3._utils.type_conversion',
'web3.exceptions'
]
for imp in removed_imports:
            self.assertNotIn(imp, dir(async_transactions), f"Import {imp} should be removed")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestAsyncTransactionsRemoval))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/web3.py | Make the `_PersistentConnectionWeb3` class awaitable. The asynchronous operation should connect to the provider if the provider's `_ws` attribute is None. | 1249144 | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 66ca261 | diff --git a/web3/main.py b/web3/main.py
--- a/web3/main.py
+++ b/web3/main.py
@@ -35,6 +35,7 @@ from typing import (
Any,
AsyncIterator,
Dict,
+ Generator,
List,
Optional,
Sequence,
@@ -130,7 +131,9 @@ from web3.providers.rpc import (
from web3.providers.websocket import (
WebsocketProvider,
)
-from web3.providers.websocket.websocket_connection import WebsocketConnection
+from web3.providers.websocket.websocket_connection import (
+ WebsocketConnection,
+)
from web3.testing import (
Testing,
)
@@ -524,6 +527,8 @@ class AsyncWeb3(BaseWeb3):
class _PersistentConnectionWeb3(AsyncWeb3):
provider: PersistentConnectionProvider
+ # w3 = AsyncWeb3(provider)
+ # await w3.provider.connect()
def __init__(
self,
provider: PersistentConnectionProvider = None,
@@ -541,17 +546,17 @@ class _PersistentConnectionWeb3(AsyncWeb3):
AsyncWeb3.__init__(self, provider, middlewares, modules, external_modules, ens)
self.ws = WebsocketConnection(self)
- # async for w3 in w3.persistent_websocket(provider)
- async def __aiter__(self) -> AsyncIterator[Self]:
- if not await self.provider.is_connected():
- await self.provider.connect()
+ # w3 = await AsyncWeb3.persistent_websocket(provider)
+ def __await__(
+ self,
+ ) -> Generator[Any, None, Self]:
+ async def __async_init__() -> Self:
+ if self.provider._ws is None:
+ await self.provider.connect()
- while True:
- try:
- yield self
- except Exception:
- # provider should handle connection / reconnection
- continue
+ return self
+
+ return __async_init__().__await__()
# async with w3.persistent_websocket(provider) as w3
async def __aenter__(self) -> Self:
@@ -565,3 +570,15 @@ class _PersistentConnectionWeb3(AsyncWeb3):
exc_tb: TracebackType,
) -> None:
await self.provider.disconnect()
+
+ # async for w3 in w3.persistent_websocket(provider)
+ async def __aiter__(self) -> AsyncIterator[Self]:
+ if not await self.provider.is_connected():
+ await self.provider.connect()
+
+ while True:
+ try:
+ yield self
+ except Exception:
+ # provider should handle connection / reconnection
+ continue
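The `__aiter__` re-added at the end of this patch yields the same instance indefinitely so callers can drive reconnect handling with `async for`. A self-contained sketch of that consumption pattern follows; `Session` is an invented toy class, not a web3.py API:

import asyncio
from typing import AsyncIterator


class Session:
    # invented stand-in for a self-reconnecting provider session
    def __init__(self) -> None:
        self.connected = False

    async def connect(self) -> None:
        self.connected = True

    async def __aiter__(self) -> AsyncIterator["Session"]:
        if not self.connected:
            await self.connect()
        while True:
            yield self  # hand the same session back on every iteration


async def main() -> None:
    async for s in Session():
        assert s.connected
        break  # a real consumer would keep looping over incoming messages


asyncio.run(main())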
| [
{
"content": "import decimal\nimport warnings\nfrom types import (\n TracebackType,\n)\n\nfrom ens import (\n AsyncENS,\n ENS,\n)\nfrom eth_abi.codec import (\n ABICodec,\n)\nfrom eth_utils import (\n add_0x_prefix,\n apply_to_return_value,\n from_wei,\n is_address,\n is_checksum_address,\n keccak as eth_utils_keccak,\n remove_0x_prefix,\n to_bytes,\n to_checksum_address,\n to_int,\n to_text,\n to_wei,\n)\nfrom functools import (\n wraps,\n)\nfrom hexbytes import (\n HexBytes,\n)\nfrom typing import (\n Any,\n AsyncIterator,\n Dict,\n List,\n Optional,\n Sequence,\n Type,\n TYPE_CHECKING,\n Union,\n cast,\n)\n\nfrom eth_typing import (\n AnyAddress,\n ChecksumAddress,\n HexStr,\n Primitives,\n)\nfrom eth_typing.abi import TypeStr\nfrom eth_utils import (\n combomethod,\n)\n\nfrom web3._utils.abi import (\n build_non_strict_registry,\n build_strict_registry,\n map_abi_data,\n)\nfrom web3._utils.compat import (\n Self,\n)\nfrom web3._utils.empty import (\n empty,\n)\nfrom web3._utils.encoding import (\n hex_encode_abi_type,\n to_hex,\n to_json,\n)\nfrom web3._utils.rpc_abi import (\n RPC,\n)\nfrom web3._utils.module import (\n attach_modules as _attach_modules,\n)\nfrom web3._utils.normalizers import (\n abi_ens_resolver,\n)\nfrom web3.eth import (\n AsyncEth,\n Eth,\n)\nfrom web3.exceptions import (\n Web3ValidationError,\n)\nfrom web3.geth import (\n AsyncGeth,\n AsyncGethAdmin,\n AsyncGethPersonal,\n AsyncGethTxPool,\n Geth,\n GethAdmin,\n GethMiner,\n GethPersonal,\n GethTxPool,\n)\nfrom web3.manager import (\n RequestManager as DefaultRequestManager,\n)\nfrom web3.module import (\n Module,\n)\nfrom web3.net import (\n AsyncNet,\n Net,\n)\nfrom web3.providers import (\n AsyncBaseProvider,\n BaseProvider,\n)\nfrom web3.providers.eth_tester import (\n EthereumTesterProvider,\n)\nfrom web3.providers.ipc import (\n IPCProvider,\n)\nfrom web3.providers.async_rpc import (\n AsyncHTTPProvider,\n)\nfrom web3.providers.persistent import (\n PersistentConnectionProvider,\n)\nfrom web3.providers.rpc import (\n HTTPProvider,\n)\nfrom web3.providers.websocket import (\n WebsocketProvider,\n)\nfrom web3.providers.websocket.websocket_connection import WebsocketConnection\nfrom web3.testing import (\n Testing,\n)\nfrom web3.tracing import (\n Tracing,\n)\nfrom web3.types import (\n AsyncMiddlewareOnion,\n MiddlewareOnion,\n Wei,\n)\n\nif TYPE_CHECKING:\n from web3.pm import PM # noqa: F401\n from web3._utils.empty import Empty # noqa: F401\n\n\ndef get_async_default_modules() -> Dict[str, Union[Type[Module], Sequence[Any]]]:\n return {\n \"eth\": AsyncEth,\n \"net\": AsyncNet,\n \"geth\": (\n AsyncGeth,\n {\n \"admin\": AsyncGethAdmin,\n \"personal\": AsyncGethPersonal,\n \"txpool\": AsyncGethTxPool,\n },\n ),\n }\n\n\ndef get_default_modules() -> Dict[str, Union[Type[Module], Sequence[Any]]]:\n return {\n \"eth\": Eth,\n \"net\": Net,\n \"geth\": (\n Geth,\n {\n \"admin\": GethAdmin,\n \"miner\": GethMiner,\n \"personal\": GethPersonal,\n \"txpool\": GethTxPool,\n },\n ),\n \"tracing\": Tracing,\n \"testing\": Testing,\n }\n\n\nclass BaseWeb3:\n _strict_bytes_type_checking = True\n\n # Providers\n HTTPProvider = HTTPProvider\n IPCProvider = IPCProvider\n EthereumTesterProvider = EthereumTesterProvider\n WebsocketProvider = WebsocketProvider\n AsyncHTTPProvider = AsyncHTTPProvider\n\n # Managers\n RequestManager = DefaultRequestManager\n\n # mypy types\n eth: Union[Eth, AsyncEth]\n net: Union[Net, AsyncNet]\n geth: Union[Geth, AsyncGeth]\n\n # Encoding and Decoding\n @staticmethod\n @wraps(to_bytes)\n def 
to_bytes(\n primitive: Primitives = None, hexstr: HexStr = None, text: str = None\n ) -> bytes:\n return to_bytes(primitive, hexstr, text)\n\n @staticmethod\n @wraps(to_int)\n def to_int(\n primitive: Primitives = None, hexstr: HexStr = None, text: str = None\n ) -> int:\n return to_int(primitive, hexstr, text)\n\n @staticmethod\n @wraps(to_hex)\n def to_hex(\n primitive: Primitives = None, hexstr: HexStr = None, text: str = None\n ) -> HexStr:\n return to_hex(primitive, hexstr, text)\n\n @staticmethod\n @wraps(to_text)\n def to_text(\n primitive: Primitives = None, hexstr: HexStr = None, text: str = None\n ) -> str:\n return to_text(primitive, hexstr, text)\n\n @staticmethod\n @wraps(to_json)\n def to_json(obj: Dict[Any, Any]) -> str:\n return to_json(obj)\n\n # Currency Utility\n @staticmethod\n @wraps(to_wei)\n def to_wei(number: Union[int, float, str, decimal.Decimal], unit: str) -> Wei:\n return cast(Wei, to_wei(number, unit))\n\n @staticmethod\n @wraps(from_wei)\n def from_wei(number: int, unit: str) -> Union[int, decimal.Decimal]:\n return from_wei(number, unit)\n\n # Address Utility\n @staticmethod\n @wraps(is_address)\n def is_address(value: Any) -> bool:\n return is_address(value)\n\n @staticmethod\n @wraps(is_checksum_address)\n def is_checksum_address(value: Any) -> bool:\n return is_checksum_address(value)\n\n @staticmethod\n @wraps(to_checksum_address)\n def to_checksum_address(value: Union[AnyAddress, str, bytes]) -> ChecksumAddress:\n return to_checksum_address(value)\n\n @property\n def api(self) -> str:\n from web3 import __version__\n\n return __version__\n\n @property\n def strict_bytes_type_checking(self) -> bool:\n return self._strict_bytes_type_checking\n\n @strict_bytes_type_checking.setter\n def strict_bytes_type_checking(self, strict_bytes_type_check: bool) -> None:\n self.codec = (\n ABICodec(build_strict_registry())\n if strict_bytes_type_check\n else ABICodec(build_non_strict_registry())\n )\n self._strict_bytes_type_checking = strict_bytes_type_check\n\n @staticmethod\n @apply_to_return_value(HexBytes)\n def keccak(\n primitive: Optional[Primitives] = None,\n text: Optional[str] = None,\n hexstr: Optional[HexStr] = None,\n ) -> bytes:\n if isinstance(primitive, (bytes, int, type(None))):\n input_bytes = to_bytes(primitive, hexstr=hexstr, text=text)\n return eth_utils_keccak(input_bytes)\n\n raise TypeError(\n f\"You called keccak with first arg {primitive!r} and keywords \"\n f\"{{'text': {text!r}, 'hexstr': {hexstr!r}}}. You must call it with \"\n \"one of these approaches: keccak(text='txt'), keccak(hexstr='0x747874'), \"\n \"keccak(b'\\\\x74\\\\x78\\\\x74'), or keccak(0x747874).\"\n )\n\n @classmethod\n def normalize_values(\n cls, w3: \"BaseWeb3\", abi_types: List[TypeStr], values: List[Any]\n ) -> List[Any]:\n return map_abi_data([abi_ens_resolver(w3)], abi_types, values)\n\n @combomethod\n def solidity_keccak(cls, abi_types: List[TypeStr], values: List[Any]) -> bytes:\n \"\"\"\n Executes keccak256 exactly as Solidity does.\n Takes list of abi_types as inputs -- `[uint24, int8[], bool]`\n and list of corresponding values -- `[20, [-1, 5, 0], True]`\n \"\"\"\n if len(abi_types) != len(values):\n raise ValueError(\n \"Length mismatch between provided abi types and values. 
Got \"\n f\"{len(abi_types)} types and {len(values)} values.\"\n )\n\n if isinstance(cls, type):\n w3 = None\n else:\n w3 = cls\n normalized_values = cls.normalize_values(w3, abi_types, values)\n\n hex_string = add_0x_prefix(\n HexStr(\n \"\".join(\n remove_0x_prefix(hex_encode_abi_type(abi_type, value))\n for abi_type, value in zip(abi_types, normalized_values)\n )\n )\n )\n return cls.keccak(hexstr=hex_string)\n\n def attach_modules(\n self, modules: Optional[Dict[str, Union[Type[Module], Sequence[Any]]]]\n ) -> None:\n \"\"\"\n Attach modules to the `Web3` instance.\n \"\"\"\n _attach_modules(self, modules)\n\n def is_encodable(self, _type: TypeStr, value: Any) -> bool:\n return self.codec.is_encodable(_type, value)\n\n @property\n def pm(self) -> \"PM\":\n if hasattr(self, \"_pm\"):\n # ignored b/c property is dynamically set\n # via enable_unstable_package_management_api\n return self._pm\n else:\n raise AttributeError(\n \"The Package Management feature is disabled by default until \"\n \"its API stabilizes. To use these features, please enable them by \"\n \"running `w3.enable_unstable_package_management_api()` and try again.\"\n )\n\n def enable_unstable_package_management_api(self) -> None:\n if not hasattr(self, \"_pm\"):\n warnings.warn(\n \"The ``ethPM`` module is no longer being maintained and will be \"\n \"deprecated with ``web3.py`` version 7\",\n UserWarning,\n )\n from web3.pm import PM # noqa: F811\n\n self.attach_modules({\"_pm\": PM})\n\n\nclass Web3(BaseWeb3):\n # mypy types\n eth: Eth\n net: Net\n geth: Geth\n\n def __init__(\n self,\n provider: Optional[BaseProvider] = None,\n middlewares: Optional[Sequence[Any]] = None,\n modules: Optional[Dict[str, Union[Type[Module], Sequence[Any]]]] = None,\n external_modules: Optional[\n Dict[str, Union[Type[Module], Sequence[Any]]]\n ] = None,\n ens: Union[ENS, \"Empty\"] = empty,\n ) -> None:\n self.manager = self.RequestManager(self, provider, middlewares)\n self.codec = ABICodec(build_strict_registry())\n\n if modules is None:\n modules = get_default_modules()\n\n self.attach_modules(modules)\n\n if external_modules is not None:\n self.attach_modules(external_modules)\n\n self.ens = ens\n\n def is_connected(self, show_traceback: bool = False) -> bool:\n return self.provider.is_connected(show_traceback)\n\n @property\n def middleware_onion(self) -> MiddlewareOnion:\n return cast(MiddlewareOnion, self.manager.middleware_onion)\n\n @property\n def provider(self) -> BaseProvider:\n return cast(BaseProvider, self.manager.provider)\n\n @provider.setter\n def provider(self, provider: BaseProvider) -> None:\n self.manager.provider = provider\n\n @property\n def client_version(self) -> str:\n return self.manager.request_blocking(RPC.web3_clientVersion, [])\n\n @property\n def ens(self) -> Union[ENS, \"Empty\"]:\n if self._ens is empty:\n ns = ENS.from_web3(self)\n ns.w3 = self\n return ns\n\n return self._ens\n\n @ens.setter\n def ens(self, new_ens: Union[ENS, \"Empty\"]) -> None:\n if new_ens:\n new_ens.w3 = self # set self object reference for ``ENS.w3``\n self._ens = new_ens\n\n\n# -- async -- #\n\n\nclass AsyncWeb3(BaseWeb3):\n # mypy Types\n eth: AsyncEth\n net: AsyncNet\n geth: AsyncGeth\n\n def __init__(\n self,\n provider: Optional[AsyncBaseProvider] = None,\n middlewares: Optional[Sequence[Any]] = None,\n modules: Optional[Dict[str, Union[Type[Module], Sequence[Any]]]] = None,\n external_modules: Optional[\n Dict[str, Union[Type[Module], Sequence[Any]]]\n ] = None,\n ens: Union[AsyncENS, \"Empty\"] = empty,\n **kwargs: 
Any,\n ) -> None:\n self.manager = self.RequestManager(self, provider, middlewares)\n self.codec = ABICodec(build_strict_registry())\n\n if modules is None:\n modules = get_async_default_modules()\n\n self.attach_modules(modules)\n\n if external_modules is not None:\n self.attach_modules(external_modules)\n\n self.ens = ens\n\n async def is_connected(self, show_traceback: bool = False) -> bool:\n return await self.provider.is_connected(show_traceback)\n\n @property\n def middleware_onion(self) -> AsyncMiddlewareOnion:\n return cast(AsyncMiddlewareOnion, self.manager.middleware_onion)\n\n @property\n def provider(self) -> AsyncBaseProvider:\n return cast(AsyncBaseProvider, self.manager.provider)\n\n @provider.setter\n def provider(self, provider: AsyncBaseProvider) -> None:\n self.manager.provider = provider\n\n @property\n async def client_version(self) -> str:\n return await self.manager.coro_request(RPC.web3_clientVersion, [])\n\n @property\n def ens(self) -> Union[AsyncENS, \"Empty\"]:\n if self._ens is empty:\n ns = AsyncENS.from_web3(self)\n ns.w3 = self\n return ns\n return self._ens\n\n @ens.setter\n def ens(self, new_ens: Union[AsyncENS, \"Empty\"]) -> None:\n if new_ens:\n new_ens.w3 = self # set self object reference for ``AsyncENS.w3``\n self._ens = new_ens\n\n @staticmethod\n def persistent_websocket(\n provider: PersistentConnectionProvider,\n middlewares: Optional[Sequence[Any]] = None,\n modules: Optional[Dict[str, Union[Type[Module], Sequence[Any]]]] = None,\n external_modules: Optional[\n Dict[str, Union[Type[Module], Sequence[Any]]]\n ] = None,\n ens: Union[AsyncENS, \"Empty\"] = empty,\n ) -> \"_PersistentConnectionWeb3\":\n \"\"\"\n Establish a persistent connection via websockets to a websocket provider using\n a ``PersistentConnectionProvider`` instance.\n \"\"\"\n return _PersistentConnectionWeb3(\n provider,\n middlewares,\n modules,\n external_modules,\n ens,\n )\n\n\nclass _PersistentConnectionWeb3(AsyncWeb3):\n provider: PersistentConnectionProvider\n\n def __init__(\n self,\n provider: PersistentConnectionProvider = None,\n middlewares: Optional[Sequence[Any]] = None,\n modules: Optional[Dict[str, Union[Type[Module], Sequence[Any]]]] = None,\n external_modules: Optional[\n Dict[str, Union[Type[Module], Sequence[Any]]]\n ] = None,\n ens: Union[AsyncENS, \"Empty\"] = empty,\n ) -> None:\n if not isinstance(provider, PersistentConnectionProvider):\n raise Web3ValidationError(\n \"Provider must inherit from PersistentConnectionProvider class.\"\n )\n AsyncWeb3.__init__(self, provider, middlewares, modules, external_modules, ens)\n self.ws = WebsocketConnection(self)\n\n # async for w3 in w3.persistent_websocket(provider)\n async def __aiter__(self) -> AsyncIterator[Self]:\n if not await self.provider.is_connected():\n await self.provider.connect()\n\n while True:\n try:\n yield self\n except Exception:\n # provider should handle connection / reconnection\n continue\n\n # async with w3.persistent_websocket(provider) as w3\n async def __aenter__(self) -> Self:\n await self.provider.connect()\n return self\n\n async def __aexit__(\n self,\n exc_type: Type[BaseException],\n exc_val: BaseException,\n exc_tb: TracebackType,\n ) -> None:\n await self.provider.disconnect()\n",
"path": "web3/main.py"
}
] | 8_7 | python | import pytest
from unittest.mock import (
AsyncMock,
patch,
)
from web3 import (
AsyncWeb3,
)
from web3.providers.websocket import (
WebsocketProviderV2,
)
import sys
def _mock_ws(provider):
provider._ws = AsyncMock()
store = [False]
async def connect_patch(*_1, **_2):
store[0] = True
@pytest.mark.asyncio
async def test_awaitable():
provider = WebsocketProviderV2("ws://mocked")
    # Check that the provider connects when the websocket is None
with patch(
"web3.providers.websocket.websocket_v2.connect", new=connect_patch
):
        await AsyncWeb3.persistent_websocket(provider)
    assert store[0] is True
store[0] = False
_mock_ws(provider)
    # Check that the provider does not connect when the websocket is not None
with patch(
"web3.providers.websocket.websocket_v2.connect", new=connect_patch
):
await AsyncWeb3.persistent_websocket(provider)
    assert store[0] is False
def main():
# Run the pytest tests programmatically
exit_code = pytest.main(["-v", __file__])
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
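Stepping back from the row above: the awaitable-class technique it implements and tests reduces to a short, self-contained sketch. `Connection` and its `_ws` sentinel below are invented stand-ins rather than web3.py APIs; the point is that `__await__` can delegate to an inner coroutine so that `await obj` performs one-time asynchronous setup:

import asyncio
from typing import Any, Generator


class Connection:
    def __init__(self) -> None:
        self._ws: Any = None  # mimics the sentinel the patch checks

    async def connect(self) -> None:
        self._ws = object()  # placeholder for a real websocket handshake

    def __await__(self) -> Generator[Any, None, "Connection"]:
        # delegate to an inner coroutine so awaiting connects lazily
        async def _init() -> "Connection":
            if self._ws is None:
                await self.connect()
            return self

        return _init().__await__()


async def main() -> None:
    conn = await Connection()  # first await connects because _ws is None
    same = await conn          # awaiting again skips the reconnect
    assert conn is same and conn._ws is not None


asyncio.run(main())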
https://github.com/teamqurrent/web3.py | The objective of the commit is to enhance the efficiency and responsiveness of the event loop in the web3.py project by implementing several key changes across different files. Firstly, use asyncio.sleep(0) as an efficient way to yield control back to the event loop, allowing it to manage multiple tasks concurrently without relying on short polling timeouts. This change is primarily focused on files where asynchronous operations are handled, such as `web3/manager.py` and relevant WebSocket provider files like `web3/providers/websocket/websocket_v2.py`. Secondly, increase the default timeout for awaiting responses to requests from 20 seconds to 50 seconds, which involves modifying the shared default in `web3/providers/persistent.py`. This adjustment makes the system more tolerant of slower responses. Lastly, make the caching methods within the request processor (`web3/providers/websocket/request_processor.py`) synchronous, as their asynchronous nature is not necessary. This simplification streamlines processing and improves overall efficiency. | 2db5fee | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | 2c78c125 | diff --git a/tests/core/providers/test_wsv2_provider.py b/tests/core/providers/test_wsv2_provider.py
--- a/tests/core/providers/test_wsv2_provider.py
+++ b/tests/core/providers/test_wsv2_provider.py
@@ -1,3 +1,4 @@
+import asyncio
import json
import pytest
import sys
@@ -87,7 +88,7 @@ async def test_async_make_request_returns_cached_response_with_no_recv_if_cached
# cache the response, so we should get it immediately & should never call `recv()`
desired_response = {"jsonrpc": "2.0", "id": 0, "result": "0x1337"}
- await provider._request_processor.cache_raw_response(desired_response)
+ provider._request_processor.cache_raw_response(desired_response)
response = await method_under_test(RPCEndpoint("some_method"), ["desired_params"])
assert response == desired_response
@@ -104,15 +105,18 @@ async def test_async_make_request_returns_cached_response_with_no_recv_if_cached
reason="Uses AsyncMock, not supported by python 3.7",
)
async def test_async_make_request_times_out_of_while_loop_looking_for_response():
- provider = WebsocketProviderV2("ws://mocked", request_timeout=0.1)
+ timeout = 0.001
+ provider = WebsocketProviderV2("ws://mocked", request_timeout=timeout)
method_under_test = provider.make_request
_mock_ws(provider)
- provider._ws.recv.side_effect = lambda *args, **kwargs: b'{"jsonrpc": "2.0"}'
+ # mock the websocket to never receive a response & sleep longer than the timeout
+ provider._ws.recv = lambda *args, **kwargs: asyncio.sleep(1)
with pytest.raises(
TimeExhausted,
- match=r"Timed out waiting for response with request id `0` after 0.1 second",
+ match=r"Timed out waiting for response with request id `0` after "
+ rf"{timeout} second\(s\)",
):
await method_under_test(RPCEndpoint("some_method"), ["desired_params"])
diff --git a/web3/manager.py b/web3/manager.py
--- a/web3/manager.py
+++ b/web3/manager.py
@@ -369,8 +369,13 @@ class RequestManager:
)
while True:
+ # sleep(0) here seems to be the most efficient way to yield control back to
+ # the event loop while waiting for the response to be cached or received on
+ # the websocket.
+ await asyncio.sleep(0)
+
# look in the cache for a response
- response = await self._request_processor.pop_raw_response(subscription=True)
+ response = self._request_processor.pop_raw_response(subscription=True)
if response is not None:
break
else:
@@ -380,7 +385,7 @@ class RequestManager:
try:
# keep timeout low but reasonable to check both the cache
# and the websocket connection for new responses
- response = await self._provider._ws_recv(timeout=2)
+ response = await self._provider._ws_recv(timeout=0.5)
except asyncio.TimeoutError:
# if no response received, continue to next iteration
continue
@@ -388,12 +393,7 @@ class RequestManager:
if response.get("method") == "eth_subscription":
break
else:
- await self._provider._request_processor.cache_raw_response(
- response
- )
-
- # this is important to let asyncio run other tasks
- await asyncio.sleep(0.05)
+ self._provider._request_processor.cache_raw_response(response)
yield await self._process_ws_response(response)
diff --git a/web3/providers/persistent.py b/web3/providers/persistent.py
--- a/web3/providers/persistent.py
+++ b/web3/providers/persistent.py
@@ -21,7 +21,7 @@ from web3.types import (
RPCResponse,
)
-DEFAULT_PERSISTENT_CONNECTION_TIMEOUT = 20
+DEFAULT_PERSISTENT_CONNECTION_TIMEOUT = 50
class PersistentConnectionProvider(AsyncJSONBaseProvider, ABC):
diff --git a/web3/providers/websocket/request_processor.py b/web3/providers/websocket/request_processor.py
--- a/web3/providers/websocket/request_processor.py
+++ b/web3/providers/websocket/request_processor.py
@@ -191,9 +191,7 @@ class RequestProcessor:
# raw response cache
- async def cache_raw_response(
- self, raw_response: Any, subscription: bool = False
- ) -> None:
+ def cache_raw_response(self, raw_response: Any, subscription: bool = False) -> None:
if subscription:
self._provider.logger.debug(
f"Caching subscription response:\n response={raw_response}"
@@ -208,7 +206,7 @@ class RequestProcessor:
)
self._request_response_cache.cache(cache_key, raw_response)
- async def pop_raw_response(
+ def pop_raw_response(
self, cache_key: str = None, subscription: bool = False
) -> Any:
if subscription:
diff --git a/web3/providers/websocket/websocket_v2.py b/web3/providers/websocket/websocket_v2.py
--- a/web3/providers/websocket/websocket_v2.py
+++ b/web3/providers/websocket/websocket_v2.py
@@ -169,13 +169,18 @@ class WebsocketProviderV2(PersistentConnectionProvider):
request_cache_key = generate_cache_key(request_id)
while True:
+ # sleep(0) here seems to be the most efficient way to yield control
+ # back to the event loop while waiting for the response to be cached
+ # or received on the websocket.
+ await asyncio.sleep(0)
+
if request_cache_key in self._request_processor._request_response_cache:
# if response is already cached, pop it from cache
self.logger.debug(
f"Response for id {request_id} is already cached, pop it "
"from the cache."
)
- return await self._request_processor.pop_raw_response(
+ return self._request_processor.pop_raw_response(
cache_key=request_cache_key,
)
@@ -189,7 +194,7 @@ class WebsocketProviderV2(PersistentConnectionProvider):
try:
# keep timeout low but reasonable to check both the
# cache and the websocket connection for new responses
- response = await self._ws_recv(timeout=2)
+ response = await self._ws_recv(timeout=0.5)
except asyncio.TimeoutError:
# keep the request timeout around the whole of this
# while loop in case the response sneaks into the cache
@@ -209,13 +214,10 @@ class WebsocketProviderV2(PersistentConnectionProvider):
is_subscription = (
response.get("method") == "eth_subscription"
)
- await self._request_processor.cache_raw_response(
+ self._request_processor.cache_raw_response(
response, subscription=is_subscription
)
- # this is important to let asyncio run other tasks
- await asyncio.sleep(0.05)
-
try:
# Add the request timeout around the while loop that checks the request
# cache and tried to recv(). If the request is neither in the cache, nor
| [
{
"content": "import json\nimport pytest\nimport sys\n\nfrom eth_utils import (\n to_bytes,\n)\n\nfrom web3.exceptions import (\n TimeExhausted,\n)\nfrom web3.providers.websocket import (\n WebsocketProviderV2,\n)\nfrom web3.types import (\n RPCEndpoint,\n)\n\n\ndef _mock_ws(provider):\n # move to top of file when python 3.7 is no longer supported in web3.py\n from unittest.mock import (\n AsyncMock,\n )\n\n provider._ws = AsyncMock()\n\n\n@pytest.mark.asyncio\n@pytest.mark.skipif(\n # TODO: remove when python 3.7 is no longer supported in web3.py\n # python 3.7 is already sunset so this feels like a reasonable tradeoff\n sys.version_info < (3, 8),\n reason=\"Uses AsyncMock, not supported by python 3.7\",\n)\nasync def test_async_make_request_caches_all_undesired_responses_and_returns_desired():\n provider = WebsocketProviderV2(\"ws://mocked\")\n\n method_under_test = provider.make_request\n\n _mock_ws(provider)\n undesired_responses_count = 10\n ws_recv_responses = [\n to_bytes(\n text=json.dumps(\n {\n \"jsonrpc\": \"2.0\",\n \"method\": \"eth_subscription\",\n \"params\": {\"subscription\": \"0x1\", \"result\": f\"0x{i}\"},\n }\n )\n )\n for i in range(0, undesired_responses_count)\n ]\n # The first request we make should have an id of `0`, expect the response to match\n # that id. Append it as the last response in the list.\n ws_recv_responses.append(b'{\"jsonrpc\": \"2.0\", \"id\":0, \"result\": \"0x1337\"}')\n provider._ws.recv.side_effect = ws_recv_responses\n\n response = await method_under_test(RPCEndpoint(\"some_method\"), [\"desired_params\"])\n assert response == json.loads(ws_recv_responses.pop()) # pop the expected response\n\n assert (\n len(provider._request_processor._subscription_response_deque)\n == len(ws_recv_responses)\n == undesired_responses_count\n )\n\n for cached_response in provider._request_processor._subscription_response_deque:\n # assert all cached responses are in the list of responses we received\n assert to_bytes(text=json.dumps(cached_response)) in ws_recv_responses\n\n\n@pytest.mark.asyncio\n@pytest.mark.skipif(\n # TODO: remove when python 3.7 is no longer supported in web3.py\n # python 3.7 is already sunset so this feels like a reasonable tradeoff\n sys.version_info < (3, 8),\n reason=\"Uses AsyncMock, not supported by python 3.7\",\n)\nasync def test_async_make_request_returns_cached_response_with_no_recv_if_cached():\n provider = WebsocketProviderV2(\"ws://mocked\")\n\n method_under_test = provider.make_request\n\n _mock_ws(provider)\n\n # cache the response, so we should get it immediately & should never call `recv()`\n desired_response = {\"jsonrpc\": \"2.0\", \"id\": 0, \"result\": \"0x1337\"}\n await provider._request_processor.cache_raw_response(desired_response)\n\n response = await method_under_test(RPCEndpoint(\"some_method\"), [\"desired_params\"])\n assert response == desired_response\n\n assert len(provider._request_processor._request_response_cache) == 0\n assert not provider._ws.recv.called # type: ignore\n\n\n@pytest.mark.asyncio\n@pytest.mark.skipif(\n # TODO: remove when python 3.7 is no longer supported in web3.py\n # python 3.7 is already sunset so this feels like a reasonable tradeoff\n sys.version_info < (3, 8),\n reason=\"Uses AsyncMock, not supported by python 3.7\",\n)\nasync def test_async_make_request_times_out_of_while_loop_looking_for_response():\n provider = WebsocketProviderV2(\"ws://mocked\", request_timeout=0.1)\n\n method_under_test = provider.make_request\n\n _mock_ws(provider)\n provider._ws.recv.side_effect = lambda 
*args, **kwargs: b'{\"jsonrpc\": \"2.0\"}'\n\n with pytest.raises(\n TimeExhausted,\n match=r\"Timed out waiting for response with request id `0` after 0.1 second\",\n ):\n await method_under_test(RPCEndpoint(\"some_method\"), [\"desired_params\"])\n",
"path": "tests/core/providers/test_wsv2_provider.py"
},
{
"content": "import asyncio\nimport logging\nfrom typing import (\n TYPE_CHECKING,\n Any,\n AsyncGenerator,\n Callable,\n List,\n Optional,\n Sequence,\n Tuple,\n Union,\n cast,\n)\n\nfrom eth_utils.toolz import (\n pipe,\n)\nfrom hexbytes import (\n HexBytes,\n)\nfrom websockets.exceptions import (\n ConnectionClosedOK,\n)\n\nfrom web3._utils.caching import (\n generate_cache_key,\n)\nfrom web3._utils.compat import (\n Self,\n)\nfrom web3.datastructures import (\n NamedElementOnion,\n)\nfrom web3.exceptions import (\n BadResponseFormat,\n MethodUnavailable,\n)\nfrom web3.middleware import (\n abi_middleware,\n async_attrdict_middleware,\n async_buffered_gas_estimate_middleware,\n async_gas_price_strategy_middleware,\n async_name_to_address_middleware,\n async_validation_middleware,\n attrdict_middleware,\n buffered_gas_estimate_middleware,\n gas_price_strategy_middleware,\n name_to_address_middleware,\n validation_middleware,\n)\nfrom web3.module import (\n apply_result_formatters,\n)\nfrom web3.providers import (\n AutoProvider,\n PersistentConnectionProvider,\n)\nfrom web3.types import (\n AsyncMiddleware,\n AsyncMiddlewareOnion,\n Middleware,\n MiddlewareOnion,\n RPCEndpoint,\n RPCResponse,\n)\n\nif TYPE_CHECKING:\n from web3.main import ( # noqa: F401\n AsyncWeb3,\n Web3,\n )\n from web3.providers import ( # noqa: F401\n AsyncBaseProvider,\n BaseProvider,\n )\n from web3.providers.websocket.request_processor import ( # noqa: F401\n RequestProcessor,\n )\n\n\nNULL_RESPONSES = [None, HexBytes(\"0x\"), \"0x\"]\nMETHOD_NOT_FOUND = -32601\n\n\ndef _raise_bad_response_format(response: RPCResponse, error: str = \"\") -> None:\n message = \"The response was in an unexpected format and unable to be parsed.\"\n raw_response = f\"The raw response is: {response}\"\n\n if error is not None and error != \"\":\n message = f\"{message} {error}. 
{raw_response}\"\n else:\n message = f\"{message} {raw_response}\"\n\n raise BadResponseFormat(message)\n\n\ndef apply_error_formatters(\n error_formatters: Callable[..., Any],\n response: RPCResponse,\n) -> RPCResponse:\n if error_formatters:\n formatted_resp = pipe(response, error_formatters)\n return formatted_resp\n else:\n return response\n\n\ndef apply_null_result_formatters(\n null_result_formatters: Callable[..., Any],\n response: RPCResponse,\n params: Optional[Any] = None,\n) -> RPCResponse:\n if null_result_formatters:\n formatted_resp = pipe(params, null_result_formatters)\n return formatted_resp\n else:\n return response\n\n\nclass RequestManager:\n logger = logging.getLogger(\"web3.RequestManager\")\n\n middleware_onion: Union[\n MiddlewareOnion, AsyncMiddlewareOnion, NamedElementOnion[None, None]\n ]\n\n def __init__(\n self,\n w3: Union[\"AsyncWeb3\", \"Web3\"],\n provider: Optional[Union[\"BaseProvider\", \"AsyncBaseProvider\"]] = None,\n middlewares: Optional[\n Union[\n Sequence[Tuple[Middleware, str]], Sequence[Tuple[AsyncMiddleware, str]]\n ]\n ] = None,\n ) -> None:\n self.w3 = w3\n\n if provider is None:\n self.provider = AutoProvider()\n else:\n self.provider = provider\n\n if middlewares is None:\n middlewares = (\n self.async_default_middlewares()\n if self.provider.is_async\n else self.default_middlewares(cast(\"Web3\", w3))\n )\n\n self.middleware_onion = NamedElementOnion(middlewares)\n\n if isinstance(provider, PersistentConnectionProvider):\n # set up the request processor to be able to properly process ordered\n # responses from the persistent connection as FIFO\n provider = cast(PersistentConnectionProvider, self.provider)\n self._request_processor: RequestProcessor = provider._request_processor\n\n w3: Union[\"AsyncWeb3\", \"Web3\"] = None\n _provider = None\n\n @property\n def provider(self) -> Union[\"BaseProvider\", \"AsyncBaseProvider\"]:\n return self._provider\n\n @provider.setter\n def provider(self, provider: Union[\"BaseProvider\", \"AsyncBaseProvider\"]) -> None:\n self._provider = provider\n\n @staticmethod\n def default_middlewares(w3: \"Web3\") -> List[Tuple[Middleware, str]]:\n \"\"\"\n List the default middlewares for the request manager.\n Leaving w3 unspecified will prevent the middleware from resolving names.\n Documentation should remain in sync with these defaults.\n \"\"\"\n return [\n (gas_price_strategy_middleware, \"gas_price_strategy\"),\n (name_to_address_middleware(w3), \"name_to_address\"),\n (attrdict_middleware, \"attrdict\"),\n (validation_middleware, \"validation\"),\n (abi_middleware, \"abi\"),\n (buffered_gas_estimate_middleware, \"gas_estimate\"),\n ]\n\n @staticmethod\n def async_default_middlewares() -> List[Tuple[AsyncMiddleware, str]]:\n \"\"\"\n List the default async middlewares for the request manager.\n Documentation should remain in sync with these defaults.\n \"\"\"\n return [\n (async_gas_price_strategy_middleware, \"gas_price_strategy\"),\n (async_name_to_address_middleware, \"name_to_address\"),\n (async_attrdict_middleware, \"attrdict\"),\n (async_validation_middleware, \"validation\"),\n (async_buffered_gas_estimate_middleware, \"gas_estimate\"),\n ]\n\n #\n # Provider requests and response\n #\n def _make_request(\n self, method: Union[RPCEndpoint, Callable[..., RPCEndpoint]], params: Any\n ) -> RPCResponse:\n provider = cast(\"BaseProvider\", self.provider)\n request_func = provider.request_func(\n cast(\"Web3\", self.w3), cast(MiddlewareOnion, self.middleware_onion)\n )\n self.logger.debug(f\"Making 
request. Method: {method}\")\n return request_func(method, params)\n\n async def _coro_make_request(\n self, method: Union[RPCEndpoint, Callable[..., RPCEndpoint]], params: Any\n ) -> RPCResponse:\n provider = cast(\"AsyncBaseProvider\", self.provider)\n request_func = await provider.request_func(\n cast(\"AsyncWeb3\", self.w3),\n cast(AsyncMiddlewareOnion, self.middleware_onion),\n )\n self.logger.debug(f\"Making request. Method: {method}\")\n return await request_func(method, params)\n\n #\n # formatted_response parses and validates JSON-RPC responses for expected\n # properties (result or an error) with the expected types.\n #\n # Required properties are not strictly enforced to further determine which\n # exception to raise for specific cases.\n #\n # See also: https://www.jsonrpc.org/specification\n #\n @staticmethod\n def formatted_response(\n response: RPCResponse,\n params: Any,\n error_formatters: Optional[Callable[..., Any]] = None,\n null_result_formatters: Optional[Callable[..., Any]] = None,\n ) -> Any:\n # jsonrpc is not enforced (as per the spec) but if present, it must be 2.0\n if \"jsonrpc\" in response and response[\"jsonrpc\"] != \"2.0\":\n _raise_bad_response_format(\n response, 'The \"jsonrpc\" field must be present with a value of \"2.0\"'\n )\n\n # id is not enforced (as per the spec) but if present, it must be a\n # string or integer\n # TODO: v7 - enforce id per the spec\n if \"id\" in response:\n response_id = response[\"id\"]\n # id is always None for errors\n if response_id is None and \"error\" not in response:\n _raise_bad_response_format(\n response, '\"id\" must be None when an error is present'\n )\n elif not isinstance(response_id, (str, int, type(None))):\n _raise_bad_response_format(response, '\"id\" must be a string or integer')\n\n # Response may not include both \"error\" and \"result\"\n if \"error\" in response and \"result\" in response:\n _raise_bad_response_format(\n response, 'Response cannot include both \"error\" and \"result\"'\n )\n\n # Format and validate errors\n elif \"error\" in response:\n error = response.get(\"error\")\n # Raise the error when the value is a string\n if error is None or isinstance(error, str):\n raise ValueError(error)\n\n # Errors must include an integer code\n code = error.get(\"code\")\n if not isinstance(code, int):\n _raise_bad_response_format(response, \"error['code'] must be an integer\")\n elif code == METHOD_NOT_FOUND:\n raise MethodUnavailable(error)\n\n # Errors must include a message\n if not isinstance(error.get(\"message\"), str):\n _raise_bad_response_format(\n response, \"error['message'] must be a string\"\n )\n\n apply_error_formatters(error_formatters, response)\n\n raise ValueError(error)\n\n # Format and validate results\n elif \"result\" in response:\n # Null values for result should apply null_result_formatters\n # Skip when result not present in the response (fallback to False)\n if response.get(\"result\", False) in NULL_RESPONSES:\n apply_null_result_formatters(null_result_formatters, response, params)\n return response.get(\"result\")\n\n # Response from eth_subscription includes response[\"params\"][\"result\"]\n elif (\n response.get(\"method\") == \"eth_subscription\"\n and response.get(\"params\") is not None\n and response[\"params\"].get(\"subscription\") is not None\n and response[\"params\"].get(\"result\") is not None\n ):\n return {\n \"subscription\": response[\"params\"][\"subscription\"],\n \"result\": response[\"params\"][\"result\"],\n }\n\n # Any other response type raises 
BadResponseFormat\n else:\n _raise_bad_response_format(response)\n\n def request_blocking(\n self,\n method: Union[RPCEndpoint, Callable[..., RPCEndpoint]],\n params: Any,\n error_formatters: Optional[Callable[..., Any]] = None,\n null_result_formatters: Optional[Callable[..., Any]] = None,\n ) -> Any:\n \"\"\"\n Make a synchronous request using the provider\n \"\"\"\n response = self._make_request(method, params)\n return self.formatted_response(\n response, params, error_formatters, null_result_formatters\n )\n\n async def coro_request(\n self,\n method: Union[RPCEndpoint, Callable[..., RPCEndpoint]],\n params: Any,\n error_formatters: Optional[Callable[..., Any]] = None,\n null_result_formatters: Optional[Callable[..., Any]] = None,\n ) -> Any:\n \"\"\"\n Coroutine for making a request using the provider\n \"\"\"\n response = await self._coro_make_request(method, params)\n return self.formatted_response(\n response, params, error_formatters, null_result_formatters\n )\n\n # persistent connection\n async def ws_send(self, method: RPCEndpoint, params: Any) -> RPCResponse:\n provider = cast(PersistentConnectionProvider, self._provider)\n request_func = await provider.request_func(\n cast(\"AsyncWeb3\", self.w3),\n cast(AsyncMiddlewareOnion, self.middleware_onion),\n )\n self.logger.debug(\n \"Making request to open websocket connection - \"\n f\"uri: {provider.endpoint_uri}, method: {method}\"\n )\n response = await request_func(method, params)\n return await self._process_ws_response(response)\n\n async def ws_recv(self) -> Any:\n return await self._ws_recv_stream().__anext__()\n\n def _persistent_recv_stream(self) -> \"_AsyncPersistentRecvStream\":\n return _AsyncPersistentRecvStream(self)\n\n async def _ws_recv_stream(self) -> AsyncGenerator[RPCResponse, None]:\n if not isinstance(self._provider, PersistentConnectionProvider):\n raise TypeError(\n \"Only websocket providers that maintain an open, persistent connection \"\n \"can listen to websocket recv streams.\"\n )\n\n while True:\n # look in the cache for a response\n response = await self._request_processor.pop_raw_response(subscription=True)\n if response is not None:\n break\n else:\n # if no response in the cache, check the websocket connection\n if not self._provider._ws_lock.locked():\n async with self._provider._ws_lock:\n try:\n # keep timeout low but reasonable to check both the cache\n # and the websocket connection for new responses\n response = await self._provider._ws_recv(timeout=2)\n except asyncio.TimeoutError:\n # if no response received, continue to next iteration\n continue\n\n if response.get(\"method\") == \"eth_subscription\":\n break\n else:\n await self._provider._request_processor.cache_raw_response(\n response\n )\n\n # this is important to let asyncio run other tasks\n await asyncio.sleep(0.05)\n\n yield await self._process_ws_response(response)\n\n async def _process_ws_response(self, response: RPCResponse) -> RPCResponse:\n provider = cast(PersistentConnectionProvider, self._provider)\n request_info = self._request_processor.get_request_information_for_response(\n response\n )\n\n if request_info is None:\n self.logger.debug(\"No cache key found for response, returning raw response\")\n return response\n else:\n if request_info.method == \"eth_subscribe\" and \"result\" in response.keys():\n # if response for the initial eth_subscribe request, which returns the\n # subscription id\n subscription_id = response[\"result\"]\n cache_key = generate_cache_key(subscription_id)\n if cache_key not in 
self._request_processor._request_information_cache:\n # cache by subscription id in order to process each response for the\n # subscription as it comes in\n request_info.subscription_id = subscription_id\n provider.logger.debug(\n \"Caching eth_subscription info:\\n \"\n f\"cache_key={cache_key},\\n \"\n f\"request_info={request_info.__dict__}\"\n )\n self._request_processor._request_information_cache.cache(\n cache_key, request_info\n )\n\n # pipe response back through middleware response processors\n if len(request_info.middleware_response_processors) > 0:\n response = pipe(response, *request_info.middleware_response_processors)\n\n (\n result_formatters,\n error_formatters,\n null_formatters,\n ) = request_info.response_formatters\n partly_formatted_response = self.formatted_response(\n response,\n request_info.params,\n error_formatters,\n null_formatters,\n )\n return apply_result_formatters(result_formatters, partly_formatted_response)\n\n\nclass _AsyncPersistentRecvStream:\n \"\"\"\n Async generator for receiving responses from a persistent connection. This\n abstraction is necessary to define the `__aiter__()` method required for\n use with \"async for\" loops.\n \"\"\"\n\n def __init__(self, manager: RequestManager, *args: Any, **kwargs: Any) -> None:\n self.manager = manager\n super().__init__(*args, **kwargs)\n\n def __aiter__(self) -> Self:\n return self\n\n async def __anext__(self) -> RPCResponse:\n try:\n return await self.manager.ws_recv()\n except ConnectionClosedOK:\n raise StopAsyncIteration\n",
"path": "web3/manager.py"
},
{
"content": "from abc import (\n ABC,\n)\nimport asyncio\nimport logging\nfrom typing import (\n Optional,\n)\n\nfrom websockets.legacy.client import (\n WebSocketClientProtocol,\n)\n\nfrom web3.providers.async_base import (\n AsyncJSONBaseProvider,\n)\nfrom web3.providers.websocket.request_processor import (\n RequestProcessor,\n)\nfrom web3.types import (\n RPCResponse,\n)\n\nDEFAULT_PERSISTENT_CONNECTION_TIMEOUT = 20\n\n\nclass PersistentConnectionProvider(AsyncJSONBaseProvider, ABC):\n logger = logging.getLogger(\"web3.providers.PersistentConnectionProvider\")\n has_persistent_connection = True\n\n _ws: Optional[WebSocketClientProtocol] = None\n _ws_lock: asyncio.Lock = asyncio.Lock()\n _request_processor: RequestProcessor\n\n def __init__(\n self,\n endpoint_uri: str,\n request_timeout: float = DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,\n subscription_response_deque_size: int = 500,\n ) -> None:\n super().__init__()\n self.endpoint_uri = endpoint_uri\n self._request_processor = RequestProcessor(\n self,\n subscription_response_deque_size=subscription_response_deque_size,\n )\n self.request_timeout = request_timeout\n\n async def connect(self) -> None:\n raise NotImplementedError(\"Must be implemented by subclasses\")\n\n async def disconnect(self) -> None:\n raise NotImplementedError(\"Must be implemented by subclasses\")\n\n async def _ws_recv(self, timeout: float = None) -> RPCResponse:\n raise NotImplementedError(\"Must be implemented by subclasses\")\n",
"path": "web3/providers/persistent.py"
},
{
"content": "from collections import (\n deque,\n)\nfrom copy import (\n copy,\n)\nfrom typing import (\n TYPE_CHECKING,\n Any,\n Callable,\n Deque,\n Dict,\n Optional,\n Tuple,\n)\n\nfrom web3._utils.caching import (\n RequestInformation,\n generate_cache_key,\n)\nfrom web3.types import (\n RPCEndpoint,\n RPCResponse,\n)\nfrom web3.utils import (\n SimpleCache,\n)\n\nif TYPE_CHECKING:\n from web3.providers.persistent import (\n PersistentConnectionProvider,\n )\n\n\nclass RequestProcessor:\n _request_information_cache: SimpleCache\n _request_response_cache: SimpleCache\n _subscription_response_deque: Deque[RPCResponse]\n\n def __init__(\n self,\n provider: \"PersistentConnectionProvider\",\n subscription_response_deque_size: int = 500,\n ) -> None:\n self._provider = provider\n\n self._request_information_cache = SimpleCache(500)\n self._request_response_cache = SimpleCache(500)\n self._subscription_response_deque = deque(\n maxlen=subscription_response_deque_size\n )\n\n @property\n def active_subscriptions(self) -> Dict[str, Any]:\n return {\n value.subscription_id: {\"params\": value.params}\n for key, value in self._request_information_cache.items()\n if value.method == \"eth_subscribe\"\n }\n\n # request information cache\n\n def cache_request_information(\n self,\n method: RPCEndpoint,\n params: Any,\n response_formatters: Tuple[Callable[..., Any], ...],\n ) -> str:\n # copy the request counter and find the next request id without incrementing\n # since this is done when / if the request is successfully sent\n request_id = next(copy(self._provider.request_counter))\n cache_key = generate_cache_key(request_id)\n\n self._bump_cache_if_key_present(cache_key, request_id)\n\n request_info = RequestInformation(\n method,\n params,\n response_formatters,\n )\n self._provider.logger.debug(\n f\"Caching request info:\\n request_id={request_id},\\n\"\n f\" cache_key={cache_key},\\n request_info={request_info.__dict__}\"\n )\n self._request_information_cache.cache(\n cache_key,\n request_info,\n )\n return cache_key\n\n def _bump_cache_if_key_present(self, cache_key: str, request_id: int) -> None:\n \"\"\"\n If the cache key is present in the cache, bump the cache key and request id\n by one to make room for the new request. This behavior is necessary when a\n request is made but inner requests, say to `eth_estimateGas` if the `gas` is\n missing, are made before the original request is sent.\n \"\"\"\n if cache_key in self._request_information_cache:\n original_request_info = self._request_information_cache.get_cache_entry(\n cache_key\n )\n bump = generate_cache_key(request_id + 1)\n\n # recursively bump the cache if the new key is also present\n self._bump_cache_if_key_present(bump, request_id + 1)\n\n self._provider.logger.debug(\n \"Caching internal request. 
Bumping original request in cache:\\n\"\n f\" request_id=[{request_id}] -> [{request_id + 1}],\\n\"\n f\" cache_key=[{cache_key}] -> [{bump}],\\n\"\n f\" request_info={original_request_info.__dict__}\"\n )\n self._request_information_cache.cache(bump, original_request_info)\n\n def pop_cached_request_information(\n self, cache_key: str\n ) -> Optional[RequestInformation]:\n request_info = self._request_information_cache.pop(cache_key)\n if request_info is not None:\n self._provider.logger.debug(\n \"Request info popped from cache:\\n\"\n f\" cache_key={cache_key},\\n request_info={request_info.__dict__}\"\n )\n return request_info\n\n def get_request_information_for_response(\n self,\n response: RPCResponse,\n ) -> RequestInformation:\n if \"method\" in response and response[\"method\"] == \"eth_subscription\":\n if \"params\" not in response:\n raise ValueError(\"Subscription response must have params field\")\n if \"subscription\" not in response[\"params\"]:\n raise ValueError(\n \"Subscription response params must have subscription field\"\n )\n\n # retrieve the request info from the cache using the subscription id\n cache_key = generate_cache_key(response[\"params\"][\"subscription\"])\n request_info = (\n # don't pop the request info from the cache, since we need to keep it\n # to process future subscription responses\n # i.e. subscription request information remains in the cache\n self._request_information_cache.get_cache_entry(cache_key)\n )\n\n else:\n # retrieve the request info from the cache using the request id\n cache_key = generate_cache_key(response[\"id\"])\n request_info = (\n # pop the request info from the cache since we don't need to keep it\n # this keeps the cache size bounded\n self.pop_cached_request_information(cache_key)\n )\n if (\n request_info is not None\n and request_info.method == \"eth_unsubscribe\"\n and response.get(\"result\") is True\n ):\n # if successful unsubscribe request, remove the subscription request\n # information from the cache since it is no longer needed\n subscription_id = request_info.params[0]\n subscribe_cache_key = generate_cache_key(subscription_id)\n self.pop_cached_request_information(subscribe_cache_key)\n\n # rebuild the deque without the unsubscribed subscription responses\n self._subscription_response_deque = deque(\n filter(\n lambda sub_response: sub_response[\"params\"][\"subscription\"]\n != subscription_id,\n self._subscription_response_deque,\n ),\n maxlen=self._subscription_response_deque.maxlen,\n )\n\n return request_info\n\n def append_middleware_response_processor(\n self,\n middleware_response_processor: Callable[..., Any],\n ) -> None:\n request_id = next(copy(self._provider.request_counter)) - 1\n cache_key = generate_cache_key(request_id)\n current_request_cached_info: RequestInformation = (\n self._request_information_cache.get_cache_entry(cache_key)\n )\n if current_request_cached_info:\n current_request_cached_info.middleware_response_processors.append(\n middleware_response_processor\n )\n\n # raw response cache\n\n async def cache_raw_response(\n self, raw_response: Any, subscription: bool = False\n ) -> None:\n if subscription:\n self._provider.logger.debug(\n f\"Caching subscription response:\\n response={raw_response}\"\n )\n self._subscription_response_deque.append(raw_response)\n else:\n response_id = raw_response.get(\"id\")\n cache_key = generate_cache_key(response_id)\n self._provider.logger.debug(\n f\"Caching response:\\n response_id={response_id},\\n\"\n f\" cache_key={cache_key},\\n 
response={raw_response}\"\n )\n self._request_response_cache.cache(cache_key, raw_response)\n\n async def pop_raw_response(\n self, cache_key: str = None, subscription: bool = False\n ) -> Any:\n if subscription:\n deque_length = len(self._subscription_response_deque)\n if deque_length == 0:\n return None\n\n raw_response = self._subscription_response_deque.popleft()\n self._provider.logger.debug(\n f\"Subscription response deque is not empty. Processing {deque_length} \"\n \"subscription(s) as FIFO before receiving new response.\"\n )\n self._provider.logger.debug(\n \"Cached subscription response popped from deque to be processed:\\n\"\n f\" raw_response={raw_response}\"\n )\n else:\n if not cache_key:\n raise ValueError(\n \"Must provide cache key when popping a non-subscription response.\"\n )\n\n raw_response = self._request_response_cache.pop(cache_key)\n if raw_response is not None:\n self._provider.logger.debug(\n \"Cached response popped from cache to be processed:\\n\"\n f\" cache_key={cache_key},\\n\"\n f\" raw_response={raw_response}\"\n )\n\n return raw_response\n\n # request processor class methods\n\n def clear_caches(self) -> None:\n \"\"\"\n Clear the request processor caches.\n \"\"\"\n\n self._request_information_cache.clear()\n self._request_response_cache.clear()\n self._subscription_response_deque.clear()\n",
"path": "web3/providers/websocket/request_processor.py"
},
{
"content": "import asyncio\nimport json\nimport logging\nimport os\nfrom typing import (\n Any,\n Dict,\n Optional,\n Union,\n)\n\nfrom eth_typing import (\n URI,\n)\nfrom toolz import (\n merge,\n)\nfrom websockets.client import (\n connect,\n)\nfrom websockets.exceptions import (\n WebSocketException,\n)\n\nfrom web3._utils.caching import (\n generate_cache_key,\n)\nfrom web3.exceptions import (\n ProviderConnectionError,\n TimeExhausted,\n Web3ValidationError,\n)\nfrom web3.providers.persistent import (\n DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,\n PersistentConnectionProvider,\n)\nfrom web3.types import (\n RPCEndpoint,\n RPCId,\n RPCResponse,\n)\n\nDEFAULT_PING_INTERVAL = 30 # 30 seconds\nDEFAULT_PING_TIMEOUT = 300 # 5 minutes\n\nVALID_WEBSOCKET_URI_PREFIXES = {\"ws://\", \"wss://\"}\nRESTRICTED_WEBSOCKET_KWARGS = {\"uri\", \"loop\"}\nDEFAULT_WEBSOCKET_KWARGS = {\n # set how long to wait between pings from the server\n \"ping_interval\": DEFAULT_PING_INTERVAL,\n # set how long to wait without a pong response before closing the connection\n \"ping_timeout\": DEFAULT_PING_TIMEOUT,\n}\n\n\ndef get_default_endpoint() -> URI:\n return URI(os.environ.get(\"WEB3_WS_PROVIDER_URI\", \"ws://127.0.0.1:8546\"))\n\n\nclass WebsocketProviderV2(PersistentConnectionProvider):\n logger = logging.getLogger(\"web3.providers.WebsocketProviderV2\")\n is_async: bool = True\n _max_connection_retries: int = 5\n\n def __init__(\n self,\n endpoint_uri: Optional[Union[URI, str]] = None,\n websocket_kwargs: Optional[Dict[str, Any]] = None,\n request_timeout: Optional[float] = DEFAULT_PERSISTENT_CONNECTION_TIMEOUT,\n ) -> None:\n self.endpoint_uri = URI(endpoint_uri)\n if self.endpoint_uri is None:\n self.endpoint_uri = get_default_endpoint()\n\n if not any(\n self.endpoint_uri.startswith(prefix)\n for prefix in VALID_WEBSOCKET_URI_PREFIXES\n ):\n raise Web3ValidationError(\n f\"Websocket endpoint uri must begin with 'ws://' or 'wss://': \"\n f\"{self.endpoint_uri}\"\n )\n\n if websocket_kwargs is not None:\n found_restricted_keys = set(websocket_kwargs).intersection(\n RESTRICTED_WEBSOCKET_KWARGS\n )\n if found_restricted_keys:\n raise Web3ValidationError(\n \"Found restricted keys for websocket_kwargs: \"\n f\"{found_restricted_keys}.\"\n )\n\n self.websocket_kwargs = merge(DEFAULT_WEBSOCKET_KWARGS, websocket_kwargs or {})\n\n super().__init__(endpoint_uri, request_timeout=request_timeout)\n\n def __str__(self) -> str:\n return f\"Websocket connection: {self.endpoint_uri}\"\n\n async def is_connected(self, show_traceback: bool = False) -> bool:\n if not self._ws:\n return False\n\n try:\n await self._ws.pong()\n return True\n\n except WebSocketException as e:\n if show_traceback:\n raise ProviderConnectionError(\n f\"Error connecting to endpoint: '{self.endpoint_uri}'\"\n ) from e\n return False\n\n async def connect(self) -> None:\n _connection_attempts = 0\n _backoff_rate_change = 1.75\n _backoff_time = 1.75\n\n while _connection_attempts != self._max_connection_retries:\n try:\n _connection_attempts += 1\n self._ws = await connect(self.endpoint_uri, **self.websocket_kwargs)\n break\n except WebSocketException as e:\n if _connection_attempts == self._max_connection_retries:\n raise ProviderConnectionError(\n f\"Could not connect to endpoint: {self.endpoint_uri}. \"\n f\"Retries exceeded max of {self._max_connection_retries}.\"\n ) from e\n self.logger.info(\n f\"Could not connect to endpoint: {self.endpoint_uri}. 
Retrying in \"\n f\"{round(_backoff_time, 1)} seconds.\",\n exc_info=True,\n )\n await asyncio.sleep(_backoff_time)\n _backoff_time *= _backoff_rate_change\n\n async def disconnect(self) -> None:\n if self._ws is not None and not self._ws.closed:\n await self._ws.close()\n self._ws = None\n self.logger.debug(\n f'Successfully disconnected from endpoint: \"{self.endpoint_uri}'\n )\n\n self._request_processor.clear_caches()\n\n async def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:\n request_data = self.encode_rpc_request(method, params)\n\n if self._ws is None:\n raise ProviderConnectionError(\n \"Connection to websocket has not been initiated for the provider.\"\n )\n\n await asyncio.wait_for(\n self._ws.send(request_data), timeout=self.request_timeout\n )\n\n current_request_id = json.loads(request_data)[\"id\"]\n response = await self._get_response_for_request_id(current_request_id)\n\n return response\n\n async def _get_response_for_request_id(self, request_id: RPCId) -> RPCResponse:\n async def _match_response_id_to_request_id() -> RPCResponse:\n request_cache_key = generate_cache_key(request_id)\n\n while True:\n if request_cache_key in self._request_processor._request_response_cache:\n # if response is already cached, pop it from cache\n self.logger.debug(\n f\"Response for id {request_id} is already cached, pop it \"\n \"from the cache.\"\n )\n return await self._request_processor.pop_raw_response(\n cache_key=request_cache_key,\n )\n\n else:\n if not self._ws_lock.locked():\n async with self._ws_lock:\n self.logger.debug(\n f\"Response for id {request_id} is not cached, calling \"\n \"`recv()` on websocket.\"\n )\n try:\n # keep timeout low but reasonable to check both the\n # cache and the websocket connection for new responses\n response = await self._ws_recv(timeout=2)\n except asyncio.TimeoutError:\n # keep the request timeout around the whole of this\n # while loop in case the response sneaks into the cache\n # from another call.\n continue\n\n response_id = response.get(\"id\")\n\n if response_id == request_id:\n self.logger.debug(\n f\"Received and returning response for id {request_id}.\"\n )\n return response\n else:\n # cache all responses that are not the desired response\n self.logger.debug(\"Undesired response received, caching.\")\n is_subscription = (\n response.get(\"method\") == \"eth_subscription\"\n )\n await self._request_processor.cache_raw_response(\n response, subscription=is_subscription\n )\n\n # this is important to let asyncio run other tasks\n await asyncio.sleep(0.05)\n\n try:\n # Add the request timeout around the while loop that checks the request\n # cache and tried to recv(). If the request is neither in the cache, nor\n # received within the request_timeout, raise ``TimeExhausted``.\n return await asyncio.wait_for(\n _match_response_id_to_request_id(), self.request_timeout\n )\n except asyncio.TimeoutError:\n raise TimeExhausted(\n f\"Timed out waiting for response with request id `{request_id}` after \"\n f\"{self.request_timeout} second(s). This may be due to the provider \"\n \"not returning a response with the same id that was sent in the \"\n \"request or an exception raised during the request was caught and \"\n \"allowed to continue.\"\n )\n\n async def _ws_recv(self, timeout: float = None) -> RPCResponse:\n return json.loads(await asyncio.wait_for(self._ws.recv(), timeout=timeout))\n",
"path": "web3/providers/websocket/websocket_v2.py"
}
] | 8_8 | python | import unittest
import asyncio
import sys
class TestWebsocketProviderV2(unittest.TestCase):
def test_default_timeout_increased(self):
from web3.providers.websocket import WebsocketProviderV2
# Test that the default timeout for awaiting responses is now 50 seconds.
provider = WebsocketProviderV2("ws://mocked")
self.assertEqual(provider.request_timeout, 50, "Default timeout should be 50 seconds")
def test_caching_methods_synchronous(self):
from web3.providers.websocket import WebsocketProviderV2
# Test that caching methods in the request processor are synchronous.
provider = WebsocketProviderV2("ws://mocked")
self.assertFalse(asyncio.iscoroutinefunction(provider._request_processor.cache_raw_response), "cache_raw_response should be synchronous")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestWebsocketProviderV2))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
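The second test above distinguishes synchronous from asynchronous methods with `asyncio.iscoroutinefunction`. A minimal, self-contained illustration of that check follows; the class here is made up, and only the method name mirrors the one under test:

import asyncio

class RequestProcessor:
    def cache_raw_response(self, raw_response):
        # Plain method: iscoroutinefunction() returns False.
        pass

    async def fetch(self):
        # Declared with async def: iscoroutinefunction() returns True.
        pass

rp = RequestProcessor()
assert not asyncio.iscoroutinefunction(rp.cache_raw_response)
assert asyncio.iscoroutinefunction(rp.fetch)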
https://github.com/teamqurrent/web3.py | Improve the accuracy of the empty-string checks in the `ens/utils.py` file. Specifically, strip the input string before checking whether it is empty, so that inputs containing only blank spaces are also treated as empty names. | 12f3702 | -e . [tester]
idna
pytest
pytest_asyncio
eth-tester[py-evm]==v0.9.1-b.1
py-geth>=3.11.0 | python3.9 | b5e302a7 | diff --git a/ens/utils.py b/ens/utils.py
--- a/ens/utils.py
+++ b/ens/utils.py
@@ -127,10 +127,12 @@ def normalize_name(name: str) -> str:
elif isinstance(name, (bytes, bytearray)):
name = name.decode("utf-8")
+ clean_name = name.strip()
+
try:
- return idna.uts46_remap(name, std3_rules=True, transitional=False)
+ return idna.uts46_remap(clean_name, std3_rules=True, transitional=False)
except idna.IDNAError as exc:
- raise InvalidName(f"{name} is an invalid name, because {exc}") from exc
+ raise InvalidName(f"{clean_name} is an invalid name, because {exc}") from exc
def ens_encode_name(name: str) -> bytes:
@@ -263,7 +265,7 @@ def is_none_or_zero_address(addr: Union[Address, ChecksumAddress, HexAddress]) -
def is_empty_name(name: str) -> bool:
- return name in {None, ".", ""}
+ return name is None or name.strip() in ("", ".")
def is_valid_ens_name(ens_name: str) -> bool:
diff --git a/tests/ens/test_utils.py b/tests/ens/test_utils.py
--- a/tests/ens/test_utils.py
+++ b/tests/ens/test_utils.py
@@ -95,7 +95,7 @@ def test_ens_encode_name_validating_total_encoded_name_size(name, expected):
assert ens_encoded == expected
-@pytest.mark.parametrize("empty_name", ("", "."))
+@pytest.mark.parametrize("empty_name", ("", ".", None, " ", " "))
def test_ens_encode_name_returns_single_zero_byte_for_empty_name(empty_name):
assert ens_encode_name(empty_name) == b"\00"
| [
{
"content": "from datetime import (\n datetime,\n timezone,\n)\nfrom typing import (\n TYPE_CHECKING,\n Any,\n Callable,\n Collection,\n Dict,\n List,\n Optional,\n Sequence,\n Tuple,\n Type,\n Union,\n cast,\n)\n\nfrom eth_typing import (\n Address,\n ChecksumAddress,\n HexAddress,\n HexStr,\n)\nfrom eth_utils import (\n is_same_address,\n remove_0x_prefix,\n to_bytes,\n to_normalized_address,\n)\nfrom eth_utils.abi import (\n collapse_if_tuple,\n)\nfrom hexbytes import (\n HexBytes,\n)\nimport idna\n\nfrom ens.constants import (\n ACCEPTABLE_STALE_HOURS,\n AUCTION_START_GAS_CONSTANT,\n AUCTION_START_GAS_MARGINAL,\n EMPTY_ADDR_HEX,\n EMPTY_SHA3_BYTES,\n REVERSE_REGISTRAR_DOMAIN,\n)\nfrom ens.exceptions import (\n ENSValidationError,\n InvalidName,\n)\n\ndefault = object()\n\n\nif TYPE_CHECKING:\n from web3 import ( # noqa: F401\n AsyncWeb3,\n Web3 as _Web3,\n )\n from web3.providers import ( # noqa: F401\n AsyncBaseProvider,\n BaseProvider,\n )\n from web3.types import ( # noqa: F401\n ABIFunction,\n AsyncMiddleware,\n Middleware,\n RPCEndpoint,\n )\n\n\ndef Web3() -> Type[\"_Web3\"]:\n from web3 import (\n Web3 as Web3Main,\n )\n\n return Web3Main\n\n\ndef init_web3(\n provider: \"BaseProvider\" = cast(\"BaseProvider\", default),\n middlewares: Optional[Sequence[Tuple[\"Middleware\", str]]] = None,\n) -> \"_Web3\":\n from web3 import (\n Web3 as Web3Main,\n )\n from web3.eth import (\n Eth as EthMain,\n )\n\n if provider is default:\n w3 = Web3Main(ens=None, modules={\"eth\": (EthMain)})\n else:\n w3 = Web3Main(provider, middlewares, ens=None, modules={\"eth\": (EthMain)})\n\n return customize_web3(w3)\n\n\ndef customize_web3(w3: \"_Web3\") -> \"_Web3\":\n from web3.middleware import (\n make_stalecheck_middleware,\n )\n\n if w3.middleware_onion.get(\"name_to_address\"):\n w3.middleware_onion.remove(\"name_to_address\")\n\n if not w3.middleware_onion.get(\"stalecheck\"):\n w3.middleware_onion.add(\n make_stalecheck_middleware(ACCEPTABLE_STALE_HOURS * 3600), name=\"stalecheck\"\n )\n return w3\n\n\ndef normalize_name(name: str) -> str:\n \"\"\"\n Clean the fully qualified name, as defined in ENS `EIP-137\n <https://github.com/ethereum/EIPs/blob/master/EIPS/eip-137.md#name-syntax>`_\n\n This does *not* enforce whether ``name`` is a label or fully qualified domain.\n\n :param str name: the dot-separated ENS name\n :raises InvalidName: if ``name`` has invalid syntax\n \"\"\"\n if not name:\n return name\n elif isinstance(name, (bytes, bytearray)):\n name = name.decode(\"utf-8\")\n\n try:\n return idna.uts46_remap(name, std3_rules=True, transitional=False)\n except idna.IDNAError as exc:\n raise InvalidName(f\"{name} is an invalid name, because {exc}\") from exc\n\n\ndef ens_encode_name(name: str) -> bytes:\n \"\"\"\n Encode a name according to DNS standards specified in section 3.1\n of RFC1035 with the following validations:\n\n - There is no limit on the total length of the encoded name\n and the limit on labels is the ENS standard of 255.\n\n - Return a single 0-octet, b'\\x00', if empty name.\n \"\"\"\n if is_empty_name(name):\n return b\"\\x00\"\n\n normalized_name = normalize_name(name)\n\n labels = normalized_name.split(\".\")\n labels_as_bytes = [to_bytes(text=label) for label in labels]\n\n # raises if len(label) > 255:\n for index, label in enumerate(labels):\n if len(label) > 255:\n raise ENSValidationError(\n f\"Label at position {index} too long after encoding.\"\n )\n\n # concat label size in bytes to each label:\n dns_prepped_labels = [to_bytes(len(label)) + label for label in 
labels_as_bytes]\n\n # return the joined prepped labels in order and append the zero byte at the end:\n return b\"\".join(dns_prepped_labels) + b\"\\x00\"\n\n\ndef is_valid_name(name: str) -> bool:\n \"\"\"\n Validate whether the fully qualified name is valid, as defined in ENS `EIP-137\n <https://github.com/ethereum/EIPs/blob/master/EIPS/eip-137.md#name-syntax>`_\n\n :param str name: the dot-separated ENS name\n :returns: True if ``name`` is set, and :meth:`~ens.ENS.nameprep` will not\n raise InvalidName\n \"\"\"\n if not name:\n return False\n try:\n normalize_name(name)\n return True\n except InvalidName:\n return False\n\n\ndef to_utc_datetime(timestamp: float) -> Optional[datetime]:\n return datetime.fromtimestamp(timestamp, timezone.utc) if timestamp else None\n\n\ndef sha3_text(val: Union[str, bytes]) -> HexBytes:\n if isinstance(val, str):\n val = val.encode(\"utf-8\")\n return Web3().keccak(val)\n\n\ndef label_to_hash(label: str) -> HexBytes:\n label = normalize_name(label)\n if \".\" in label:\n raise ValueError(f\"Cannot generate hash for label {label!r} with a '.'\")\n return Web3().keccak(text=label)\n\n\ndef normal_name_to_hash(name: str) -> HexBytes:\n node = EMPTY_SHA3_BYTES\n if name:\n labels = name.split(\".\")\n for label in reversed(labels):\n labelhash = label_to_hash(label)\n assert isinstance(labelhash, bytes)\n assert isinstance(node, bytes)\n node = Web3().keccak(node + labelhash)\n return node\n\n\ndef raw_name_to_hash(name: str) -> HexBytes:\n \"\"\"\n Generate the namehash. This is also known as the ``node`` in ENS contracts.\n\n In normal operation, generating the namehash is handled\n behind the scenes. For advanced usage, it is a helpful utility.\n\n This normalizes the name with `nameprep\n <https://github.com/ethereum/EIPs/blob/master/EIPS/eip-137.md#name-syntax>`_\n before hashing.\n\n :param str name: ENS name to hash\n :return: the namehash\n :rtype: bytes\n :raises InvalidName: if ``name`` has invalid syntax\n \"\"\"\n normalized_name = normalize_name(name)\n return normal_name_to_hash(normalized_name)\n\n\ndef address_in(\n address: ChecksumAddress, addresses: Collection[ChecksumAddress]\n) -> bool:\n return any(is_same_address(address, item) for item in addresses)\n\n\ndef address_to_reverse_domain(address: ChecksumAddress) -> str:\n lower_unprefixed_address = remove_0x_prefix(HexStr(to_normalized_address(address)))\n return lower_unprefixed_address + \".\" + REVERSE_REGISTRAR_DOMAIN\n\n\ndef estimate_auction_start_gas(labels: Collection[str]) -> int:\n return AUCTION_START_GAS_CONSTANT + AUCTION_START_GAS_MARGINAL * len(labels)\n\n\ndef assert_signer_in_modifier_kwargs(modifier_kwargs: Any) -> ChecksumAddress:\n ERR_MSG = \"You must specify the sending account\"\n assert len(modifier_kwargs) == 1, ERR_MSG\n\n _modifier_type, modifier_dict = dict(modifier_kwargs).popitem()\n if \"from\" not in modifier_dict:\n raise TypeError(ERR_MSG)\n\n return modifier_dict[\"from\"]\n\n\ndef is_none_or_zero_address(addr: Union[Address, ChecksumAddress, HexAddress]) -> bool:\n return not addr or addr == EMPTY_ADDR_HEX\n\n\ndef is_empty_name(name: str) -> bool:\n return name in {None, \".\", \"\"}\n\n\ndef is_valid_ens_name(ens_name: str) -> bool:\n split_domain = ens_name.split(\".\")\n if len(split_domain) == 1:\n return False\n for name in split_domain:\n if not is_valid_name(name):\n return False\n return True\n\n\n# borrowed from similar method at `web3._utils.abi` due to circular dependency\ndef get_abi_output_types(abi: \"ABIFunction\") -> List[str]:\n return 
(\n []\n if abi[\"type\"] == \"fallback\"\n else [collapse_if_tuple(cast(Dict[str, Any], arg)) for arg in abi[\"outputs\"]]\n )\n\n\n# -- async -- #\n\n\ndef init_async_web3(\n provider: \"AsyncBaseProvider\" = cast(\"AsyncBaseProvider\", default),\n middlewares: Optional[Sequence[Tuple[\"AsyncMiddleware\", str]]] = (),\n) -> \"AsyncWeb3\":\n from web3 import (\n AsyncWeb3 as AsyncWeb3Main,\n )\n from web3.eth import (\n AsyncEth as AsyncEthMain,\n )\n\n middlewares = list(middlewares)\n for i, (middleware, name) in enumerate(middlewares):\n if name == \"name_to_address\":\n middlewares.pop(i)\n\n if \"stalecheck\" not in (name for mw, name in middlewares):\n middlewares.append((_async_ens_stalecheck_middleware, \"stalecheck\"))\n\n if provider is default:\n async_w3 = AsyncWeb3Main(\n middlewares=middlewares, ens=None, modules={\"eth\": (AsyncEthMain)}\n )\n else:\n async_w3 = AsyncWeb3Main(\n provider,\n middlewares=middlewares,\n ens=None,\n modules={\"eth\": (AsyncEthMain)},\n )\n\n return async_w3\n\n\nasync def _async_ens_stalecheck_middleware(\n make_request: Callable[[\"RPCEndpoint\", Any], Any], w3: \"AsyncWeb3\"\n) -> \"Middleware\":\n from web3.middleware import (\n async_make_stalecheck_middleware,\n )\n\n middleware = await async_make_stalecheck_middleware(ACCEPTABLE_STALE_HOURS * 3600)\n return await middleware(make_request, w3)\n",
"path": "ens/utils.py"
},
{
"content": "import pytest\n\nfrom eth_utils import (\n is_integer,\n to_bytes,\n)\n\nfrom ens.exceptions import (\n ENSValidationError,\n)\nfrom ens.utils import (\n ens_encode_name,\n init_async_web3,\n init_web3,\n)\nfrom web3.eth import (\n AsyncEth,\n)\nfrom web3.providers.eth_tester import (\n AsyncEthereumTesterProvider,\n)\n\n\ndef test_init_web3_adds_expected_middlewares():\n w3 = init_web3()\n middlewares = map(str, w3.manager.middleware_onion)\n assert \"stalecheck_middleware\" in next(middlewares)\n\n\n@pytest.mark.parametrize(\n \"name,expected\",\n (\n # test some allowed cases\n (\"tester.eth\", b\"\\x06tester\\x03eth\\x00\"),\n (\n \"a.b.c.d.e.f.g.h.i.j.k.l.m.n.o.p\",\n b\"\\x01a\\x01b\\x01c\\x01d\\x01e\\x01f\\x01g\\x01h\\x01i\\x01j\\x01k\\x01l\\x01m\\x01n\\x01o\\x01p\\x00\", # noqa: E501\n ),\n (\n \"1.2.3.4.5.6.7.8.9.10\",\n b\"\\x011\\x012\\x013\\x014\\x015\\x016\\x017\\x018\\x019\\x0210\\x00\",\n ),\n (\"abc.123.def-456.eth\", b\"\\x03abc\\x03123\\x07def-456\\x03eth\\x00\"),\n (\"abc.123.def-456.eth\", b\"\\x03abc\\x03123\\x07def-456\\x03eth\\x00\"),\n (\n \"nhéééééé.eth\",\n b\"\\x0enh\\xc3\\xa9\\xc3\\xa9\\xc3\\xa9\\xc3\\xa9\\xc3\\xa9\\xc3\\xa9\\x03eth\\x00\",\n ),\n (\"🌈rainbow.eth\", b\"\\x0b\\xf0\\x9f\\x8c\\x88rainbow\\x03eth\\x00\"),\n (\"🐔🐔.tk\", b\"\\x08\\xf0\\x9f\\x90\\x94\\xf0\\x9f\\x90\\x94\\x02tk\\x00\"),\n # test that label length may be less than 255\n (f\"{'a' * 255}.b\", b\"\\xff\" + (b\"a\" * 255) + b\"\\x01b\\x00\"),\n (f\"a.{'b' * 255}\", b\"\\x01a\" + b\"\\xff\" + (b\"b\" * 255) + b\"\\x00\"),\n (f\"abc-123.{'b' * 255}\", b\"\\x07abc-123\" + b\"\\xff\" + b\"b\" * 255 + b\"\\x00\"),\n ),\n)\ndef test_ens_encode_name(name, expected):\n assert ens_encode_name(name) == expected\n\n\n@pytest.mark.parametrize(\n \"name,expected\",\n (\n (\n f\"{'a' * 63}.{'b' * 63}.{'c' * 63}.{'d' * 63}.{'e' * 63}.{'f' * 63}.{'g' * 63}\", # noqa: E501\n b\"\".join([b\"?\" + to_bytes(text=label) * 63 for label in \"abcdefg\"])\n + b\"\\x00\",\n ),\n (\n f\"{'a-1' * 21}.{'b-2' * 21}.{'c-3' * 21}.{'d-4' * 21}.{'e-5' * 21}.{'f-6' * 21}\", # noqa: E501\n b\"\".join(\n [\n b\"?\" + to_bytes(text=label) * 21\n for label in [\n \"a-1\",\n \"b-2\",\n \"c-3\",\n \"d-4\",\n \"e-5\",\n \"f-6\",\n ]\n ]\n )\n + b\"\\x00\",\n ),\n ),\n)\ndef test_ens_encode_name_validating_total_encoded_name_size(name, expected):\n # This test is important because dns validation technically limits the\n # total encoded domain name size to 255. 
ENSIP-10 expects the name to be\n # DNS encoded with one of the validation exceptions being that the\n # total encoded size can be any length.\n ens_encoded = ens_encode_name(name)\n assert len(ens_encoded) > 255\n assert ens_encoded == expected\n\n\n@pytest.mark.parametrize(\"empty_name\", (\"\", \".\"))\ndef test_ens_encode_name_returns_single_zero_byte_for_empty_name(empty_name):\n assert ens_encode_name(empty_name) == b\"\\00\"\n\n\n@pytest.mark.parametrize(\n \"name,invalid_label_index\",\n (\n (\"a\" * 256, 0),\n (f\"{'a' * 256}.b\", 0),\n (f\"a.{'a-b1' * 64}x\", 1),\n (f\"{'a' * 256}.{'1' * 255}.{'b' * 255}\", 0),\n (f\"{'a' * 255}.{'1' * 256}.{'b' * 255}\", 1),\n (f\"{'a' * 255}.{'1' * 255}.{'b' * 256}\", 2),\n ),\n)\ndef test_ens_encode_name_raises_ValidationError_on_label_lengths_over_63(\n name, invalid_label_index\n):\n with pytest.raises(\n ENSValidationError, match=f\"Label at position {invalid_label_index} too long\"\n ):\n ens_encode_name(name)\n\n\ndef test_ens_encode_name_normalizes_name_before_encoding():\n assert ens_encode_name(\"Öbb.at\") == ens_encode_name(\"öbb.at\")\n assert ens_encode_name(\"nhÉéÉéÉé.eth\") == ens_encode_name(\"nhéééééé.eth\")\n assert ens_encode_name(\"TESTER.eth\") == ens_encode_name(\"tester.eth\")\n assert ens_encode_name(\"test\\u200btest.com\") == ens_encode_name(\"testtest.com\")\n assert ens_encode_name(\"O\\u0308bb.at\") == ens_encode_name(\"öbb.at\")\n\n\n# -- async -- #\n\n\n@pytest.mark.asyncio\nasync def test_init_async_web3_adds_expected_async_middlewares():\n async_w3 = init_async_web3()\n middlewares = map(str, async_w3.manager.middleware_onion)\n assert \"stalecheck_middleware\" in next(middlewares)\n\n\n@pytest.mark.asyncio\nasync def test_init_async_web3_adds_async_eth():\n async_w3 = init_async_web3()\n assert isinstance(async_w3.eth, AsyncEth)\n\n\n@pytest.mark.asyncio\nasync def test_init_async_web3_with_provider_argument_adds_async_eth():\n async_w3 = init_async_web3(AsyncEthereumTesterProvider())\n\n assert isinstance(async_w3.provider, AsyncEthereumTesterProvider)\n assert isinstance(async_w3.eth, AsyncEth)\n\n latest_block = await async_w3.eth.get_block(\"latest\")\n assert latest_block\n assert is_integer(latest_block[\"number\"])\n",
"path": "tests/ens/test_utils.py"
}
] | 8_9 | python | import unittest
import sys
class TestEnsEncodeName(unittest.TestCase):
def test_ens_encode_name_empty(self):
from ens.utils import ens_encode_name
        # Test the None, empty, and whitespace-only name variants
for empty_name in ("", ".", None, " ", " "):
with self.subTest(empty_name=empty_name):
self.assertEqual(ens_encode_name(empty_name), b"\00", f"Failed for empty name: {empty_name}")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestEnsEncodeName))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
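The heart of this patch, extracted as a standalone sketch: the function name and its logic come straight from the diff above, while the asserts are illustrative usage only.

def is_empty_name(name) -> bool:
    # None, "", ".", and whitespace-only strings all count as empty names.
    return name is None or name.strip() in ("", ".")

assert is_empty_name(None)
assert is_empty_name("   ")
assert is_empty_name(" . ")            # strip() leaves ".", still empty
assert not is_empty_name("tester.eth")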
https://github.com/teamqurrent/pypush | Enhance the handling of incoming messages in the pypush repository by introducing a thread-safe queue. In the `apns.py` file, replace the existing list-based message storage with a new class called IncomingQueue that guarantees thread safety, providing synchronized methods for adding, removing, and finding messages in the queue. Name the method for adding a message `append`, and ensure the queue's length can be obtained with len(queue). | 27528bf | requests
cryptography
wheel
tlslite-ng==0.8.0a43
srp
pbkdf2 | python3.9 | b3ead0c | diff --git a/apns.py b/apns.py
--- a/apns.py
+++ b/apns.py
@@ -29,13 +29,53 @@ def _connect(private_key: str, cert: str) -> tlslite.TLSConnection:
return sock
+class IncomingQueue:
+ def __init__(self):
+ self.queue = []
+ self.lock = threading.Lock()
+
+ def append(self, item):
+ with self.lock:
+ self.queue.append(item)
+
+ def pop(self, index):
+ with self.lock:
+ return self.queue.pop(index)
+
+ def __getitem__(self, index):
+ with self.lock:
+ return self.queue[index]
+
+ def __len__(self):
+ with self.lock:
+ return len(self.queue)
+
+ def find(self, finder):
+ with self.lock:
+ return next((i for i in self.queue if finder(i)), None)
+
+ def pop_find(self, finder):
+ with self.lock:
+ found = next((i for i in self.queue if finder(i)), None)
+ if found is not None:
+ # We have the lock, so we can safely remove it
+ self.queue.remove(found)
+ return found
+
+ def wait_pop_find(self, finder, delay=0.1):
+ found = None
+ while found is None:
+ found = self.pop_find(finder)
+ if found is None:
+ time.sleep(delay)
+ return found
class APNSConnection:
- incoming_queue = []
+ incoming_queue = IncomingQueue()
# Sink everything in the queue
def sink(self):
- self.incoming_queue = []
+ self.incoming_queue = IncomingQueue()
def _queue_filler(self):
while True and not self.sock.closed:
@@ -47,23 +87,33 @@ class APNSConnection:
# print("QUEUE: Got payload?")
if payload is not None:
- # print("QUEUE: Received payload: " + str(payload))
+ #print("QUEUE: Received payload: " + str(payload))
+ print("QUEUE: Received payload type: " + hex(payload[0]))
self.incoming_queue.append(payload)
# print("QUEUE: Thread ended")
- def _pop_by_id(self, id: int) -> tuple[int, list[tuple[int, bytes]]] | None:
- # print("QUEUE: Looking for id " + str(id) + " in " + str(self.incoming_queue))
- for i in range(len(self.incoming_queue)):
- if self.incoming_queue[i][0] == id:
- return self.incoming_queue.pop(i)
- return None
-
- def wait_for_packet(self, id: int) -> tuple[int, list[tuple[int, bytes]]]:
- payload = self._pop_by_id(id)
- while payload is None:
- payload = self._pop_by_id(id)
- time.sleep(0.1)
- return payload
+ # def _pop_by_id(self, id: int) -> tuple[int, list[tuple[int, bytes]]] | None:
+ # def finder(item):
+ # return item[0] == id
+ # return self.incoming_queue.find(finder)
+ # # print("QUEUE: Looking for id " + str(id) + " in " + str(self.incoming_queue))
+ # #for i in range(len(self.incoming_queue)):
+ # # if self.incoming_queue[i][0] == id:
+ # # return self.incoming_queue.pop(i)
+ # #return None
+
+ # def wait_for_packet(self, id: int) -> tuple[int, list[tuple[int, bytes]]]:
+ # found = None
+ # while found is None:
+ # found = self._pop_by_id(id)
+ # if found is None:
+ # time.sleep(0.1)
+ # return found
+
+ # def find_packet(self, finder) ->
+
+ #def replace_packet(self, payload: tuple[int, list[tuple[int, bytes]]]):
+ # self.incoming_queue.append(payload)
def __init__(self, private_key=None, cert=None):
# Generate the private key and certificate if they're not provided
@@ -96,7 +146,7 @@ class APNSConnection:
self.sock.write(payload)
- payload = self.wait_for_packet(8)
+ payload = self.incoming_queue.wait_pop_find(lambda i: i[0] == 8)
if (
payload == None
@@ -141,7 +191,8 @@ class APNSConnection:
self.sock.write(payload)
- payload = self.wait_for_packet(0x0B)
+ # Wait for ACK
+ payload = self.incoming_queue.wait_pop_find(lambda i: i[0] == 0x0B)
if payload[1][0][1] != 0x00.to_bytes(1, 'big'):
raise Exception("Failed to send message")
@@ -156,6 +207,19 @@ class APNSConnection:
def keep_alive(self):
self.sock.write(_serialize_payload(0x0C, []))
+ # def _send_ack(self, id: bytes):
+ # print(f"Sending ACK for message {id}")
+ # payload = _serialize_payload(0x0B, [(1, self.token), (4, id), (8, b"\x00")])
+ # self.sock.write(payload)
+ # #self.sock.write(_serialize_payload(0x0B, [(4, id)])
+ # #pass
+
+ # def recieve_message(self):
+ # payload = self.incoming_queue.wait_pop_find(lambda i: i[0] == 0x0A)
+ # # Send ACK
+ # self._send_ack(_get_field(payload[1], 4))
+ # return _get_field(payload[1], 3)
+
# TODO: Find a way to make this non-blocking
# def expect_message(self) -> tuple[int, list[tuple[int, bytes]]] | None:
# return _deserialize_payload(self.sock)
diff --git a/demo.py b/demo.py
--- a/demo.py
+++ b/demo.py
@@ -140,6 +140,7 @@ def lookup(topic:str, users: list[str]):
print(f"Looking up users {users} for topic {topic}...")
resp = ids.lookup(conn, CONFIG['username'], ids_keypair, topic, users)
+ #print(resp)
#r = list(resp['results'].values())[0]
for k, v in resp['results'].items():
print(f"Result for user {k} topic {topic}:")
@@ -156,20 +157,23 @@ def lookup(topic:str, users: list[str]):
# Hack to make sure that the requests and responses match up
# This filter MUST contain all the topics you are looking up
-conn.filter(['com.apple.madrid', 'com.apple.private.alloy.facetime.multi', 'com.apple.private.alloy.multiplex1'])
-import time
-print("...waiting for queued messages... (this is a hack)")
-time.sleep(5) # Let the server send us any messages it was holding
-conn.sink() # Dump the messages
+#conn.filter(['com.apple.madrid', 'com.apple.private.alloy.facetime.multi', 'com.apple.private.alloy.multiplex1', 'com.apple.private.alloy.screensharing'])
+#import time
+#print("...waiting for queued messages... (this is a hack)")
+#time.sleep(5) # Let the server send us any messages it was holding
+#conn.sink() # Dump the messages
-lookup("com.apple.madrid", ["mailto:jjtech@jjtech.dev"])
-lookup("com.apple.private.alloy.facetime.multi", ["mailto:jjtech@jjtech.dev"])
+#lookup("com.apple.madrid", ["mailto:jjtech@jjtech.dev"])
+#lookup("com.apple.private.alloy.facetime.multi", ["mailto:jjtech@jjtech.dev"])
-lookup("com.apple.private.alloy.facetime.multi", ["mailto:user_test2@icloud.com"])
-lookup("com.apple.madrid", ["mailto:user_test2@icloud.com"])
+# lookup("com.apple.private.alloy.facetime.multi", ["mailto:user_test2@icloud.com"])
+# lookup("com.apple.madrid", ["mailto:user_test2@icloud.com"])
-lookup("com.apple.private.alloy.multiplex1", ["mailto:user_test2@icloud.com"])
+# lookup("com.apple.private.alloy.multiplex1", ["mailto:user_test2@icloud.com"])
+lookup("com.apple.private.alloy.screensharing", ["mailto:user_test2@icloud.com"])
+
+#time.sleep(4)
# Save config
with open("config.json", "w") as f:
json.dump(CONFIG, f, indent=4)
\ No newline at end of file
diff --git a/ids.py b/ids.py
--- a/ids.py
+++ b/ids.py
@@ -100,9 +100,11 @@ def _send_request(conn: apns.APNSConnection, bag_key: str, topic: str, body: byt
#print(headers)
+ msg_id = random.randbytes(16)
+
req = {
"cT": "application/x-apple-plist",
- "U": b"\x16%C\xd5\xcd:D1\xa1\xa7z6\xa9\xe2\xbc\x8f", # Just random bytes?
+ "U": msg_id,
"c": 96,
"ua": USER_AGENT,
"u": bags.ids_bag()[bag_key],
@@ -112,14 +114,23 @@ def _send_request(conn: apns.APNSConnection, bag_key: str, topic: str, body: byt
}
conn.send_message(topic, plistlib.dumps(req, fmt=plistlib.FMT_BINARY))
- resp = conn.wait_for_packet(0x0A)
-
- resp_body = apns._get_field(resp[1], 3)
-
- if resp_body is None:
- raise (Exception(f"Got invalid response: {resp}"))
-
- return resp_body
+ #resp = conn.wait_for_packet(0x0A)
+
+ def check_response(x):
+ if x[0] != 0x0A:
+ return False
+ resp_body = apns._get_field(x[1], 3)
+ if resp_body is None:
+ return False
+ resp_body = plistlib.loads(resp_body)
+ return resp_body['U'] == msg_id
+
+ # Lambda to check if the response is the one we want
+ #conn.incoming_queue.find(check_response)
+ payload = conn.incoming_queue.wait_pop_find(check_response)
+ #conn._send_ack(apns._get_field(payload[1], 4))
+ resp = apns._get_field(payload[1], 3)
+ return plistlib.loads(resp)
# Performs an IDS lookup
@@ -132,7 +143,8 @@ def lookup(conn: apns.APNSConnection, self: str, keypair: KeyPair, topic: str, q
conn.filter([topic])
query = {"uris": query}
resp = _send_request(conn, "id-query", topic, plistlib.dumps(query), keypair, self)
- resp = plistlib.loads(resp)
+ #resp = plistlib.loads(resp)
+ #print(resp)
resp = gzip.decompress(resp["b"])
resp = plistlib.loads(resp)
return resp
| [
{
"content": "from __future__ import annotations\n\nimport random\nimport socket\nimport threading\nimport time\nfrom hashlib import sha1\n\nimport tlslite\n\nimport albert\n\nCOURIER_HOST = \"windows.courier.push.apple.com\" # TODO: Get this from config\nCOURIER_PORT = 5223\nALPN = [b\"apns-security-v2\"]\n\n\n# Connect to the courier server\ndef _connect(private_key: str, cert: str) -> tlslite.TLSConnection:\n # Connect to the courier server\n sock = socket.create_connection((COURIER_HOST, COURIER_PORT))\n # Wrap the socket in TLS\n sock = tlslite.TLSConnection(sock)\n # Parse the certificate and private key\n cert = tlslite.X509CertChain([tlslite.X509().parse(cert)])\n private_key = tlslite.parsePEMKey(private_key, private=True)\n # Handshake with the server\n sock.handshakeClientCert(cert, private_key, alpn=ALPN)\n\n return sock\n\n\nclass APNSConnection:\n incoming_queue = []\n\n # Sink everything in the queue\n def sink(self):\n self.incoming_queue = []\n\n def _queue_filler(self):\n while True and not self.sock.closed:\n # print(self.sock.closed)\n # print(\"QUEUE: Waiting for payload...\")\n # self.sock.read(1)\n # print(\"QUEUE: Got payload?\")\n payload = _deserialize_payload(self.sock)\n # print(\"QUEUE: Got payload?\")\n\n if payload is not None:\n # print(\"QUEUE: Received payload: \" + str(payload))\n self.incoming_queue.append(payload)\n # print(\"QUEUE: Thread ended\")\n\n def _pop_by_id(self, id: int) -> tuple[int, list[tuple[int, bytes]]] | None:\n # print(\"QUEUE: Looking for id \" + str(id) + \" in \" + str(self.incoming_queue))\n for i in range(len(self.incoming_queue)):\n if self.incoming_queue[i][0] == id:\n return self.incoming_queue.pop(i)\n return None\n\n def wait_for_packet(self, id: int) -> tuple[int, list[tuple[int, bytes]]]:\n payload = self._pop_by_id(id)\n while payload is None:\n payload = self._pop_by_id(id)\n time.sleep(0.1)\n return payload\n\n def __init__(self, private_key=None, cert=None):\n # Generate the private key and certificate if they're not provided\n if private_key is None or cert is None:\n self.private_key, self.cert = albert.generate_push_cert()\n else:\n self.private_key, self.cert = private_key, cert\n\n self.sock = _connect(self.private_key, self.cert)\n\n # Start the queue filler thread\n self.queue_filler_thread = threading.Thread(\n target=self._queue_filler, daemon=True\n )\n self.queue_filler_thread.start()\n\n def connect(self, root: bool = True, token: bytes = None):\n flags = 0b01000001\n if root:\n flags |= 0b0100\n\n if token is None:\n payload = _serialize_payload(\n 7, [(2, 0x01.to_bytes(1, 'big')), (5, flags.to_bytes(4, 'big'))]\n )\n else:\n payload = _serialize_payload(\n 7, [(1, token), (2, 0x01.to_bytes(1, 'big')), (5, flags.to_bytes(4, 'big'))]\n )\n\n self.sock.write(payload)\n\n payload = self.wait_for_packet(8)\n\n if (\n payload == None\n or payload[0] != 8\n or _get_field(payload[1], 1) != 0x00.to_bytes(1, 'big')\n ):\n raise Exception(\"Failed to connect\")\n\n new_token = _get_field(payload[1], 3)\n if new_token is not None:\n self.token = new_token\n elif token is not None:\n self.token = token\n else:\n raise Exception(\"No token\")\n\n return self.token\n\n def filter(self, topics: list[str]):\n fields = [(1, self.token)]\n\n for topic in topics:\n fields.append((2, sha1(topic.encode()).digest()))\n\n payload = _serialize_payload(9, fields)\n\n self.sock.write(payload)\n\n def send_message(self, topic: str, payload: str, id=None):\n if id is None:\n id = random.randbytes(4)\n\n payload = _serialize_payload(\n 
0x0A,\n [\n (4, id),\n (1, sha1(topic.encode()).digest()),\n (2, self.token),\n (3, payload),\n ],\n )\n\n self.sock.write(payload)\n\n payload = self.wait_for_packet(0x0B)\n\n if payload[1][0][1] != 0x00.to_bytes(1, 'big'):\n raise Exception(\"Failed to send message\")\n\n def set_state(self, state: int):\n self.sock.write(\n _serialize_payload(\n 0x14, [(1, state.to_bytes(1, 'big')), (2, 0x7FFFFFFF.to_bytes(4, 'big'))]\n )\n )\n\n def keep_alive(self):\n self.sock.write(_serialize_payload(0x0C, []))\n\n # TODO: Find a way to make this non-blocking\n # def expect_message(self) -> tuple[int, list[tuple[int, bytes]]] | None:\n # return _deserialize_payload(self.sock)\n\n\ndef _serialize_field(id: int, value: bytes) -> bytes:\n return id.to_bytes(1, 'big') + len(value).to_bytes(2, \"big\") + value\n\n\ndef _serialize_payload(id: int, fields: list[(int, bytes)]) -> bytes:\n payload = b\"\"\n\n for fid, value in fields:\n if fid is not None:\n payload += _serialize_field(fid, value)\n\n return id.to_bytes(1, 'big') + len(payload).to_bytes(4, \"big\") + payload\n\n\ndef _deserialize_field(stream: bytes) -> tuple[int, bytes]:\n id = int.from_bytes(stream[:1], \"big\")\n length = int.from_bytes(stream[1:3], \"big\")\n value = stream[3 : 3 + length]\n return id, value\n\n\n# Note: Takes a stream, not a buffer, as we do not know the length of the payload\n# WILL BLOCK IF THE STREAM IS EMPTY\ndef _deserialize_payload(stream) -> tuple[int, list[tuple[int, bytes]]] | None:\n id = int.from_bytes(stream.read(1), \"big\")\n\n if id == 0x0:\n return None\n\n length = int.from_bytes(stream.read(4), \"big\")\n\n buffer = stream.read(length)\n\n fields = []\n\n while len(buffer) > 0:\n fid, value = _deserialize_field(buffer)\n fields.append((fid, value))\n buffer = buffer[3 + len(value) :]\n\n return id, fields\n\n\ndef _deserialize_payload_from_buffer(\n buffer: bytes,\n) -> tuple[int, list[tuple[int, bytes]]] | None:\n id = int.from_bytes(buffer[:1], \"big\")\n\n if id == 0x0:\n return None\n\n length = int.from_bytes(buffer[1:5], \"big\")\n\n buffer = buffer[5:]\n\n if len(buffer) < length:\n raise Exception(\"Buffer is too short\")\n\n fields = []\n\n while len(buffer) > 0:\n fid, value = _deserialize_field(buffer)\n fields.append((fid, value))\n buffer = buffer[3 + len(value) :]\n\n return id, fields\n\n\n# Returns the value of the first field with the given id\ndef _get_field(fields: list[tuple[int, bytes]], id: int) -> bytes:\n for field_id, value in fields:\n if field_id == id:\n return value\n return None\n",
"path": "apns.py"
},
{
"content": "from ids import *\nimport ids\nimport getpass\nimport json\n\n# Open config\ntry:\n with open(\"config.json\", \"r\") as f:\n CONFIG = json.load(f)\nexcept FileNotFoundError:\n CONFIG = {}\n\ndef input_multiline(prompt):\n print(prompt)\n lines = []\n while True:\n line = input()\n if line == \"\":\n break\n lines.append(line)\n return \"\\n\".join(lines)\n\ndef refresh_token():\n # If no username is set, prompt for it\n if \"username\" not in CONFIG:\n CONFIG[\"username\"] = input(\"Enter iCloud username: \")\n # If no password is set, prompt for it\n if \"password\" not in CONFIG:\n CONFIG[\"password\"] = getpass.getpass(\"Enter iCloud password: \")\n # If grandslam authentication is not set, prompt for it\n if \"use_gsa\" not in CONFIG:\n CONFIG[\"use_gsa\"] = input(\"Use grandslam authentication? [y/N] \").lower() == \"y\"\n\n def factor_gen():\n return input(\"Enter iCloud 2FA code: \")\n\n CONFIG[\"user_id\"], CONFIG[\"token\"] = ids._get_auth_token(\n CONFIG[\"username\"], CONFIG[\"password\"], CONFIG[\"use_gsa\"], factor_gen=factor_gen\n )\n\ndef refresh_cert():\n CONFIG[\"key\"], CONFIG[\"auth_cert\"] = ids._get_auth_cert(\n CONFIG[\"user_id\"], CONFIG[\"token\"]\n )\n\ndef create_connection(): \n conn = apns.APNSConnection()\n token = conn.connect()\n #conn.filter(['com.apple.madrid'])\n CONFIG['push'] = {\n 'token': b64encode(token).decode(),\n 'cert': conn.cert,\n 'key': conn.private_key\n }\n return conn\n\ndef restore_connection():\n conn = apns.APNSConnection(CONFIG['push']['key'], CONFIG['push']['cert'])\n conn.connect(True, b64decode(CONFIG['push']['token']))\n #conn.filter(['com.apple.madrid', 'com.apple.private.alloy.facetime.multi'])\n return conn\n\ndef refresh_ids_cert():\n info = {\n \"uri\": \"mailto:\" + CONFIG[\"username\"],\n \"user_id\": CONFIG['user_id'],\n }\n\n resp = None\n try:\n if \"validation_data\" in CONFIG:\n resp = ids._register_request(\n CONFIG['push']['token'],\n info,\n CONFIG['auth_cert'],\n CONFIG['key'],\n CONFIG['push']['cert'],\n CONFIG['push']['key'],\n CONFIG[\"validation_data\"],\n )\n except Exception as e:\n print(e)\n resp = None\n\n if resp is None:\n print(\n \"Note: Validation data can be obtained from @JJTech, or intercepted using a HTTP proxy.\"\n )\n validation_data = (\n input_multiline(\"Enter validation data: \")\n .replace(\"\\n\", \"\")\n .replace(\" \", \"\")\n )\n resp = ids._register_request(\n CONFIG['push']['token'],\n info,\n CONFIG['auth_cert'],\n CONFIG['key'],\n CONFIG['push']['cert'],\n CONFIG['push']['key'],\n validation_data,\n )\n CONFIG[\"validation_data\"] = validation_data\n\n ids_cert = x509.load_der_x509_certificate(\n resp[\"services\"][0][\"users\"][0][\"cert\"]\n )\n ids_cert = (\n ids_cert.public_bytes(serialization.Encoding.PEM).decode(\"utf-8\").strip()\n )\n\n CONFIG[\"ids_cert\"] = ids_cert\n\n\nif not 'push' in CONFIG:\n print(\"No existing APNs credentials, creating new ones...\")\n #print(\"No push conn\")\n conn = create_connection()\nelse:\n print(\"Restoring APNs credentials...\")\n conn = restore_connection()\nprint(\"Connected to APNs!\")\n\nif not 'ids_cert' in CONFIG:\n print(\"No existing IDS certificate, creating new one...\")\n if not 'key' in CONFIG:\n print(\"No existing authentication certificate, creating new one...\")\n if not 'token' in CONFIG:\n print(\"No existing authentication token, creating new one...\")\n refresh_token()\n print(\"Got authentication token!\")\n refresh_cert()\n print(\"Got authentication certificate!\")\n refresh_ids_cert()\nprint(\"Got IDS 
certificate!\")\n\nids_keypair = ids.KeyPair(CONFIG['key'], CONFIG['ids_cert'])\n\ndef lookup(topic:str, users: list[str]):\n print(f\"Looking up users {users} for topic {topic}...\")\n resp = ids.lookup(conn, CONFIG['username'], ids_keypair, topic, users)\n\n #r = list(resp['results'].values())[0]\n for k, v in resp['results'].items():\n print(f\"Result for user {k} topic {topic}:\")\n i = v['identities']\n print(f\"IDENTITIES: {len(i)}\")\n for iden in i:\n print(\"IDENTITY\", end=\" \")\n print(f\"Push Token: {b64encode(iden['push-token']).decode()}\", end=\" \")\n if 'client-data' in iden:\n print(f\"Client Data: {len(iden['client-data'])}\")\n \n else:\n print(\"No client data\")\n\n# Hack to make sure that the requests and responses match up\n# This filter MUST contain all the topics you are looking up\nconn.filter(['com.apple.madrid', 'com.apple.private.alloy.facetime.multi', 'com.apple.private.alloy.multiplex1'])\nimport time\nprint(\"...waiting for queued messages... (this is a hack)\")\ntime.sleep(5) # Let the server send us any messages it was holding\nconn.sink() # Dump the messages\n\nlookup(\"com.apple.madrid\", [\"mailto:jjtech@jjtech.dev\"])\nlookup(\"com.apple.private.alloy.facetime.multi\", [\"mailto:jjtech@jjtech.dev\"])\n\nlookup(\"com.apple.private.alloy.facetime.multi\", [\"mailto:user_test2@icloud.com\"])\nlookup(\"com.apple.madrid\", [\"mailto:user_test2@icloud.com\"])\n\nlookup(\"com.apple.private.alloy.multiplex1\", [\"mailto:user_test2@icloud.com\"])\n\n# Save config\nwith open(\"config.json\", \"w\") as f:\n json.dump(CONFIG, f, indent=4)",
"path": "demo.py"
},
{
"content": "import plistlib\nimport random\nimport uuid\nimport gzip\nfrom base64 import b64decode, b64encode\nfrom datetime import datetime\n\nimport requests\nfrom cryptography import x509\nfrom cryptography.hazmat.backends import default_backend\nfrom cryptography.hazmat.primitives import hashes, serialization\nfrom cryptography.hazmat.primitives.asymmetric import padding, rsa\nfrom cryptography.x509.oid import NameOID\nfrom collections import namedtuple\n\nimport apns\nimport bags\nimport gsa\n\nUSER_AGENT = \"com.apple.madrid-lookup [macOS,13.2.1,22D68,MacBookPro18,3]\"\n\nKeyPair = namedtuple(\"KeyPair\", [\"key\", \"cert\"])\n\n# Nonce Format:\n# 01000001876bd0a2c0e571093967fce3d7\n# 01 # version\n# 000001876d008cc5 # unix time\n# r1r2r3r4r5r6r7r8 # random bytes\ndef generate_nonce() -> bytes:\n return (\n b\"\\x01\"\n + int(datetime.now().timestamp() * 1000).to_bytes(8, \"big\")\n + random.randbytes(8)\n )\n\ndef _create_payload(\n bag_key: str,\n query_string: str,\n push_token: str,\n payload: bytes,\n nonce: bytes = None,\n) -> tuple[str, bytes]:\n # Generate the nonce\n if nonce is None:\n nonce = generate_nonce()\n push_token = b64decode(push_token)\n\n return (\n nonce\n + len(bag_key).to_bytes(4, 'big')\n + bag_key.encode()\n + len(query_string).to_bytes(4, 'big')\n + query_string.encode()\n + len(payload).to_bytes(4, 'big')\n + payload\n + len(push_token).to_bytes(4, 'big')\n + push_token,\n nonce,\n )\n\n\ndef sign_payload(\n private_key: str, bag_key: str, query_string: str, push_token: str, payload: bytes\n) -> tuple[str, bytes]:\n # Load the private key\n key = serialization.load_pem_private_key(\n private_key.encode(), password=None, backend=default_backend()\n )\n\n payload, nonce = _create_payload(bag_key, query_string, push_token, payload)\n sig = key.sign(payload, padding.PKCS1v15(), hashes.SHA1())\n\n sig = b\"\\x01\\x01\" + sig\n sig = b64encode(sig).decode()\n\n return sig, nonce\n\n\n# global_key, global_cert = load_keys()\n\ndef _send_request(conn: apns.APNSConnection, bag_key: str, topic: str, body: bytes, keypair: KeyPair, username: str) -> bytes:\n body = gzip.compress(body, mtime=0)\n\n push_token = b64encode(conn.token).decode()\n\n # Sign the request\n signature, nonce = sign_payload(keypair.key, bag_key, \"\", push_token, body)\n\n headers = {\n \"x-id-cert\": keypair.cert.replace(\"-----BEGIN CERTIFICATE-----\", \"\")\n .replace(\"-----END CERTIFICATE-----\", \"\")\n .replace(\"\\n\", \"\"),\n \"x-id-nonce\": b64encode(nonce).decode(),\n \"x-id-sig\": signature,\n \"x-push-token\": push_token,\n \"x-id-self-uri\": 'mailto:' + username,\n \"User-Agent\": USER_AGENT,\n \"x-protocol-version\": \"1630\",\n }\n\n #print(headers)\n\n req = {\n \"cT\": \"application/x-apple-plist\",\n \"U\": b\"\\x16%C\\xd5\\xcd:D1\\xa1\\xa7z6\\xa9\\xe2\\xbc\\x8f\", # Just random bytes?\n \"c\": 96,\n \"ua\": USER_AGENT,\n \"u\": bags.ids_bag()[bag_key],\n \"h\": headers,\n \"v\": 2,\n \"b\": body,\n }\n\n conn.send_message(topic, plistlib.dumps(req, fmt=plistlib.FMT_BINARY))\n resp = conn.wait_for_packet(0x0A)\n\n resp_body = apns._get_field(resp[1], 3)\n\n if resp_body is None:\n raise (Exception(f\"Got invalid response: {resp}\"))\n\n return resp_body\n\n\n# Performs an IDS lookup\n# conn: an active APNs connection. must be connected and have a push token. 
will be filtered to the IDS topic\n# self: the user's email address\n# keypair: a KeyPair object containing the user's private key and certificate\n# topic: the IDS topic to query\n# query: a list of URIs to query\ndef lookup(conn: apns.APNSConnection, self: str, keypair: KeyPair, topic: str, query: list[str]) -> any:\n conn.filter([topic])\n query = {\"uris\": query}\n resp = _send_request(conn, \"id-query\", topic, plistlib.dumps(query), keypair, self)\n resp = plistlib.loads(resp)\n resp = gzip.decompress(resp[\"b\"])\n resp = plistlib.loads(resp)\n return resp\n\n\ndef _auth_token_request(username: str, password: str) -> any:\n # Turn the PET into an auth token\n data = {\n \"apple-id\": username,\n \"client-id\": str(uuid.uuid4()),\n \"delegates\": {\"com.apple.private.ids\": {\"protocol-version\": \"4\"}},\n \"password\": password,\n }\n data = plistlib.dumps(data)\n\n r = requests.post(\n \"https://setup.icloud.com/setup/prefpane/loginDelegates\",\n auth=(username, password),\n data=data,\n verify=False,\n )\n r = plistlib.loads(r.content)\n return r\n\n\n# Gets an IDS auth token for the given username and password\n# If use_gsa is True, GSA authentication will be used, which requires anisette\n# If use_gsa is False, it will use a old style 2FA code\n# If factor_gen is not None, it will be called to get the 2FA code, otherwise it will be prompted\n# Returns (realm user id, auth token)\ndef _get_auth_token(\n username: str, password: str, use_gsa: bool = False, factor_gen: callable = None\n) -> tuple[str, str]:\n if use_gsa:\n g = gsa.authenticate(username, password, gsa.Anisette())\n pet = g[\"t\"][\"com.apple.gs.idms.pet\"][\"token\"]\n else:\n # Make the request without the 2FA code to make the prompt appear\n _auth_token_request(username, password)\n # Now make the request with the 2FA code\n if factor_gen is None:\n pet = password + input(\"Enter 2FA code: \")\n else:\n pet = password + factor_gen()\n r = _auth_token_request(username, pet)\n # print(r)\n if \"description\" in r:\n raise Exception(f\"Error: {r['description']}\")\n service_data = r[\"delegates\"][\"com.apple.private.ids\"][\"service-data\"]\n realm_user_id = service_data[\"realm-user-id\"]\n auth_token = service_data[\"auth-token\"]\n # print(f\"Auth token for {realm_user_id}: {auth_token}\")\n return realm_user_id, auth_token\n\n\ndef _generate_csr(private_key: rsa.RSAPrivateKey) -> str:\n csr = (\n x509.CertificateSigningRequestBuilder()\n .subject_name(\n x509.Name(\n [\n x509.NameAttribute(NameOID.COMMON_NAME, random.randbytes(20).hex()),\n ]\n )\n )\n .sign(private_key, hashes.SHA256())\n )\n\n csr = csr.public_bytes(serialization.Encoding.PEM).decode(\"utf-8\")\n return (\n csr.replace(\"-----BEGIN CERTIFICATE REQUEST-----\", \"\")\n .replace(\"-----END CERTIFICATE REQUEST-----\", \"\")\n .replace(\"\\n\", \"\")\n )\n\n\n# Gets an IDS auth cert for the given user id and auth token\n# Returns [private key PEM, certificate PEM]\ndef _get_auth_cert(user_id, token) -> tuple[str, str]:\n private_key = rsa.generate_private_key(\n public_exponent=65537, key_size=2048, backend=default_backend()\n )\n body = {\n \"authentication-data\": {\"auth-token\": token},\n \"csr\": b64decode(_generate_csr(private_key)),\n \"realm-user-id\": user_id,\n }\n\n body = plistlib.dumps(body)\n\n r = requests.post(\n \"https://profile.ess.apple.com/WebObjects/VCProfileService.woa/wa/authenticateDS\",\n data=body,\n headers={\"x-protocol-version\": \"1630\"},\n verify=False,\n )\n r = plistlib.loads(r.content)\n if r[\"status\"] != 
0:\n raise (Exception(f\"Failed to get auth cert: {r}\"))\n cert = x509.load_der_x509_certificate(r[\"cert\"])\n return (\n private_key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n )\n .decode(\"utf-8\")\n .strip(),\n cert.public_bytes(serialization.Encoding.PEM).decode(\"utf-8\").strip(),\n )\n\n\ndef _register_request(\n push_token, info, auth_cert, auth_key, push_cert, push_key, validation_data\n):\n body = {\n \"hardware-version\": \"MacBookPro18,3\",\n \"language\": \"en-US\",\n \"os-version\": \"macOS,13.2.1,22D68\",\n \"software-version\": \"22D68\",\n \"services\": [\n {\n \"capabilities\": [{\"flags\": 1, \"name\": \"Messenger\", \"version\": 1}],\n \"service\": \"com.apple.madrid\",\n \"users\": [\n {\n \"uris\": [{\"uri\": info[\"uri\"]}],\n \"user-id\": info[\"user_id\"],\n }\n ],\n }\n ],\n \"validation-data\": b64decode(validation_data),\n }\n\n body = plistlib.dumps(body)\n body = gzip.compress(body, mtime=0)\n\n push_sig, push_nonce = sign_payload(push_key, \"id-register\", \"\", push_token, body)\n auth_sig, auth_nonce = sign_payload(auth_key, \"id-register\", \"\", push_token, body)\n\n headers = {\n \"x-protocol-version\": \"1640\",\n \"content-type\": \"application/x-apple-plist\",\n \"content-encoding\": \"gzip\",\n \"x-auth-sig-0\": auth_sig,\n \"x-auth-cert-0\": auth_cert.replace(\"\\n\", \"\")\n .replace(\"-----BEGIN CERTIFICATE-----\", \"\")\n .replace(\"-----END CERTIFICATE-----\", \"\"),\n \"x-auth-user-id-0\": info[\"user_id\"],\n \"x-auth-nonce-0\": b64encode(auth_nonce),\n \"x-pr-nonce\": b64encode(auth_nonce),\n \"x-push-token\": push_token,\n \"x-push-sig\": push_sig,\n \"x-push-cert\": push_cert.replace(\"\\n\", \"\")\n .replace(\"-----BEGIN CERTIFICATE-----\", \"\")\n .replace(\"-----END CERTIFICATE-----\", \"\"),\n \"x-push-nonce\": b64encode(push_nonce),\n }\n\n r = requests.post(\n \"https://identity.ess.apple.com/WebObjects/TDIdentityService.woa/wa/register\",\n headers=headers,\n data=body,\n verify=False,\n )\n r = plistlib.loads(r.content)\n print(f'Response code: {r[\"status\"]}')\n if \"status\" in r and r[\"status\"] == 6004:\n raise Exception(\"Validation data expired!\")\n return r\n",
"path": "ids.py"
}
] | 9_0 | python | import unittest
import threading
import sys
class TestIncomingQueue(unittest.TestCase):
def test_thread_safety(self):
from apns import IncomingQueue
queue = IncomingQueue()
items_to_add = 100
def add_items():
for _ in range(items_to_add):
queue.append(1)
threads = [threading.Thread(target=add_items) for _ in range(2)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
self.assertEqual(len(queue), items_to_add * 2)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestIncomingQueue))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
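A short usage sketch for the IncomingQueue introduced in the patch above, exercising the same concurrent-append property the test checks. It assumes the IncomingQueue class from the patch is in scope; thread and item counts are arbitrary.

import threading

q = IncomingQueue()  # the lock-guarded queue from the patch

def producer():
    for _ in range(100):
        q.append(("payload",))

threads = [threading.Thread(target=producer) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()

# append() and __len__ share one lock, so no update is lost.
assert len(q) == 400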
https://github.com/teamqurrent/pypush | To enhance the project's command-line interface capabilities, add the prompt_toolkit library to the project's dependencies by listing it in the `requirements.txt` file. | 3ef1b6e | requests
cryptography
wheel
tlslite-ng==0.8.0a43
srp
pbkdf2 | python3.9 | db90bf5 | diff --git a/requirements.txt b/requirements.txt
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,4 +5,5 @@ tlslite-ng==0.8.0a43
srp
pbkdf2
unicorn
-rich
\ No newline at end of file
+rich
+prompt_toolkit
\ No newline at end of file
| [
{
"content": "requests\ncryptography\nwheel\ntlslite-ng==0.8.0a43\nsrp\npbkdf2\nunicorn\nrich",
"path": "requirements.txt"
}
] | 9_1 | python | import unittest
import os
import sys
class TestRequirementsFile(unittest.TestCase):
def test_prompt_toolkit_in_requirements(self):
# Path to the requirements.txt file
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path, 'r') as file:
requirements = file.read()
# Check if 'prompt_toolkit' is in the requirements
self.assertIn('prompt_toolkit', requirements, "prompt_toolkit not found in requirements.txt")
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestRequirementsFile))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
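For context, prompt_toolkit is the usual drop-in upgrade for bare input() calls like the 2FA prompt elsewhere in this repo. A minimal sketch of the kind of usage the new dependency enables; this is illustrative only, not code from the repo:

from prompt_toolkit import prompt

# Line editing and input history come for free, unlike the builtin input().
code = prompt("Enter iCloud 2FA code: ")
print(f"Got a code of length {len(code)}")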
https://github.com/teamqurrent/pypush | Enhance the flexibility of specifying the sender of an iMessage. Modify the `from_raw` method in the `imessage.py` file to accept an additional optional sender parameter, and adjust the method's logic to prefer this parameter over the last participant in the message when determining the sender. | be9a278 | requests
cryptography
wheel
tlslite-ng==0.8.0a43
srp
pbkdf2 | python3.10 | 74fff8b | diff --git a/imessage.py b/imessage.py
--- a/imessage.py
+++ b/imessage.py
@@ -85,7 +85,7 @@ class iMessage:
return True
- def from_raw(message: bytes) -> "iMessage":
+ def from_raw(message: bytes, sender: str | None = None) -> "iMessage":
"""Create an `iMessage` from raw message bytes"""
compressed = False
try:
@@ -100,7 +100,7 @@ class iMessage:
text=message.get("t", ""),
xml=message.get("x"),
participants=message.get("p", []),
- sender=message.get("p", [])[-1] if message.get("p", []) != [] else None,
+ sender=sender if sender is not None else message.get("p", [])[-1] if "p" in message else None,
_id=uuid.UUID(message.get("r")) if "r" in message else None,
group_id=uuid.UUID(message.get("gid")) if "gid" in message else None,
body=BalloonBody(message["bid"], message["b"])
@@ -333,7 +333,7 @@ class iMessageUser:
decrypted = self._decrypt_payload(payload)
- return iMessage.from_raw(decrypted)
+ return iMessage.from_raw(decrypted, body['sP'])
KEY_CACHE: dict[bytes, tuple[bytes, bytes]] = {}
"""Mapping of push token : (public key, session token)"""
| [
{
"content": "# LOW LEVEL imessage function, decryption etc\n# Don't handle APNS etc, accept it already setup\n\n## HAVE ANOTHER FILE TO SETUP EVERYTHING AUTOMATICALLY, etc\n# JSON parsing of keys, don't pass around strs??\n\nimport gzip\nimport logging\nimport plistlib\nimport random\nimport uuid\nfrom dataclasses import dataclass, field\nfrom hashlib import sha1, sha256\nfrom io import BytesIO\n\nfrom cryptography.hazmat.primitives import hashes\nfrom cryptography.hazmat.primitives.asymmetric import ec, padding\nfrom cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n\nimport apns\nimport ids\n\nlogger = logging.getLogger(\"imessage\")\n\nNORMAL_NONCE = b\"\\x00\" * 15 + b\"\\x01\" # This is always used as the AES nonce\n\n\nclass BalloonBody:\n \"\"\"Represents the special parts of message extensions etc.\"\"\"\n\n def __init__(self, type: str, data: bytes):\n self.type = type\n self.data = data\n\n # TODO : Register handlers based on type id\n\n\n@dataclass\nclass iMessage:\n \"\"\"Represents an iMessage\"\"\"\n\n text: str = \"\"\n \"\"\"Plain text of message, always required, may be an empty string\"\"\"\n xml: str | None = None\n \"\"\"XML portion of message, may be None\"\"\"\n participants: list[str] = field(default_factory=list)\n \"\"\"List of participants in the message, including the sender\"\"\"\n sender: str | None = None\n \"\"\"Sender of the message\"\"\"\n _id: uuid.UUID | None = None\n \"\"\"ID of the message, will be randomly generated if not provided\"\"\"\n group_id: uuid.UUID | None = None\n \"\"\"Group ID of the message, will be randomly generated if not provided\"\"\"\n body: BalloonBody | None = None\n \"\"\"BalloonBody, may be None\"\"\"\n\n _compressed: bool = True\n \"\"\"Internal property representing whether the message should be compressed\"\"\"\n\n _raw: dict | None = None\n \"\"\"Internal property representing the original raw message, may be None\"\"\"\n\n def sanity_check(self):\n \"\"\"Corrects any missing fields\"\"\"\n if self._id is None:\n self._id = uuid.uuid4()\n\n if self.group_id is None:\n self.group_id = uuid.uuid4()\n\n if self.sender is None:\n if len(self.participants) > 1:\n self.sender = self.participants[-1]\n else:\n logger.warning(\n \"Message has no sender, and only one participant, sanity check failed\"\n )\n return False\n\n if self.sender not in self.participants:\n self.participants.append(self.sender)\n\n if self.xml != None:\n self._compressed = False # XML is never compressed for some reason\n\n return True\n\n def from_raw(message: bytes) -> \"iMessage\":\n \"\"\"Create an `iMessage` from raw message bytes\"\"\"\n compressed = False\n try:\n message = gzip.decompress(message)\n compressed = True\n except:\n pass\n\n message = plistlib.loads(message)\n\n return iMessage(\n text=message.get(\"t\", \"\"),\n xml=message.get(\"x\"),\n participants=message.get(\"p\", []),\n sender=message.get(\"p\", [])[-1] if message.get(\"p\", []) != [] else None,\n _id=uuid.UUID(message.get(\"r\")) if \"r\" in message else None,\n group_id=uuid.UUID(message.get(\"gid\")) if \"gid\" in message else None,\n body=BalloonBody(message[\"bid\"], message[\"b\"])\n if \"bid\" in message\n else None,\n _compressed=compressed,\n _raw=message,\n )\n\n def to_raw(self) -> bytes:\n \"\"\"Convert an `iMessage` to raw message bytes\"\"\"\n if not self.sanity_check():\n raise ValueError(\"Message failed sanity check\")\n\n d = {\n \"t\": self.text,\n \"x\": self.xml,\n \"p\": self.participants,\n \"r\": str(self._id).upper(),\n \"gid\": 
str(self.group_id).upper(),\n \"pv\": 0,\n \"gv\": \"8\",\n \"v\": \"1\",\n }\n\n # Remove keys that are None\n d = {k: v for k, v in d.items() if v is not None}\n\n # Serialize as a plist\n d = plistlib.dumps(d, fmt=plistlib.FMT_BINARY)\n\n # Compression\n if self._compressed:\n d = gzip.compress(d, mtime=0)\n\n return d\n\n\nclass iMessageUser:\n \"\"\"Represents a logged in and connected iMessage user.\n This abstraction should probably be reworked into IDS some time...\"\"\"\n\n def __init__(self, connection: apns.APNSConnection, user: ids.IDSUser):\n self.connection = connection\n self.user = user\n\n def _get_raw_message(self):\n \"\"\"\n Returns a raw APNs message corresponding to the next conforming notification in the queue\n Returns None if no conforming notification is found\n \"\"\"\n\n def check_response(x):\n if x[0] != 0x0A:\n return False\n if apns._get_field(x[1], 2) != sha1(\"com.apple.madrid\".encode()).digest():\n return False\n resp_body = apns._get_field(x[1], 3)\n if resp_body is None:\n # logger.debug(\"Rejecting madrid message with no body\")\n return False\n resp_body = plistlib.loads(resp_body)\n if \"P\" not in resp_body:\n # logger.debug(f\"Rejecting madrid message with no payload : {resp_body}\")\n return False\n return True\n\n payload = self.connection.incoming_queue.pop_find(check_response)\n if payload is None:\n return None\n id = apns._get_field(payload[1], 4)\n\n return payload\n\n def _parse_payload(payload: bytes) -> tuple[bytes, bytes]:\n payload = BytesIO(payload)\n\n tag = payload.read(1)\n #print(\"TAG\", tag)\n body_length = int.from_bytes(payload.read(2), \"big\")\n body = payload.read(body_length)\n\n signature_len = payload.read(1)[0]\n signature = payload.read(signature_len)\n\n return (body, signature)\n\n def _construct_payload(body: bytes, signature: bytes) -> bytes:\n payload = (\n b\"\\x02\"\n + len(body).to_bytes(2, \"big\")\n + body\n + len(signature).to_bytes(1, \"big\")\n + signature\n )\n return payload\n\n def _hash_identity(id: bytes) -> bytes:\n iden = ids.identity.IDSIdentity.decode(id)\n\n # TODO: Combine this with serialization code in ids.identity\n output = BytesIO()\n output.write(b\"\\x00\\x41\\x04\")\n output.write(\n ids._helpers.parse_key(iden.signing_public_key)\n .public_numbers()\n .x.to_bytes(32, \"big\")\n )\n output.write(\n ids._helpers.parse_key(iden.signing_public_key)\n .public_numbers()\n .y.to_bytes(32, \"big\")\n )\n\n output.write(b\"\\x00\\xAC\")\n output.write(b\"\\x30\\x81\\xA9\")\n output.write(b\"\\x02\\x81\\xA1\")\n output.write(\n ids._helpers.parse_key(iden.encryption_public_key)\n .public_numbers()\n .n.to_bytes(161, \"big\")\n )\n output.write(b\"\\x02\\x03\\x01\\x00\\x01\")\n\n return sha256(output.getvalue()).digest()\n\n def _encrypt_sign_payload(\n self, key: ids.identity.IDSIdentity, message: bytes\n ) -> bytes:\n # Generate a random AES key\n random_seed = random.randbytes(11)\n # Create the HMAC\n import hmac\n\n hm = hmac.new(\n random_seed,\n message\n + b\"\\x02\"\n + iMessageUser._hash_identity(self.user.encryption_identity.encode())\n + iMessageUser._hash_identity(key.encode()),\n sha256,\n ).digest()\n\n aes_key = random_seed + hm[:5]\n\n # print(len(aes_key))\n\n # Encrypt the message with the AES key\n cipher = Cipher(algorithms.AES(aes_key), modes.CTR(NORMAL_NONCE))\n encrypted = cipher.encryptor().update(message)\n\n # Encrypt the AES key with the public key of the recipient\n recipient_key = ids._helpers.parse_key(key.encryption_public_key)\n rsa_body = recipient_key.encrypt(\n 
aes_key + encrypted[:100],\n padding.OAEP(\n mgf=padding.MGF1(algorithm=hashes.SHA1()),\n algorithm=hashes.SHA1(),\n label=None,\n ),\n )\n\n # Construct the payload\n body = rsa_body + encrypted[100:]\n sig = ids._helpers.parse_key(self.user.encryption_identity.signing_key).sign(\n body, ec.ECDSA(hashes.SHA1())\n )\n payload = iMessageUser._construct_payload(body, sig)\n\n return payload\n\n def _decrypt_payload(self, payload: bytes) -> dict:\n payload = iMessageUser._parse_payload(payload)\n\n body = BytesIO(payload[0])\n rsa_body = ids._helpers.parse_key(\n self.user.encryption_identity.encryption_key\n ).decrypt(\n body.read(160),\n padding.OAEP(\n mgf=padding.MGF1(algorithm=hashes.SHA1()),\n algorithm=hashes.SHA1(),\n label=None,\n ),\n )\n\n cipher = Cipher(algorithms.AES(rsa_body[:16]), modes.CTR(NORMAL_NONCE))\n decrypted = cipher.decryptor().update(rsa_body[16:] + body.read())\n\n return decrypted\n\n def _verify_payload(self, payload: bytes, sender: str, sender_token: str) -> bool:\n # Get the public key for the sender\n self._cache_keys([sender])\n\n if not sender_token in self.KEY_CACHE:\n logger.warning(\"Unable to find the public key of the sender, cannot verify\")\n return False\n\n identity_keys = ids.identity.IDSIdentity.decode(self.KEY_CACHE[sender_token][0])\n sender_ec_key = ids._helpers.parse_key(identity_keys.signing_public_key)\n\n payload = iMessageUser._parse_payload(payload)\n\n try:\n # Verify the signature (will throw an exception if it fails)\n sender_ec_key.verify(\n payload[1],\n payload[0],\n ec.ECDSA(hashes.SHA1()),\n )\n return True\n except:\n return False\n\n def receive(self) -> iMessage | None:\n \"\"\"\n Will return the next iMessage in the queue, or None if there are no messages\n \"\"\"\n raw = self._get_raw_message()\n if raw is None:\n return None\n body = apns._get_field(raw[1], 3)\n body = plistlib.loads(body)\n #print(f\"Got body message {body}\")\n payload = body[\"P\"]\n\n if not self._verify_payload(payload, body['sP'], body[\"t\"]):\n raise Exception(\"Failed to verify payload\")\n \n decrypted = self._decrypt_payload(payload)\n \n return iMessage.from_raw(decrypted)\n\n KEY_CACHE: dict[bytes, tuple[bytes, bytes]] = {}\n \"\"\"Mapping of push token : (public key, session token)\"\"\"\n USER_CACHE: dict[str, list[bytes]] = {}\n \"\"\"Mapping of handle : [push tokens]\"\"\"\n\n def _cache_keys(self, participants: list[str]):\n # Check to see if we have cached the keys for all of the participants\n if all([p in self.USER_CACHE for p in participants]):\n return\n\n # Look up the public keys for the participants, and cache a token : public key mapping\n lookup = self.user.lookup(participants)\n\n for key, participant in lookup.items():\n if not key in self.USER_CACHE:\n self.USER_CACHE[key] = []\n\n for identity in participant[\"identities\"]:\n if not \"client-data\" in identity:\n continue\n if not \"public-message-identity-key\" in identity[\"client-data\"]:\n continue\n if not \"push-token\" in identity:\n continue\n if not \"session-token\" in identity:\n continue\n\n self.USER_CACHE[key].append(identity[\"push-token\"])\n\n # print(identity)\n\n self.KEY_CACHE[identity[\"push-token\"]] = (\n identity[\"client-data\"][\"public-message-identity-key\"],\n identity[\"session-token\"],\n )\n\n def send(self, message: iMessage):\n # Set the sender, if it isn't already\n if message.sender is None:\n message.sender = self.user.handles[0] # TODO : Which handle to use?\n\n message.sanity_check() # Sanity check MUST be called before caching keys, so 
that the sender is added to the list of participants\n self._cache_keys(message.participants)\n\n # Turn the message into a raw message\n raw = message.to_raw()\n import base64\n\n bundled_payloads = []\n for participant in message.participants:\n for push_token in self.USER_CACHE[participant]:\n if push_token == self.connection.token:\n continue # Don't send to ourselves\n\n identity_keys = ids.identity.IDSIdentity.decode(\n self.KEY_CACHE[push_token][0]\n )\n payload = self._encrypt_sign_payload(identity_keys, raw)\n\n bundled_payloads.append(\n {\n \"tP\": participant,\n \"D\": not participant\n == message.sender, # TODO: Should this be false sometimes? For self messages?\n \"sT\": self.KEY_CACHE[push_token][1],\n \"P\": payload,\n \"t\": push_token,\n }\n )\n\n msg_id = random.randbytes(4)\n body = {\n \"fcn\": 1,\n \"c\": 100,\n \"E\": \"pair\",\n \"ua\": \"[macOS,13.4.1,22F82,MacBookPro18,3]\",\n \"v\": 8,\n \"i\": int.from_bytes(msg_id, \"big\"),\n \"U\": message._id.bytes,\n \"dtl\": bundled_payloads,\n \"sP\": message.sender,\n }\n\n body = plistlib.dumps(body, fmt=plistlib.FMT_BINARY)\n\n self.connection.send_message(\"com.apple.madrid\", body, msg_id)\n\n # This code can check to make sure we got a success response, but waiting for the response is annoying,\n # so for now we just YOLO it and assume it worked\n\n # def check_response(x):\n # if x[0] != 0x0A:\n # return False\n # if apns._get_field(x[1], 2) != sha1(\"com.apple.madrid\".encode()).digest():\n # return False\n # resp_body = apns._get_field(x[1], 3)\n # if resp_body is None:\n # return False\n # resp_body = plistlib.loads(resp_body)\n # if \"c\" not in resp_body or resp_body[\"c\"] != 255:\n # return False\n # return True\n \n\n # num_recv = 0\n # while True:\n # if num_recv == len(bundled_payloads):\n # break\n # payload = self.connection.incoming_queue.wait_pop_find(check_response)\n # if payload is None:\n # continue\n\n # resp_body = apns._get_field(payload[1], 3)\n # resp_body = plistlib.loads(resp_body)\n # logger.error(resp_body)\n # num_recv += 1\n",
"path": "imessage.py"
}
] | 9_2 | python | import unittest
import plistlib
import sys
class TestIMessageFromRaw(unittest.TestCase):
def test_from_raw_with_explicit_sender(self):
from imessage import iMessage
# Create a dummy message dictionary that would represent the parsed message bytes
dummy_message = {
"t": "Test message",
"p": ["participant1@example.com", "participant2@example.com"],
"r": "123e4567-e89b-12d3-a456-426655440000", # Example UUID
"gid": "123e4567-e89b-12d3-a456-426655440000", # Example UUID
"bid": "example_balloon_id",
"b": "example_balloon_body"
}
# Serialize the dictionary to a bytes-like object
message_bytes = plistlib.dumps(dummy_message)
explicit_sender = 'explicit_sender@example.com'
imessage = iMessage.from_raw(message_bytes, sender=explicit_sender)
self.assertEqual(imessage.sender, explicit_sender)
def test_from_raw_without_explicit_sender(self):
from imessage import iMessage
# Create a dummy message dictionary that would represent the parsed message bytes
dummy_message = {
"t": "Test message",
"p": ["participant1@example.com", "participant2@example.com"],
"r": "123e4567-e89b-12d3-a456-426655440000", # Example UUID
"gid": "123e4567-e89b-12d3-a456-426655440000", # Example UUID
"bid": "example_balloon_id",
"b": "example_balloon_body"
}
# Serialize the dictionary to a bytes-like object
message_bytes = plistlib.dumps(dummy_message)
# Assuming the last participant is the sender in the message bytes
expected_sender = dummy_message["p"][-1]
imessage = iMessage.from_raw(message_bytes)
self.assertEqual(imessage.sender, expected_sender)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestIMessageFromRaw))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
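For reference, a minimal sketch of the sender resolution the tests above expect from `iMessage.from_raw`. The helper name `resolve_sender` is illustrative and not part of the repository; the explicit-argument-wins rule and the fallback to the last entry of the plist's "p" list follow the assertions in the test script:

import plistlib

def resolve_sender(message: bytes, sender=None):
    # Illustrative only: mirrors the behavior asserted by the tests above.
    data = plistlib.loads(message)
    if sender is not None:
        return sender  # an explicit sender argument always wins
    # otherwise the last participant listed in the payload is the sender
    participants = data.get("p", [])
    return participants[-1] if participants else None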
https://github.com/teamqurrent/pypush | The goal is to streamline the authentication process by removing `gsa.py`, which was only added for testing. Remove the dependency on `gsa.py` and its associated GrandSlam authentication method, particularly in `ids/profile.py`. Simplify the `get_auth_token` function by eliminating the platform-specific code and any references to `gsa`. This makes the authentication process more straightforward and maintainable. | 213f90a | requests
cryptography
wheel
tlslite-ng==0.8.0a43
srp
pbkdf2 | python3.10 | d740f3b | diff --git a/gsa.py b/gsa.py
deleted file mode 100644
--- a/gsa.py
+++ /dev/null
@@ -1,535 +0,0 @@
-import getpass
-import hashlib
-import hmac
-import json
-import locale
-import plistlib as plist
-import uuid
-from base64 import b64decode, b64encode
-from datetime import datetime
-from random import randbytes
-
-import pbkdf2
-import requests
-import srp._pysrp as srp
-from cryptography.hazmat.primitives import padding
-from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
-
-# Constants
-DEBUG = True # Allows using a proxy for debugging (disables SSL verification)
-# Server to use for anisette generation
-# ANISETTE = "https://sign.rheaa.xyz/"
-# ANISETTE = 'http://45.132.246.138:6969/'
-ANISETTE = False
-# ANISETTE = 'https://sideloadly.io/anisette/irGb3Quww8zrhgqnzmrx'
-# ANISETTE = "http://jkcoxson.com:2052/"
-
-# Configure SRP library for compatibility with Apple's implementation
-srp.rfc5054_enable()
-srp.no_username_in_x()
-
-# Disable SSL Warning
-import urllib3
-
-urllib3.disable_warnings()
-
-
-def generate_anisette() -> dict:
- import objc
- from Foundation import NSBundle, NSClassFromString # type: ignore
-
- AOSKitBundle = NSBundle.bundleWithPath_(
- "/System/Library/PrivateFrameworks/AOSKit.framework"
- )
- objc.loadBundleFunctions(AOSKitBundle, globals(), [("retrieveOTPHeadersForDSID", b"")]) # type: ignore
- util = NSClassFromString("AOSUtilities")
-
- h = util.retrieveOTPHeadersForDSID_("-2")
-
- o = {
- "X-Apple-I-MD": str(h["X-Apple-MD"]),
- "X-Apple-I-MD-M": str(h["X-Apple-MD-M"]),
- }
- # h["X-Apple-I-MD"] = str(h["X-Apple-MD"])
- # h["X-Apple-I-MD-M"] = str(h["X-Apple-MD-M"])
- # print(o)
- return o
- # r = requests.get(ANISETTE, verify=False if DEBUG else True, timeout=5)
- # r = json.loads(r.text)
- # return r
-
-
-class Anisette:
- @staticmethod
- def _fetch(url: str) -> dict:
- """Fetches anisette data that we cannot calculate from a remote server"""
- if url == False:
- return generate_anisette()
- r = requests.get(url, verify=False if DEBUG else True, timeout=5)
- r = json.loads(r.text)
- return r
-
- def __init__(self, url: str = ANISETTE, name: str = "") -> None:
- self._name = name
- self._url = url
- self._anisette = self._fetch(self._url)
-
- # Generate a "user id": just a random UUID
- # TODO: Figure out how to tie it to the user's account on the device
- self._user_id = str(uuid.uuid4()).upper()
- self._device_id = str(uuid.uuid4()).upper()
-
- # override string printing
- def __str__(self) -> str:
- return f"{self._name} ({self.backend})"
-
- @property
- def url(self) -> str:
- return self._url
-
- @property
- def backend(self) -> str:
- if (
- self._anisette["X-MMe-Client-Info"]
- == "<MacBookPro15,1> <Mac OS X;10.15.2;19C57> <com.apple.AuthKit/1 (com.apple.dt.Xcode/3594.4.19)>"
- ):
- return "AltServer"
- elif (
- self._anisette["X-MMe-Client-Info"]
- == "<iMac11,3> <Mac OS X;10.15.6;19G2021> <com.apple.AuthKit/1 (com.apple.dt.Xcode/3594.4.19)>"
- ):
- return "Provision"
- else:
- return f"Unknown ({self._anisette['X-MMe-Client-Info']})"
-
- # Getters
- @property
- def timestamp(self) -> str:
- """'Timestamp'
- Current timestamp in ISO 8601 format
- """
-
- # We only want sencond precision, so we set the microseconds to 0
- # We also add 'Z' to the end to indicate UTC
- # An alternate way to write this is strftime("%FT%T%zZ")
- return datetime.utcnow().replace(microsecond=0).isoformat() + "Z"
-
- @property
- def timezone(self) -> str:
- """'Time Zone'
- Abbreviation of the timezone of the device (e.g. EST)"""
-
- return str(datetime.utcnow().astimezone().tzinfo)
-
- @property
- def locale(self) -> str:
- """'Locale'
- Locale of the device (e.g. en_US)
- """
-
- return locale.getdefaultlocale()[0] or "en_US"
-
- @property
- def otp(self) -> str:
- """'One Time Password'
- A seemingly random base64 string containing 28 bytes
- TODO: Figure out how to generate this
- """
-
- return self._anisette["X-Apple-I-MD"]
-
- @property
- def local_user(self) -> str:
- """'Local User ID'
- There are 2 possible implementations of this value
- 1. Uppercase hex of the SHA256 hash of some unknown value (used by Windows based servers)
- 2. Base64 encoding of an uppercase UUID (used by android based servers)
- I picked the second one because it's more fully understood.
- """
-
- return b64encode(self._user_id.encode()).decode()
-
- @property
- def machine(self) -> str:
- """'Machine ID'
- This is a base64 encoded string of 60 'random' bytes
- We're not sure how this is generated, we have to rely on the server
- TODO: Figure out how to generate this
- """
-
- return self._anisette["X-Apple-I-MD-M"]
-
- @property
- def router(self) -> str:
- """'Routing Info'
- This is a number, either 17106176 or 50660608
- It doesn't seem to matter which one we use,
- 17106176 is used by Sideloadly and Provision (android) based servers
- 50660608 is used by Windows iCloud based servers
- """
-
- return "17106176"
-
- @property
- def serial(self) -> str:
- """'Device Serial Number'
- This is the serial number of the device
- You can use a legitimate serial number, but Apple accepts '0' as well (for andriod devices)
- See https://github.com/acidanthera/OpenCorePkg/blob/master/Utilities/macserial/macserial.c for how to generate a legit serial
- """
-
- return "0"
-
- @property
- def device(self) -> str:
- """'Device Unique Identifier'
- This is just an uppercase UUID"""
-
- return self._device_id
-
- def _build_client(self, emulated_device: str, emulated_app: str) -> str:
- # TODO: Update OS version and app versions
-
- model = emulated_device
- if emulated_device == "PC":
- # We're emulating a PC, so we run Windows (Vista?)
- os = "Windows"
- os_version = "6.2(0,0);9200"
- else:
- # We're emulating a Mac, so we run macOS (What is 15.6?)
- os = "Mac OS X"
- os_version = "10.15.6;19G2021"
-
- if emulated_app == "Xcode":
- app_bundle = "com.apple.dt.Xcode"
- app_version = "3594.4.19"
- else:
- app_bundle = "com.apple.iCloud"
- app_version = "7.21"
-
- if os == "Windows":
- authkit_bundle = "com.apple.AuthKitWin"
- else:
- authkit_bundle = "com.apple.AuthKit"
- authkit_version = "1"
-
- return f"<{model}> <{os};{os_version}> <{authkit_bundle}/{authkit_version} ({app_bundle}/{app_version})>"
-
- @property
- def client(self) -> str:
- """'Client Information'
- String in the following format:
- <%MODEL%> <%OS%;%MAJOR%.%MINOR%(%SPMAJOR%,%SPMINOR%);%BUILD%> <%AUTHKIT_BUNDLE_ID%/%AUTHKIT_VERSION% (%APP_BUNDLE_ID%/%APP_VERSION%)>
- Where:
- MODEL: The model of the device (e.g. MacBookPro15,1 or 'PC'
- OS: The OS of the device (e.g. Mac OS X or Windows)
- MAJOR: The major version of the OS (e.g. 10)
- MINOR: The minor version of the OS (e.g. 15)
- SPMAJOR: The major version of the service pack (e.g. 0) (Windows only)
- SPMINOR: The minor version of the service pack (e.g. 0) (Windows only)
- BUILD: The build number of the OS (e.g. 19C57)
- AUTHKIT_BUNDLE_ID: The bundle ID of the AuthKit framework (e.g. com.apple.AuthKit)
- AUTHKIT_VERSION: The version of the AuthKit framework (e.g. 1)
- APP_BUNDLE_ID: The bundle ID of the app (e.g. com.apple.dt.Xcode)
- APP_VERSION: The version of the app (e.g. 3594.4.19)
- """
- return self._build_client("iMac11,3", "Xcode")
-
- def generate_headers(self, client_info: bool = False) -> dict:
- h = {
- # Current Time
- "X-Apple-I-Client-Time": self.timestamp,
- "X-Apple-I-TimeZone": self.timezone,
- # Locale
- # Some implementations only use this for locale
- "loc": self.locale,
- "X-Apple-Locale": self.locale,
- # Anisette
- "X-Apple-I-MD": self.otp, # 'One Time Password'
- # 'Local User ID'
- "X-Apple-I-MD-LU": self.local_user,
- "X-Apple-I-MD-M": self.machine, # 'Machine ID'
- # 'Routing Info', some implementations convert this to an integer
- "X-Apple-I-MD-RINFO": self.router,
- # Device information
- # 'Device Unique Identifier'
- "X-Mme-Device-Id": self.device,
- # 'Device Serial Number'
- "X-Apple-I-SRL-NO": self.serial,
- }
-
- # Additional client information only used in some requests
- if client_info:
- h["X-Mme-Client-Info"] = self.client
- h["X-Apple-App-Info"] = "com.apple.gs.xcode.auth"
- h["X-Xcode-Version"] = "11.2 (11B41)"
-
- return h
-
- def generate_cpd(self) -> dict:
- cpd = {
- # Many of these values are not strictly necessary, but may be tracked by Apple
- # I've chosen to match the AltServer implementation
- # Not sure what these are for, needs some investigation
- "bootstrap": True, # All implementations set this to true
- "icscrec": True, # Only AltServer sets this to true
- "pbe": False, # All implementations explicitly set this to false
- "prkgen": True, # I've also seen ckgen
- "svct": "iCloud", # In certian circumstances, this can be 'iTunes' or 'iCloud'
- # Not included, but I've also seen:
- # 'capp': 'AppStore',
- # 'dc': '#d4c5b3',
- # 'dec': '#e1e4e3',
- # 'prtn': 'ME349',
- }
-
- cpd.update(self.generate_headers())
- return cpd
-
-
-def authenticated_request(parameters, anisette: Anisette) -> dict:
- body = {
- "Header": {
- "Version": "1.0.1",
- },
- "Request": {
- "cpd": anisette.generate_cpd(),
- },
- }
- body["Request"].update(parameters)
- # print(plist.dumps(body).decode('utf-8'))
-
- headers = {
- "Content-Type": "text/x-xml-plist",
- "Accept": "*/*",
- "User-Agent": "akd/1.0 CFNetwork/978.0.7 Darwin/18.7.0",
- "X-MMe-Client-Info": anisette.client,
- }
-
- resp = requests.post(
- # "https://17.32.194.2/grandslam/GsService2",
- "https://gsa.apple.com/grandslam/GsService2",
- headers=headers,
- data=plist.dumps(body),
- verify=False, # TODO: Verify Apple's self-signed cert
- timeout=5,
- )
-
- return plist.loads(resp.content)["Response"]
-
-
-def check_error(r):
- # Check for an error code
- if "Status" in r:
- status = r["Status"]
- else:
- status = r
-
- if status["ec"] != 0:
- raise Exception(f"Error {status['ec']}: {status['em']}")
- #print(f"Error {status['ec']}: {status['em']}")
- #return True
- return False
-
-
-def encrypt_password(password: str, salt: bytes, iterations: int) -> bytes:
- p = hashlib.sha256(password.encode("utf-8")).digest()
- return pbkdf2.PBKDF2(p, salt, iterations, hashlib.sha256).read(32)
-
-
-def create_session_key(usr: srp.User, name: str) -> bytes:
- k = usr.get_session_key()
- if k is None:
- raise Exception("No session key")
- return hmac.new(k, name.encode(), hashlib.sha256).digest()
-
-
-def decrypt_cbc(usr: srp.User, data: bytes) -> bytes:
- extra_data_key = create_session_key(usr, "extra data key:")
- extra_data_iv = create_session_key(usr, "extra data iv:")
- # Get only the first 16 bytes of the iv
- extra_data_iv = extra_data_iv[:16]
-
- # Decrypt with AES CBC
- cipher = Cipher(algorithms.AES(extra_data_key), modes.CBC(extra_data_iv))
- decryptor = cipher.decryptor()
- data = decryptor.update(data) + decryptor.finalize()
- # Remove PKCS#7 padding
- padder = padding.PKCS7(128).unpadder()
- return padder.update(data) + padder.finalize()
-
-
-def trusted_second_factor(dsid, idms_token, anisette: Anisette):
- identity_token = b64encode((dsid + ":" + idms_token).encode()).decode()
-
- headers = {
- "Content-Type": "text/x-xml-plist",
- "User-Agent": "Xcode",
- "Accept": "text/x-xml-plist",
- "Accept-Language": "en-us",
- "X-Apple-Identity-Token": identity_token,
- }
-
- headers.update(anisette.generate_headers(client_info=True))
-
- # This will trigger the 2FA prompt on trusted devices
- # We don't care about the response, it's just some HTML with a form for entering the code
- # Easier to just use a text prompt
- requests.get(
- "https://gsa.apple.com/auth/verify/trusteddevice",
- headers=headers,
- verify=False,
- timeout=10,
- )
-
- # Prompt for the 2FA code. It's just a string like '123456', no dashes or spaces
- code = getpass.getpass("Enter 2FA code: ")
- # code = input("Enter 2FA code: ")
- headers["security-code"] = code
-
- # Send the 2FA code to Apple
- resp = requests.get(
- "https://gsa.apple.com/grandslam/GsService2/validate",
- headers=headers,
- verify=False,
- timeout=10,
- )
- r = plist.loads(resp.content)
- if check_error(r):
- return
-
- print("2FA successful")
-
-
-def sms_second_factor(dsid, idms_token, anisette: Anisette):
- # TODO: Figure out how to make SMS 2FA work correctly
- raise NotImplementedError("SMS 2FA is not yet implemented")
- identity_token = b64encode((dsid + ":" + idms_token).encode()).decode()
-
- headers = {
- "Content-Type": "text/x-xml-plist",
- "User-Agent": "Xcode",
- # "Accept": "text/x-xml-plist",
- "Accept": "application/x-buddyml",
- "Accept-Language": "en-us",
- "X-Apple-Identity-Token": identity_token,
- }
-
- headers.update(anisette.generate_headers(client_info=True))
-
- body = {"serverInfo": {"phoneNumber.id": "1"}}
-
- # This will send the 2FA code to the user's phone over SMS
- # We don't care about the response, it's just some HTML with a form for entering the code
- # Easier to just use a text prompt
- requests.post(
- "https://gsa.apple.com/auth/verify/phone/put?mode=sms",
- data=plist.dumps(body),
- headers=headers,
- verify=False,
- timeout=5,
- )
-
- # Prompt for the 2FA code. It's just a string like '123456', no dashes or spaces
- code = input("Enter 2FA code: ")
-
- body = {
- "securityCode.code": code,
- "serverInfo": {"mode": "sms", "phoneNumber.id": "1"},
- }
- # headers["security-code"] = code
-
- # Send the 2FA code to Apple
- resp = requests.post(
- "https://gsa.apple.com/auth/verify/phone/securitycode?referrer=/auth/verify/phone/put",
- headers=headers,
- data=plist.dumps(body),
- verify=False,
- timeout=5,
- )
- print(resp.content.decode())
- # r = plist.loads(resp.content)
- # if check_error(r):
- # return
-
- # print("2FA successful")
-
-
-def authenticate(username, password, anisette: Anisette):
- # Password is None as we'll provide it later
- usr = srp.User(username, bytes(), hash_alg=srp.SHA256, ng_type=srp.NG_2048)
- _, A = usr.start_authentication()
-
- r = authenticated_request(
- {
- "A2k": A,
- "ps": ["s2k", "s2k_fo"],
- # "ps": ["s2k"],
- "u": username,
- "o": "init",
- },
- anisette,
- )
-
- # Check for an error code
- if check_error(r):
- return
-
- if r["sp"] != "s2k":
- print(f"This implementation only supports s2k. Server returned {r['sp']}")
- return
-
- # Change the password out from under the SRP library, as we couldn't calculate it without the salt.
- usr.p = encrypt_password(password, r["s"], r["i"]) # type: ignore
-
- M = usr.process_challenge(r["s"], r["B"])
-
- # Make sure we processed the challenge correctly
- if M is None:
- print("Failed to process challenge")
- return
-
- r = authenticated_request(
- {
- "c": r["c"],
- "M1": M,
- "u": username,
- "o": "complete",
- },
- anisette,
- )
-
- if check_error(r):
- return
-
- # Make sure that the server's session key matches our session key (and thus that they are not an imposter)
- usr.verify_session(r["M2"])
- if not usr.authenticated():
- print("Failed to verify session")
- return
-
- spd = decrypt_cbc(usr, r["spd"])
- # For some reason plistlib doesn't accept it without the header...
- PLISTHEADER = b"""\
-<?xml version='1.0' encoding='UTF-8'?>
-<!DOCTYPE plist PUBLIC '-//Apple//DTD PLIST 1.0//EN' 'http://www.apple.com/DTDs/PropertyList-1.0.dtd'>
-"""
- spd = plist.loads(PLISTHEADER + spd)
-
- if "au" in r["Status"] and r["Status"]["au"] == "trustedDeviceSecondaryAuth":
- print("Trusted device authentication required")
- # Replace bytes with strings
- for k, v in spd.items():
- if isinstance(v, bytes):
- spd[k] = b64encode(v).decode()
- trusted_second_factor(spd["adsid"], spd["GsIdmsToken"], anisette)
- return authenticate(username, password, anisette)
- elif "au" in r["Status"] and r["Status"]["au"] == "secondaryAuth":
- print("SMS authentication required")
- sms_second_factor(spd["adsid"], spd["GsIdmsToken"], anisette)
- elif "au" in r["Status"]:
- print(f"Unknown auth value {r['Status']['au']}")
- return
- else:
- # print("Assuming 2FA is not required")
- return spd
diff --git a/ids/profile.py b/ids/profile.py
--- a/ids/profile.py
+++ b/ids/profile.py
@@ -10,7 +10,6 @@ from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding, rsa
from cryptography.x509.oid import NameOID
-import gsa
import bags
from . import signing
@@ -50,20 +49,6 @@ def get_auth_token(
username: str, password: str, factor_gen: callable = None
) -> tuple[str, str]:
from sys import platform
-
- use_gsa = False
- # Check if objc is importable
- try:
- if platform == "darwin":
- import objc
- use_gsa = True
- except ImportError:
- pass
-
- if use_gsa:
- logger.debug("Using GrandSlam to authenticate (native Anisette)")
- g = gsa.authenticate(username, password, gsa.Anisette())
- password = g["t"]["com.apple.gs.idms.pet"]["token"]
result = _auth_token_request(username, password)
if result["status"] != 0:
| [
{
"content": "import getpass\nimport hashlib\nimport hmac\nimport json\nimport locale\nimport plistlib as plist\nimport uuid\nfrom base64 import b64decode, b64encode\nfrom datetime import datetime\nfrom random import randbytes\n\nimport pbkdf2\nimport requests\nimport srp._pysrp as srp\nfrom cryptography.hazmat.primitives import padding\nfrom cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n\n# Constants\nDEBUG = True # Allows using a proxy for debugging (disables SSL verification)\n# Server to use for anisette generation\n# ANISETTE = \"https://sign.rheaa.xyz/\"\n# ANISETTE = 'http://45.132.246.138:6969/'\nANISETTE = False\n# ANISETTE = 'https://sideloadly.io/anisette/irGb3Quww8zrhgqnzmrx'\n# ANISETTE = \"http://jkcoxson.com:2052/\"\n\n# Configure SRP library for compatibility with Apple's implementation\nsrp.rfc5054_enable()\nsrp.no_username_in_x()\n\n# Disable SSL Warning\nimport urllib3\n\nurllib3.disable_warnings()\n\n\ndef generate_anisette() -> dict:\n import objc\n from Foundation import NSBundle, NSClassFromString # type: ignore\n\n AOSKitBundle = NSBundle.bundleWithPath_(\n \"/System/Library/PrivateFrameworks/AOSKit.framework\"\n )\n objc.loadBundleFunctions(AOSKitBundle, globals(), [(\"retrieveOTPHeadersForDSID\", b\"\")]) # type: ignore\n util = NSClassFromString(\"AOSUtilities\")\n\n h = util.retrieveOTPHeadersForDSID_(\"-2\")\n\n o = {\n \"X-Apple-I-MD\": str(h[\"X-Apple-MD\"]),\n \"X-Apple-I-MD-M\": str(h[\"X-Apple-MD-M\"]),\n }\n # h[\"X-Apple-I-MD\"] = str(h[\"X-Apple-MD\"])\n # h[\"X-Apple-I-MD-M\"] = str(h[\"X-Apple-MD-M\"])\n # print(o)\n return o\n # r = requests.get(ANISETTE, verify=False if DEBUG else True, timeout=5)\n # r = json.loads(r.text)\n # return r\n\n\nclass Anisette:\n @staticmethod\n def _fetch(url: str) -> dict:\n \"\"\"Fetches anisette data that we cannot calculate from a remote server\"\"\"\n if url == False:\n return generate_anisette()\n r = requests.get(url, verify=False if DEBUG else True, timeout=5)\n r = json.loads(r.text)\n return r\n\n def __init__(self, url: str = ANISETTE, name: str = \"\") -> None:\n self._name = name\n self._url = url\n self._anisette = self._fetch(self._url)\n\n # Generate a \"user id\": just a random UUID\n # TODO: Figure out how to tie it to the user's account on the device\n self._user_id = str(uuid.uuid4()).upper()\n self._device_id = str(uuid.uuid4()).upper()\n\n # override string printing\n def __str__(self) -> str:\n return f\"{self._name} ({self.backend})\"\n\n @property\n def url(self) -> str:\n return self._url\n\n @property\n def backend(self) -> str:\n if (\n self._anisette[\"X-MMe-Client-Info\"]\n == \"<MacBookPro15,1> <Mac OS X;10.15.2;19C57> <com.apple.AuthKit/1 (com.apple.dt.Xcode/3594.4.19)>\"\n ):\n return \"AltServer\"\n elif (\n self._anisette[\"X-MMe-Client-Info\"]\n == \"<iMac11,3> <Mac OS X;10.15.6;19G2021> <com.apple.AuthKit/1 (com.apple.dt.Xcode/3594.4.19)>\"\n ):\n return \"Provision\"\n else:\n return f\"Unknown ({self._anisette['X-MMe-Client-Info']})\"\n\n # Getters\n @property\n def timestamp(self) -> str:\n \"\"\"'Timestamp'\n Current timestamp in ISO 8601 format\n \"\"\"\n\n # We only want sencond precision, so we set the microseconds to 0\n # We also add 'Z' to the end to indicate UTC\n # An alternate way to write this is strftime(\"%FT%T%zZ\")\n return datetime.utcnow().replace(microsecond=0).isoformat() + \"Z\"\n\n @property\n def timezone(self) -> str:\n \"\"\"'Time Zone'\n Abbreviation of the timezone of the device (e.g. 
EST)\"\"\"\n\n return str(datetime.utcnow().astimezone().tzinfo)\n\n @property\n def locale(self) -> str:\n \"\"\"'Locale'\n Locale of the device (e.g. en_US)\n \"\"\"\n\n return locale.getdefaultlocale()[0] or \"en_US\"\n\n @property\n def otp(self) -> str:\n \"\"\"'One Time Password'\n A seemingly random base64 string containing 28 bytes\n TODO: Figure out how to generate this\n \"\"\"\n\n return self._anisette[\"X-Apple-I-MD\"]\n\n @property\n def local_user(self) -> str:\n \"\"\"'Local User ID'\n There are 2 possible implementations of this value\n 1. Uppercase hex of the SHA256 hash of some unknown value (used by Windows based servers)\n 2. Base64 encoding of an uppercase UUID (used by android based servers)\n I picked the second one because it's more fully understood.\n \"\"\"\n\n return b64encode(self._user_id.encode()).decode()\n\n @property\n def machine(self) -> str:\n \"\"\"'Machine ID'\n This is a base64 encoded string of 60 'random' bytes\n We're not sure how this is generated, we have to rely on the server\n TODO: Figure out how to generate this\n \"\"\"\n\n return self._anisette[\"X-Apple-I-MD-M\"]\n\n @property\n def router(self) -> str:\n \"\"\"'Routing Info'\n This is a number, either 17106176 or 50660608\n It doesn't seem to matter which one we use,\n 17106176 is used by Sideloadly and Provision (android) based servers\n 50660608 is used by Windows iCloud based servers\n \"\"\"\n\n return \"17106176\"\n\n @property\n def serial(self) -> str:\n \"\"\"'Device Serial Number'\n This is the serial number of the device\n You can use a legitimate serial number, but Apple accepts '0' as well (for andriod devices)\n See https://github.com/acidanthera/OpenCorePkg/blob/master/Utilities/macserial/macserial.c for how to generate a legit serial\n \"\"\"\n\n return \"0\"\n\n @property\n def device(self) -> str:\n \"\"\"'Device Unique Identifier'\n This is just an uppercase UUID\"\"\"\n\n return self._device_id\n\n def _build_client(self, emulated_device: str, emulated_app: str) -> str:\n # TODO: Update OS version and app versions\n\n model = emulated_device\n if emulated_device == \"PC\":\n # We're emulating a PC, so we run Windows (Vista?)\n os = \"Windows\"\n os_version = \"6.2(0,0);9200\"\n else:\n # We're emulating a Mac, so we run macOS (What is 15.6?)\n os = \"Mac OS X\"\n os_version = \"10.15.6;19G2021\"\n\n if emulated_app == \"Xcode\":\n app_bundle = \"com.apple.dt.Xcode\"\n app_version = \"3594.4.19\"\n else:\n app_bundle = \"com.apple.iCloud\"\n app_version = \"7.21\"\n\n if os == \"Windows\":\n authkit_bundle = \"com.apple.AuthKitWin\"\n else:\n authkit_bundle = \"com.apple.AuthKit\"\n authkit_version = \"1\"\n\n return f\"<{model}> <{os};{os_version}> <{authkit_bundle}/{authkit_version} ({app_bundle}/{app_version})>\"\n\n @property\n def client(self) -> str:\n \"\"\"'Client Information'\n String in the following format:\n <%MODEL%> <%OS%;%MAJOR%.%MINOR%(%SPMAJOR%,%SPMINOR%);%BUILD%> <%AUTHKIT_BUNDLE_ID%/%AUTHKIT_VERSION% (%APP_BUNDLE_ID%/%APP_VERSION%)>\n Where:\n MODEL: The model of the device (e.g. MacBookPro15,1 or 'PC'\n OS: The OS of the device (e.g. Mac OS X or Windows)\n MAJOR: The major version of the OS (e.g. 10)\n MINOR: The minor version of the OS (e.g. 15)\n SPMAJOR: The major version of the service pack (e.g. 0) (Windows only)\n SPMINOR: The minor version of the service pack (e.g. 0) (Windows only)\n BUILD: The build number of the OS (e.g. 19C57)\n AUTHKIT_BUNDLE_ID: The bundle ID of the AuthKit framework (e.g. 
com.apple.AuthKit)\n AUTHKIT_VERSION: The version of the AuthKit framework (e.g. 1)\n APP_BUNDLE_ID: The bundle ID of the app (e.g. com.apple.dt.Xcode)\n APP_VERSION: The version of the app (e.g. 3594.4.19)\n \"\"\"\n return self._build_client(\"iMac11,3\", \"Xcode\")\n\n def generate_headers(self, client_info: bool = False) -> dict:\n h = {\n # Current Time\n \"X-Apple-I-Client-Time\": self.timestamp,\n \"X-Apple-I-TimeZone\": self.timezone,\n # Locale\n # Some implementations only use this for locale\n \"loc\": self.locale,\n \"X-Apple-Locale\": self.locale,\n # Anisette\n \"X-Apple-I-MD\": self.otp, # 'One Time Password'\n # 'Local User ID'\n \"X-Apple-I-MD-LU\": self.local_user,\n \"X-Apple-I-MD-M\": self.machine, # 'Machine ID'\n # 'Routing Info', some implementations convert this to an integer\n \"X-Apple-I-MD-RINFO\": self.router,\n # Device information\n # 'Device Unique Identifier'\n \"X-Mme-Device-Id\": self.device,\n # 'Device Serial Number'\n \"X-Apple-I-SRL-NO\": self.serial,\n }\n\n # Additional client information only used in some requests\n if client_info:\n h[\"X-Mme-Client-Info\"] = self.client\n h[\"X-Apple-App-Info\"] = \"com.apple.gs.xcode.auth\"\n h[\"X-Xcode-Version\"] = \"11.2 (11B41)\"\n\n return h\n\n def generate_cpd(self) -> dict:\n cpd = {\n # Many of these values are not strictly necessary, but may be tracked by Apple\n # I've chosen to match the AltServer implementation\n # Not sure what these are for, needs some investigation\n \"bootstrap\": True, # All implementations set this to true\n \"icscrec\": True, # Only AltServer sets this to true\n \"pbe\": False, # All implementations explicitly set this to false\n \"prkgen\": True, # I've also seen ckgen\n \"svct\": \"iCloud\", # In certian circumstances, this can be 'iTunes' or 'iCloud'\n # Not included, but I've also seen:\n # 'capp': 'AppStore',\n # 'dc': '#d4c5b3',\n # 'dec': '#e1e4e3',\n # 'prtn': 'ME349',\n }\n\n cpd.update(self.generate_headers())\n return cpd\n\n\ndef authenticated_request(parameters, anisette: Anisette) -> dict:\n body = {\n \"Header\": {\n \"Version\": \"1.0.1\",\n },\n \"Request\": {\n \"cpd\": anisette.generate_cpd(),\n },\n }\n body[\"Request\"].update(parameters)\n # print(plist.dumps(body).decode('utf-8'))\n\n headers = {\n \"Content-Type\": \"text/x-xml-plist\",\n \"Accept\": \"*/*\",\n \"User-Agent\": \"akd/1.0 CFNetwork/978.0.7 Darwin/18.7.0\",\n \"X-MMe-Client-Info\": anisette.client,\n }\n\n resp = requests.post(\n # \"https://17.32.194.2/grandslam/GsService2\",\n \"https://gsa.apple.com/grandslam/GsService2\",\n headers=headers,\n data=plist.dumps(body),\n verify=False, # TODO: Verify Apple's self-signed cert\n timeout=5,\n )\n\n return plist.loads(resp.content)[\"Response\"]\n\n\ndef check_error(r):\n # Check for an error code\n if \"Status\" in r:\n status = r[\"Status\"]\n else:\n status = r\n\n if status[\"ec\"] != 0:\n raise Exception(f\"Error {status['ec']}: {status['em']}\")\n #print(f\"Error {status['ec']}: {status['em']}\")\n #return True\n return False\n\n\ndef encrypt_password(password: str, salt: bytes, iterations: int) -> bytes:\n p = hashlib.sha256(password.encode(\"utf-8\")).digest()\n return pbkdf2.PBKDF2(p, salt, iterations, hashlib.sha256).read(32)\n\n\ndef create_session_key(usr: srp.User, name: str) -> bytes:\n k = usr.get_session_key()\n if k is None:\n raise Exception(\"No session key\")\n return hmac.new(k, name.encode(), hashlib.sha256).digest()\n\n\ndef decrypt_cbc(usr: srp.User, data: bytes) -> bytes:\n extra_data_key = create_session_key(usr, 
\"extra data key:\")\n extra_data_iv = create_session_key(usr, \"extra data iv:\")\n # Get only the first 16 bytes of the iv\n extra_data_iv = extra_data_iv[:16]\n\n # Decrypt with AES CBC\n cipher = Cipher(algorithms.AES(extra_data_key), modes.CBC(extra_data_iv))\n decryptor = cipher.decryptor()\n data = decryptor.update(data) + decryptor.finalize()\n # Remove PKCS#7 padding\n padder = padding.PKCS7(128).unpadder()\n return padder.update(data) + padder.finalize()\n\n\ndef trusted_second_factor(dsid, idms_token, anisette: Anisette):\n identity_token = b64encode((dsid + \":\" + idms_token).encode()).decode()\n\n headers = {\n \"Content-Type\": \"text/x-xml-plist\",\n \"User-Agent\": \"Xcode\",\n \"Accept\": \"text/x-xml-plist\",\n \"Accept-Language\": \"en-us\",\n \"X-Apple-Identity-Token\": identity_token,\n }\n\n headers.update(anisette.generate_headers(client_info=True))\n\n # This will trigger the 2FA prompt on trusted devices\n # We don't care about the response, it's just some HTML with a form for entering the code\n # Easier to just use a text prompt\n requests.get(\n \"https://gsa.apple.com/auth/verify/trusteddevice\",\n headers=headers,\n verify=False,\n timeout=10,\n )\n\n # Prompt for the 2FA code. It's just a string like '123456', no dashes or spaces\n code = getpass.getpass(\"Enter 2FA code: \")\n # code = input(\"Enter 2FA code: \")\n headers[\"security-code\"] = code\n\n # Send the 2FA code to Apple\n resp = requests.get(\n \"https://gsa.apple.com/grandslam/GsService2/validate\",\n headers=headers,\n verify=False,\n timeout=10,\n )\n r = plist.loads(resp.content)\n if check_error(r):\n return\n\n print(\"2FA successful\")\n\n\ndef sms_second_factor(dsid, idms_token, anisette: Anisette):\n # TODO: Figure out how to make SMS 2FA work correctly\n raise NotImplementedError(\"SMS 2FA is not yet implemented\")\n identity_token = b64encode((dsid + \":\" + idms_token).encode()).decode()\n\n headers = {\n \"Content-Type\": \"text/x-xml-plist\",\n \"User-Agent\": \"Xcode\",\n # \"Accept\": \"text/x-xml-plist\",\n \"Accept\": \"application/x-buddyml\",\n \"Accept-Language\": \"en-us\",\n \"X-Apple-Identity-Token\": identity_token,\n }\n\n headers.update(anisette.generate_headers(client_info=True))\n\n body = {\"serverInfo\": {\"phoneNumber.id\": \"1\"}}\n\n # This will send the 2FA code to the user's phone over SMS\n # We don't care about the response, it's just some HTML with a form for entering the code\n # Easier to just use a text prompt\n requests.post(\n \"https://gsa.apple.com/auth/verify/phone/put?mode=sms\",\n data=plist.dumps(body),\n headers=headers,\n verify=False,\n timeout=5,\n )\n\n # Prompt for the 2FA code. 
It's just a string like '123456', no dashes or spaces\n code = input(\"Enter 2FA code: \")\n\n body = {\n \"securityCode.code\": code,\n \"serverInfo\": {\"mode\": \"sms\", \"phoneNumber.id\": \"1\"},\n }\n # headers[\"security-code\"] = code\n\n # Send the 2FA code to Apple\n resp = requests.post(\n \"https://gsa.apple.com/auth/verify/phone/securitycode?referrer=/auth/verify/phone/put\",\n headers=headers,\n data=plist.dumps(body),\n verify=False,\n timeout=5,\n )\n print(resp.content.decode())\n # r = plist.loads(resp.content)\n # if check_error(r):\n # return\n\n # print(\"2FA successful\")\n\n\ndef authenticate(username, password, anisette: Anisette):\n # Password is None as we'll provide it later\n usr = srp.User(username, bytes(), hash_alg=srp.SHA256, ng_type=srp.NG_2048)\n _, A = usr.start_authentication()\n\n r = authenticated_request(\n {\n \"A2k\": A,\n \"ps\": [\"s2k\", \"s2k_fo\"],\n # \"ps\": [\"s2k\"],\n \"u\": username,\n \"o\": \"init\",\n },\n anisette,\n )\n\n # Check for an error code\n if check_error(r):\n return\n\n if r[\"sp\"] != \"s2k\":\n print(f\"This implementation only supports s2k. Server returned {r['sp']}\")\n return\n\n # Change the password out from under the SRP library, as we couldn't calculate it without the salt.\n usr.p = encrypt_password(password, r[\"s\"], r[\"i\"]) # type: ignore\n\n M = usr.process_challenge(r[\"s\"], r[\"B\"])\n\n # Make sure we processed the challenge correctly\n if M is None:\n print(\"Failed to process challenge\")\n return\n\n r = authenticated_request(\n {\n \"c\": r[\"c\"],\n \"M1\": M,\n \"u\": username,\n \"o\": \"complete\",\n },\n anisette,\n )\n\n if check_error(r):\n return\n\n # Make sure that the server's session key matches our session key (and thus that they are not an imposter)\n usr.verify_session(r[\"M2\"])\n if not usr.authenticated():\n print(\"Failed to verify session\")\n return\n\n spd = decrypt_cbc(usr, r[\"spd\"])\n # For some reason plistlib doesn't accept it without the header...\n PLISTHEADER = b\"\"\"\\\n<?xml version='1.0' encoding='UTF-8'?>\n<!DOCTYPE plist PUBLIC '-//Apple//DTD PLIST 1.0//EN' 'http://www.apple.com/DTDs/PropertyList-1.0.dtd'>\n\"\"\"\n spd = plist.loads(PLISTHEADER + spd)\n\n if \"au\" in r[\"Status\"] and r[\"Status\"][\"au\"] == \"trustedDeviceSecondaryAuth\":\n print(\"Trusted device authentication required\")\n # Replace bytes with strings\n for k, v in spd.items():\n if isinstance(v, bytes):\n spd[k] = b64encode(v).decode()\n trusted_second_factor(spd[\"adsid\"], spd[\"GsIdmsToken\"], anisette)\n return authenticate(username, password, anisette)\n elif \"au\" in r[\"Status\"] and r[\"Status\"][\"au\"] == \"secondaryAuth\":\n print(\"SMS authentication required\")\n sms_second_factor(spd[\"adsid\"], spd[\"GsIdmsToken\"], anisette)\n elif \"au\" in r[\"Status\"]:\n print(f\"Unknown auth value {r['Status']['au']}\")\n return\n else:\n # print(\"Assuming 2FA is not required\")\n return spd\n",
"path": "gsa.py"
},
{
"content": "import plistlib\nimport random\nimport uuid\nfrom base64 import b64decode\n\nimport requests\nfrom cryptography import x509\nfrom cryptography.hazmat.backends import default_backend\nfrom cryptography.hazmat.primitives import hashes, serialization\nfrom cryptography.hazmat.primitives.asymmetric import padding, rsa\nfrom cryptography.x509.oid import NameOID\n\nimport gsa\nimport bags\n\nfrom . import signing\nfrom ._helpers import PROTOCOL_VERSION, USER_AGENT, KeyPair\n\nimport logging\nlogger = logging.getLogger(\"ids\")\n\n\ndef _auth_token_request(username: str, password: str) -> any:\n # Turn the PET into an auth token\n data = {\n \"username\": username,\n #\"client-id\": str(uuid.uuid4()),\n #\"delegates\": {\"com.apple.private.ids\": {\"protocol-version\": \"4\"}},\n \"password\": password,\n }\n data = plistlib.dumps(data)\n\n r = requests.post(\n # TODO: Figure out which URL bag we can get this from\n \"https://profile.ess.apple.com/WebObjects/VCProfileService.woa/wa/authenticateUser\",\n #\"https://setup.icloud.com/setup/prefpane/loginDelegates\",\n #auth=(username, password),\n data=data,\n verify=False,\n )\n r = plistlib.loads(r.content)\n return r\n\n\n# Gets an IDS auth token for the given username and password\n# Will use native Grand Slam on macOS\n# If factor_gen is not None, it will be called to get the 2FA code, otherwise it will be prompted\n# Returns (realm user id, auth token)\ndef get_auth_token(\n username: str, password: str, factor_gen: callable = None\n) -> tuple[str, str]:\n from sys import platform\n\n use_gsa = False\n # Check if objc is importable\n try:\n if platform == \"darwin\":\n import objc\n use_gsa = True\n except ImportError:\n pass\n\n if use_gsa:\n logger.debug(\"Using GrandSlam to authenticate (native Anisette)\")\n g = gsa.authenticate(username, password, gsa.Anisette())\n password = g[\"t\"][\"com.apple.gs.idms.pet\"][\"token\"]\n \n result = _auth_token_request(username, password)\n if result[\"status\"] != 0:\n if result[\"status\"] == 5000:\n if factor_gen is None:\n password = password + input(\"Enter 2FA code: \")\n else:\n password = password + factor_gen()\n result = _auth_token_request(username, password)\n if result[\"status\"] != 0:\n raise Exception(f\"Error: {result}\")\n \n auth_token = result[\"auth-token\"]\n realm_user_id = result[\"profile-id\"]\n # else:\n # logger.debug(\"Using old-style authentication\")\n # # Make the request without the 2FA code to make the prompt appear\n # _auth_token_request(username, password)\n # # TODO: Make sure we actually need the second request, some rare accounts don't have 2FA\n # # Now make the request with the 2FA code\n # if factor_gen is None:\n # pet = password + input(\"Enter 2FA code: \")\n # else:\n # pet = password + factor_gen()\n # r = _auth_token_request(username, pet)\n # # print(r)\n # if \"description\" in r:\n # raise Exception(f\"Error: {r['description']}\")\n # service_data = r[\"delegates\"][\"com.apple.private.ids\"][\"service-data\"]\n # realm_user_id = service_data[\"realm-user-id\"]\n # auth_token = service_data[\"auth-token\"]\n # print(f\"Auth token for {realm_user_id}: {auth_token}\")\n logger.debug(f\"Got auth token for IDS: {auth_token}\")\n return realm_user_id, auth_token\n\n\ndef _generate_csr(private_key: rsa.RSAPrivateKey) -> str:\n csr = (\n x509.CertificateSigningRequestBuilder()\n .subject_name(\n x509.Name(\n [\n x509.NameAttribute(NameOID.COMMON_NAME, random.randbytes(20).hex()),\n ]\n )\n )\n .sign(private_key, hashes.SHA256())\n )\n\n csr = 
csr.public_bytes(serialization.Encoding.PEM).decode(\"utf-8\")\n return (\n csr.replace(\"-----BEGIN CERTIFICATE REQUEST-----\", \"\")\n .replace(\"-----END CERTIFICATE REQUEST-----\", \"\")\n .replace(\"\\n\", \"\")\n )\n\n\n# Gets an IDS auth cert for the given user id and auth token\n# Returns [private key PEM, certificate PEM]\ndef get_auth_cert(user_id, token) -> KeyPair:\n BAG_KEY = \"id-authenticate-ds-id\"\n\n private_key = rsa.generate_private_key(\n public_exponent=65537, key_size=2048, backend=default_backend()\n )\n body = {\n \"authentication-data\": {\"auth-token\": token},\n \"csr\": b64decode(_generate_csr(private_key)),\n \"realm-user-id\": user_id,\n }\n\n body = plistlib.dumps(body)\n\n r = requests.post(\n bags.ids_bag()[BAG_KEY],\n #\"https://profile.ess.apple.com/WebObjects/VCProfileService.woa/wa/authenticateDS\",\n data=body,\n headers={\"x-protocol-version\": \"1630\"},\n verify=False,\n )\n r = plistlib.loads(r.content)\n if r[\"status\"] != 0:\n raise (Exception(f\"Failed to get auth cert: {r}\"))\n cert = x509.load_der_x509_certificate(r[\"cert\"])\n logger.debug(\"Got auth cert from token\")\n return KeyPair(\n private_key.private_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PrivateFormat.TraditionalOpenSSL,\n encryption_algorithm=serialization.NoEncryption(),\n )\n .decode(\"utf-8\")\n .strip(),\n cert.public_bytes(serialization.Encoding.PEM).decode(\"utf-8\").strip(),\n )\n\n\ndef get_handles(push_token, user_id: str, auth_key: KeyPair, push_key: KeyPair):\n BAG_KEY = \"id-get-handles\"\n\n headers = {\n \"x-protocol-version\": PROTOCOL_VERSION,\n \"x-auth-user-id\": user_id,\n }\n signing.add_auth_signature(\n headers, None, BAG_KEY, auth_key, push_key, push_token\n )\n\n r = requests.get(\n bags.ids_bag()[BAG_KEY],\n headers=headers,\n verify=False,\n )\n\n r = plistlib.loads(r.content)\n\n if not \"handles\" in r:\n raise Exception(\"No handles in response: \" + str(r))\n\n logger.debug(f\"User {user_id} has handles {r['handles']}\")\n return [handle[\"uri\"] for handle in r[\"handles\"]]\n",
"path": "ids/profile.py"
}
] | 9_3 | python | import unittest
import sys
import inspect
class TestCommitChanges(unittest.TestCase):
def test_gsa_removal(self):
with self.assertRaises(ImportError):
import gsa
def test_gsa_import_removal_in_profile(self):
from ids import profile
        # Verify that 'gsa' no longer appears among the module attributes of profile.py
self.assertNotIn('gsa', dir(profile))
def test_grandslam_auth_code_removal(self):
from ids import profile
source_code = inspect.getsource(profile.get_auth_token)
        # Check that the GrandSlam-specific code is absent from the source. Inspecting
        # source text is not standard practice, but it avoids needing a real iCloud sign-in to test.
self.assertNotIn('use_gsa = False', source_code)
self.assertNotIn('import objc', source_code)
self.assertNotIn('use_gsa = True', source_code)
self.assertNotIn('logger.debug("Using GrandSlam to authenticate (native Anisette)")', source_code)
self.assertNotIn('g = gsa.authenticate(username, password, gsa.Anisette())', source_code)
self.assertNotIn('password = g["t"]["com.apple.gs.idms.pet"]["token"]', source_code)
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCommitChanges))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
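For reference, a condensed sketch of `get_auth_token` after the patch above removes the GrandSlam branch. This is paraphrased from the `modified_files` payload, not a verbatim copy; `_auth_token_request` is the request helper already defined in `ids/profile.py`:

def get_auth_token(username: str, password: str, factor_gen: callable = None) -> tuple[str, str]:
    # Single code path now: no gsa import, no darwin/objc platform detection.
    result = _auth_token_request(username, password)
    if result["status"] == 5000:
        # 2FA required: append the code to the password and retry once
        code = input("Enter 2FA code: ") if factor_gen is None else factor_gen()
        result = _auth_token_request(username, password + code)
    if result["status"] != 0:
        raise Exception(f"Error: {result}")
    return result["profile-id"], result["auth-token"]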
https://github.com/teamqurrent/pypush | Clean up the repository by removing `data.plist` from the `emulated` folder | c87d188 | requests
cryptography
wheel
tlslite-ng==0.8.0a43
srp
pbkdf2 | python3.9 | 4939ea0 | diff --git a/emulated/data.plist b/emulated/data.plist
deleted file mode 100644
--- a/emulated/data.plist
+++ /dev/null
@@ -1,55 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
- <key>iokit</key>
- <dict>
- <key>4D1EDE05-38C7-4A6A-9CC6-4BCCA8B38C14:MLB</key>
- <data>
- QzAyNzIzMjA3QTVIV1ZRQVU=
- </data>
- <key>4D1EDE05-38C7-4A6A-9CC6-4BCCA8B38C14:ROM</key>
- <data>
- +AN3alom
- </data>
- <key>Fyp98tpgj</key>
- <data>
- U0TE/KnCc/AGEUYBuDpD8TQ=
- </data>
- <key>Gq3489ugfi</key>
- <data>
- S7OU9/LJ3K6TUgIzaNHU5kI=
- </data>
- <key>IOMACAddress</key>
- <data>
- 3KkEh8Ol
- </data>
- <key>IOPlatformSerialNumber</key>
- <string>C02TV034HV29</string>
- <key>IOPlatformUUID</key>
- <string>0E7049C5-6CC5-57E4-9353-EF54C4332A99</string>
- <key>abKPld1EcMni</key>
- <data>
- uqHToZ+DNmm75/jSPMzB1ZQ=
- </data>
- <key>board-id</key>
- <data>
- TWFjLUI0ODMxQ0VCRDUyQTBDNEMA
- </data>
- <key>kbjfrfpoJU</key>
- <data>
- WnM3jhjelH3+jt4jJ2OqfiQ=
- </data>
- <key>oycqAZloTNDm</key>
- <data>
- BNdC9rh4Wxif7S9NA8W1864=
- </data>
- <key>product-name</key>
- <data>
- TWFjQm9va1BybzE0LDEA
- </data>
- </dict>
- <key>root_disk_uuid</key>
- <string>3D6822B6-A26E-358C-BD6F-EFF407645F34</string>
-</dict>
-</plist>
\ No newline at end of file
| [
{
"content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n <key>iokit</key>\n <dict>\n <key>4D1EDE05-38C7-4A6A-9CC6-4BCCA8B38C14:MLB</key>\n <data>\n QzAyNzIzMjA3QTVIV1ZRQVU=\n </data>\n <key>4D1EDE05-38C7-4A6A-9CC6-4BCCA8B38C14:ROM</key>\n <data>\n +AN3alom\n </data>\n <key>Fyp98tpgj</key>\n <data>\n U0TE/KnCc/AGEUYBuDpD8TQ=\n </data>\n <key>Gq3489ugfi</key>\n <data>\n S7OU9/LJ3K6TUgIzaNHU5kI=\n </data>\n <key>IOMACAddress</key>\n <data>\n 3KkEh8Ol\n </data>\n <key>IOPlatformSerialNumber</key>\n <string>C02TV034HV29</string>\n <key>IOPlatformUUID</key>\n <string>0E7049C5-6CC5-57E4-9353-EF54C4332A99</string>\n <key>abKPld1EcMni</key>\n <data>\n uqHToZ+DNmm75/jSPMzB1ZQ=\n </data>\n <key>board-id</key>\n <data>\n TWFjLUI0ODMxQ0VCRDUyQTBDNEMA\n </data>\n <key>kbjfrfpoJU</key>\n <data>\n WnM3jhjelH3+jt4jJ2OqfiQ=\n </data>\n <key>oycqAZloTNDm</key>\n <data>\n BNdC9rh4Wxif7S9NA8W1864=\n </data>\n <key>product-name</key>\n <data>\n TWFjQm9va1BybzE0LDEA\n </data>\n </dict>\n <key>root_disk_uuid</key>\n <string>3D6822B6-A26E-358C-BD6F-EFF407645F34</string>\n</dict>\n</plist>",
"path": "emulated/data.plist"
}
] | 9_4 | python | import unittest
import sys
import os
class TestDataPlistRemoval(unittest.TestCase):
def test_data_plist_removal(self):
# Check that 'emulated/data.plist' does not exist
self.assertFalse(os.path.exists('emulated/data.plist'))
def main():
suite = unittest.TestSuite()
suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestDataPlistRemoval))
runner = unittest.TextTestRunner()
if runner.run(suite).wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
|
https://github.com/teamqurrent/pypush | In the file `imessage.py`, participants should be treated case-insensitively when reading the user cache. Add a line that lowercases each participant before checking for push tokens. | 627cedf | requests
cryptography
wheel
tlslite-ng==0.8.0a43
srp
pbkdf2 | python3.9 | e2102d0 | diff --git a/imessage.py b/imessage.py
--- a/imessage.py
+++ b/imessage.py
@@ -496,6 +496,7 @@ class iMessageUser:
bundled_payloads = []
for participant in message.participants:
+ participant = participant.lower()
for push_token in self.USER_CACHE[participant]:
if push_token == self.connection.token:
continue # Don't send to ourselves
| [
{
"content": "# LOW LEVEL imessage function, decryption etc\n# Don't handle APNS etc, accept it already setup\n\n## HAVE ANOTHER FILE TO SETUP EVERYTHING AUTOMATICALLY, etc\n# JSON parsing of keys, don't pass around strs??\n\nimport base64\nimport gzip\nimport logging\nimport plistlib\nimport random\nfrom typing import Union\nimport uuid\nfrom dataclasses import dataclass, field\nfrom hashlib import sha1, sha256\nfrom io import BytesIO\n\nfrom cryptography.hazmat.primitives import hashes\nfrom cryptography.hazmat.primitives.asymmetric import ec, padding\nfrom cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n\nfrom xml.etree import ElementTree\n\nimport apns\nimport ids\n\nlogger = logging.getLogger(\"imessage\")\n\nNORMAL_NONCE = b\"\\x00\" * 15 + b\"\\x01\" # This is always used as the AES nonce\n\n\nclass BalloonBody:\n \"\"\"Represents the special parts of message extensions etc.\"\"\"\n\n def __init__(self, type: str, data: bytes):\n self.type = type\n self.data = data\n\n # TODO : Register handlers based on type id\n\n\nclass AttachmentFile:\n def data(self) -> bytes:\n raise NotImplementedError()\n\n\n@dataclass\nclass MMCSFile(AttachmentFile):\n url: Union[str, None] = None\n size: Union[int, None] = None\n owner: Union[str, None] = None\n signature: Union[bytes, None] = None\n decryption_key: Union[bytes, None] = None\n\n def data(self) -> bytes:\n import requests\n logger.info(requests.get(\n url=self.url,\n headers={\n \"User-Agent\": f\"IMTransferAgent/900 CFNetwork/596.2.3 Darwin/12.2.0 (x86_64) (Macmini5,1)\",\n # \"MMCS-Url\": self.url,\n # \"MMCS-Signature\": str(base64.encodebytes(self.signature)),\n # \"MMCS-Owner\": self.owner\n },\n ).headers)\n return b\"\"\n\n\n@dataclass\nclass InlineFile(AttachmentFile):\n _data: bytes\n\n def data(self) -> bytes:\n return self._data\n\n\n@dataclass\nclass Attachment:\n name: str\n mime_type: str\n versions: list[AttachmentFile]\n\n def __init__(self, message_raw_content: dict, xml_element: ElementTree.Element):\n attrs = xml_element.attrib\n\n self.name = attrs[\"name\"] if \"name\" in attrs else None\n self.mime_type = attrs[\"mime-type\"] if \"mime-type\" in attrs else None\n\n if \"inline-attachment\" in attrs:\n # just grab the inline attachment !\n self.versions = [InlineFile(message_raw_content[attrs[\"inline-attachment\"]])]\n else:\n # suffer\n versions = []\n for attribute in attrs:\n if attribute.startswith(\"mmcs\") or \\\n attribute.startswith(\"decryption-key\") or \\\n attribute.startswith(\"file-size\"):\n segments = attribute.split('-')\n if segments[-1].isnumeric():\n index = int(segments[-1])\n attribute_name = segments[:-1]\n else:\n index = 0\n attribute_name = attribute\n\n while index >= len(versions):\n versions.append(MMCSFile())\n\n val = attrs[attribute_name]\n if attribute_name == \"mmcs-url\":\n versions[index].url = val\n elif attribute_name == \"mmcs-owner\":\n versions[index].owner = val\n elif attribute_name == \"mmcs-signature-hex\":\n versions[index].signature = base64.b16decode(val)\n elif attribute_name == \"file-size\":\n versions[index].size = int(val)\n elif attribute_name == \"decryption-key\":\n versions[index].decryption_key = base64.b16decode(val)[1:]\n\n self.versions = versions\n\n def __repr__(self):\n return f'<Attachment name=\"{self.name}\" type=\"{self.mime_type}\">'\n\n\n@dataclass\nclass iMessage:\n \"\"\"Represents an iMessage\"\"\"\n\n text: str = \"\"\n \"\"\"Plain text of message, always required, may be an empty string\"\"\"\n xml: Union[str, None] = 
None\n \"\"\"XML portion of message, may be None\"\"\"\n participants: list[str] = field(default_factory=list)\n \"\"\"List of participants in the message, including the sender\"\"\"\n sender: Union[str, None] = None\n \"\"\"Sender of the message\"\"\"\n id: Union[uuid.UUID, None] = None\n \"\"\"ID of the message, will be randomly generated if not provided\"\"\"\n group_id: Union[uuid.UUID, None] = None\n \"\"\"Group ID of the message, will be randomly generated if not provided\"\"\"\n body: Union[BalloonBody, None] = None\n \"\"\"BalloonBody, may be None\"\"\"\n effect: Union[str, None] = None\n \"\"\"iMessage effect sent with this message, may be None\"\"\"\n\n _compressed: bool = True\n \"\"\"Internal property representing whether the message should be compressed\"\"\"\n\n _raw: Union[dict, None] = None\n \"\"\"Internal property representing the original raw message, may be None\"\"\"\n\n def attachments(self) -> list[Attachment]:\n if self.xml is not None:\n return [Attachment(self._raw, elem) for elem in ElementTree.fromstring(self.xml)[0] if elem.tag == \"FILE\"]\n else:\n return []\n\n def sanity_check(self):\n \"\"\"Corrects any missing fields\"\"\"\n if self.id is None:\n self.id = uuid.uuid4()\n\n if self.group_id is None:\n self.group_id = uuid.uuid4()\n\n if self.sender is None:\n if len(self.participants) > 1:\n self.sender = self.participants[-1]\n else:\n logger.warning(\n \"Message has no sender, and only one participant, sanity check failed\"\n )\n return False\n\n if self.sender not in self.participants:\n self.participants.append(self.sender)\n\n if self.xml != None:\n self._compressed = False # XML is never compressed for some reason\n\n return True\n\n def from_raw(message: bytes, sender: Union[str, None] = None) -> \"iMessage\":\n \"\"\"Create an `iMessage` from raw message bytes\"\"\"\n compressed = False\n try:\n message = gzip.decompress(message)\n compressed = True\n except:\n pass\n\n message = plistlib.loads(message)\n\n return iMessage(\n text=message.get(\"t\", \"\"),\n xml=message.get(\"x\"),\n participants=message.get(\"p\", []),\n sender=sender if sender is not None else message.get(\"p\", [])[-1] if \"p\" in message else None,\n id=uuid.UUID(message.get(\"r\")) if \"r\" in message else None,\n group_id=uuid.UUID(message.get(\"gid\")) if \"gid\" in message else None,\n body=BalloonBody(message[\"bid\"], message[\"b\"]) if \"bid\" in message and \"b\" in message else None,\n effect=message[\"iid\"] if \"iid\" in message else None,\n _compressed=compressed,\n _raw=message,\n )\n\n def to_raw(self) -> bytes:\n \"\"\"Convert an `iMessage` to raw message bytes\"\"\"\n if not self.sanity_check():\n raise ValueError(\"Message failed sanity check\")\n\n d = {\n \"t\": self.text,\n \"x\": self.xml,\n \"p\": self.participants,\n \"r\": str(self.id).upper(),\n \"gid\": str(self.group_id).upper(),\n \"pv\": 0,\n \"gv\": \"8\",\n \"v\": \"1\",\n \"iid\": self.effect\n }\n\n # Remove keys that are None\n d = {k: v for k, v in d.items() if v is not None}\n\n # Serialize as a plist\n d = plistlib.dumps(d, fmt=plistlib.FMT_BINARY)\n\n # Compression\n if self._compressed:\n d = gzip.compress(d, mtime=0)\n\n return d\n\n def to_string(self) -> str:\n message_str = f\"[{self.sender}] '{self.text}'\"\n if self.effect is not None:\n message_str += f\" with effect [{self.effect}]\"\n return message_str\n\n\nclass iMessageUser:\n \"\"\"Represents a logged in and connected iMessage user.\n This abstraction should probably be reworked into IDS some time...\"\"\"\n\n def 
__init__(self, connection: apns.APNSConnection, user: ids.IDSUser):\n self.connection = connection\n self.user = user\n\n def _get_raw_message(self):\n \"\"\"\n Returns a raw APNs message corresponding to the next conforming notification in the queue\n Returns None if no conforming notification is found\n \"\"\"\n\n def check_response(x):\n if x[0] != 0x0A:\n return False\n if apns._get_field(x[1], 2) != sha1(\"com.apple.madrid\".encode()).digest():\n return False\n resp_body = apns._get_field(x[1], 3)\n if resp_body is None:\n # logger.debug(\"Rejecting madrid message with no body\")\n return False\n resp_body = plistlib.loads(resp_body)\n if \"P\" not in resp_body:\n # logger.debug(f\"Rejecting madrid message with no payload : {resp_body}\")\n return False\n return True\n\n payload = self.connection.incoming_queue.pop_find(check_response)\n if payload is None:\n return None\n id = apns._get_field(payload[1], 4)\n\n return payload\n\n def _parse_payload(payload: bytes) -> tuple[bytes, bytes]:\n payload = BytesIO(payload)\n\n tag = payload.read(1)\n #print(\"TAG\", tag)\n body_length = int.from_bytes(payload.read(2), \"big\")\n body = payload.read(body_length)\n\n signature_len = payload.read(1)[0]\n signature = payload.read(signature_len)\n\n return (body, signature)\n\n def _construct_payload(body: bytes, signature: bytes) -> bytes:\n payload = (\n b\"\\x02\"\n + len(body).to_bytes(2, \"big\")\n + body\n + len(signature).to_bytes(1, \"big\")\n + signature\n )\n return payload\n\n def _hash_identity(id: bytes) -> bytes:\n iden = ids.identity.IDSIdentity.decode(id)\n\n # TODO: Combine this with serialization code in ids.identity\n output = BytesIO()\n output.write(b\"\\x00\\x41\\x04\")\n output.write(\n ids._helpers.parse_key(iden.signing_public_key)\n .public_numbers()\n .x.to_bytes(32, \"big\")\n )\n output.write(\n ids._helpers.parse_key(iden.signing_public_key)\n .public_numbers()\n .y.to_bytes(32, \"big\")\n )\n\n output.write(b\"\\x00\\xAC\")\n output.write(b\"\\x30\\x81\\xA9\")\n output.write(b\"\\x02\\x81\\xA1\")\n output.write(\n ids._helpers.parse_key(iden.encryption_public_key)\n .public_numbers()\n .n.to_bytes(161, \"big\")\n )\n output.write(b\"\\x02\\x03\\x01\\x00\\x01\")\n\n return sha256(output.getvalue()).digest()\n\n def _encrypt_sign_payload(\n self, key: ids.identity.IDSIdentity, message: bytes\n ) -> bytes:\n # Generate a random AES key\n random_seed = random.randbytes(11)\n # Create the HMAC\n import hmac\n\n hm = hmac.new(\n random_seed,\n message\n + b\"\\x02\"\n + iMessageUser._hash_identity(self.user.encryption_identity.encode())\n + iMessageUser._hash_identity(key.encode()),\n sha256,\n ).digest()\n\n aes_key = random_seed + hm[:5]\n\n # print(len(aes_key))\n\n # Encrypt the message with the AES key\n cipher = Cipher(algorithms.AES(aes_key), modes.CTR(NORMAL_NONCE))\n encrypted = cipher.encryptor().update(message)\n\n # Encrypt the AES key with the public key of the recipient\n recipient_key = ids._helpers.parse_key(key.encryption_public_key)\n rsa_body = recipient_key.encrypt(\n aes_key + encrypted[:100],\n padding.OAEP(\n mgf=padding.MGF1(algorithm=hashes.SHA1()),\n algorithm=hashes.SHA1(),\n label=None,\n ),\n )\n\n # Construct the payload\n body = rsa_body + encrypted[100:]\n sig = ids._helpers.parse_key(self.user.encryption_identity.signing_key).sign(\n body, ec.ECDSA(hashes.SHA1())\n )\n payload = iMessageUser._construct_payload(body, sig)\n\n return payload\n\n def _decrypt_payload(self, payload: bytes) -> dict:\n payload = 
iMessageUser._parse_payload(payload)\n\n body = BytesIO(payload[0])\n rsa_body = ids._helpers.parse_key(\n self.user.encryption_identity.encryption_key\n ).decrypt(\n body.read(160),\n padding.OAEP(\n mgf=padding.MGF1(algorithm=hashes.SHA1()),\n algorithm=hashes.SHA1(),\n label=None,\n ),\n )\n\n cipher = Cipher(algorithms.AES(rsa_body[:16]), modes.CTR(NORMAL_NONCE))\n decrypted = cipher.decryptor().update(rsa_body[16:] + body.read())\n\n return decrypted\n\n def _verify_payload(self, payload: bytes, sender: str, sender_token: str) -> bool:\n # Get the public key for the sender\n self._cache_keys([sender])\n\n if not sender_token in self.KEY_CACHE:\n logger.warning(\"Unable to find the public key of the sender, cannot verify\")\n return False\n\n identity_keys = ids.identity.IDSIdentity.decode(self.KEY_CACHE[sender_token][0])\n sender_ec_key = ids._helpers.parse_key(identity_keys.signing_public_key)\n\n payload = iMessageUser._parse_payload(payload)\n\n try:\n # Verify the signature (will throw an exception if it fails)\n sender_ec_key.verify(\n payload[1],\n payload[0],\n ec.ECDSA(hashes.SHA1()),\n )\n return True\n except:\n return False\n\n def receive(self) -> Union[iMessage, None]:\n \"\"\"\n Will return the next iMessage in the queue, or None if there are no messages\n \"\"\"\n raw = self._get_raw_message()\n if raw is None:\n return None\n body = apns._get_field(raw[1], 3)\n body = plistlib.loads(body)\n #print(f\"Got body message {body}\")\n payload = body[\"P\"]\n\n if not self._verify_payload(payload, body['sP'], body[\"t\"]):\n raise Exception(\"Failed to verify payload\")\n \n decrypted = self._decrypt_payload(payload)\n \n return iMessage.from_raw(decrypted, body['sP'])\n\n KEY_CACHE_HANDLE: str = \"\"\n KEY_CACHE: dict[bytes, tuple[bytes, bytes]] = {}\n \"\"\"Mapping of push token : (public key, session token)\"\"\"\n USER_CACHE: dict[str, list[bytes]] = {}\n \"\"\"Mapping of handle : [push tokens]\"\"\"\n\n def _cache_keys(self, participants: list[str]):\n # Clear the cache if the handle has changed\n if self.KEY_CACHE_HANDLE != self.user.current_handle:\n self.KEY_CACHE_HANDLE = self.user.current_handle\n self.KEY_CACHE = {}\n self.USER_CACHE = {}\n \n # Check to see if we have cached the keys for all of the participants\n if all([p in self.USER_CACHE for p in participants]):\n return\n\n # Look up the public keys for the participants, and cache a token : public key mapping\n lookup = self.user.lookup(participants)\n\n for key, participant in lookup.items():\n if not key in self.USER_CACHE:\n self.USER_CACHE[key] = []\n\n for identity in participant[\"identities\"]:\n if not \"client-data\" in identity:\n continue\n if not \"public-message-identity-key\" in identity[\"client-data\"]:\n continue\n if not \"push-token\" in identity:\n continue\n if not \"session-token\" in identity:\n continue\n\n self.USER_CACHE[key].append(identity[\"push-token\"])\n\n # print(identity)\n\n self.KEY_CACHE[identity[\"push-token\"]] = (\n identity[\"client-data\"][\"public-message-identity-key\"],\n identity[\"session-token\"],\n )\n\n def send(self, message: iMessage):\n # Set the sender, if it isn't already\n if message.sender is None:\n message.sender = self.user.handles[0] # TODO : Which handle to use?\n\n message.sanity_check() # Sanity check MUST be called before caching keys, so that the sender is added to the list of participants\n self._cache_keys(message.participants)\n\n # Turn the message into a raw message\n raw = message.to_raw()\n import base64\n\n bundled_payloads = []\n for 
participant in message.participants:\n for push_token in self.USER_CACHE[participant]:\n if push_token == self.connection.token:\n continue # Don't send to ourselves\n\n identity_keys = ids.identity.IDSIdentity.decode(\n self.KEY_CACHE[push_token][0]\n )\n payload = self._encrypt_sign_payload(identity_keys, raw)\n\n bundled_payloads.append(\n {\n \"tP\": participant,\n \"D\": not participant\n == message.sender, # TODO: Should this be false sometimes? For self messages?\n \"sT\": self.KEY_CACHE[push_token][1],\n \"P\": payload,\n \"t\": push_token,\n }\n )\n\n msg_id = random.randbytes(4)\n body = {\n \"fcn\": 1,\n \"c\": 100,\n \"E\": \"pair\",\n \"ua\": \"[macOS,13.4.1,22F82,MacBookPro18,3]\",\n \"v\": 8,\n \"i\": int.from_bytes(msg_id, \"big\"),\n \"U\": message.id.bytes,\n \"dtl\": bundled_payloads,\n \"sP\": message.sender,\n }\n\n body = plistlib.dumps(body, fmt=plistlib.FMT_BINARY)\n\n self.connection.send_message(\"com.apple.madrid\", body, msg_id)\n\n # This code can check to make sure we got a success response, but waiting for the response is annoying,\n # so for now we just YOLO it and assume it worked\n\n # def check_response(x):\n # if x[0] != 0x0A:\n # return False\n # if apns._get_field(x[1], 2) != sha1(\"com.apple.madrid\".encode()).digest():\n # return False\n # resp_body = apns._get_field(x[1], 3)\n # if resp_body is None:\n # return False\n # resp_body = plistlib.loads(resp_body)\n # if \"c\" not in resp_body or resp_body[\"c\"] != 255:\n # return False\n # return True\n \n\n # num_recv = 0\n # while True:\n # if num_recv == len(bundled_payloads):\n # break\n # payload = self.connection.incoming_queue.wait_pop_find(check_response)\n # if payload is None:\n # continue\n\n # resp_body = apns._get_field(payload[1], 3)\n # resp_body = plistlib.loads(resp_body)\n # logger.error(resp_body)\n # num_recv += 1\n",
"path": "imessage.py"
}
] | 9_5 | python | import unittest
import sys


class TestCodeStructure(unittest.TestCase):
    def test_participant_lowercasing_in_send_method(self):
        # Read the solution file and check that the lowercasing line was
        # inserted directly after the participant loop in iMessageUser.send().
        with open('imessage.py', 'r') as file:
            lines = file.readlines()

        expected_line = "            participant = participant.lower()\n"
        preceding_line = "        for participant in message.participants:\n"

        # Lines are compared after .strip(), so indentation differences in
        # the solution file do not cause false negatives.
        found = False
        for i in range(len(lines) - 1):
            if lines[i].strip() == preceding_line.strip() and lines[i + 1].strip() == expected_line.strip():
                found = True
                break

        self.assertTrue(found, "The line 'participant = participant.lower()' was not found in the expected location.")
def main():
    suite = unittest.TestSuite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(TestCodeStructure))
    runner = unittest.TextTestRunner()
    if runner.run(suite).wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == '__main__':
    main()
|