| language (stringclasses, 2 values) | source_code (stringlengths, 0-963k) | test_code (stringlengths, 300-420k) | source_path (stringlengths, 29-179) | test_path (stringlengths, 36-183) | repo_name (stringclasses, 58 values) | instruction (stringlengths, 380-1.3k) | meta_class (stringlengths, 2-57) |
|---|---|---|---|---|---|---|---|
python
|
import contextlib
import os
@contextlib.contextmanager
def override_environ(**kwargs):
save_env = dict(os.environ)
for key, value in kwargs.items():
if value is None:
del os.environ[key]
else:
os.environ[key] = value
try:
yield
finally:
os.environ.clear()
os.environ.update(save_env)
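# A minimal usage sketch (illustrative only; "EXAMPLE_KEY" is a hypothetical
# variable, not something the test-suite sets). A value sets the key for the
# duration of the block and the original environment is restored afterwards;
# passing None deletes the key and raises KeyError if it is absent.
if __name__ == "__main__":
    with override_environ(EXAMPLE_KEY="value"):
        assert os.environ["EXAMPLE_KEY"] == "value"
    assert "EXAMPLE_KEY" not in os.environ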
|
import copy
import filecmp
import os
import tarfile
import zipfile
from collections import deque
from io import BytesIO
from unittest import mock
import pytest
from requests import compat
from requests._internal_utils import unicode_is_ascii
from requests.cookies import RequestsCookieJar
from requests.structures import CaseInsensitiveDict
from requests.utils import (
_parse_content_type_header,
add_dict_to_cookiejar,
address_in_network,
dotted_netmask,
extract_zipped_paths,
get_auth_from_url,
get_encoding_from_headers,
get_encodings_from_content,
get_environ_proxies,
get_netrc_auth,
guess_filename,
guess_json_utf,
is_ipv4_address,
is_valid_cidr,
iter_slices,
parse_dict_header,
parse_header_links,
prepend_scheme_if_needed,
requote_uri,
select_proxy,
set_environ,
should_bypass_proxies,
super_len,
to_key_val_list,
to_native_string,
unquote_header_value,
unquote_unreserved,
urldefragauth,
)
from .compat import StringIO, cStringIO
class TestSuperLen:
@pytest.mark.parametrize(
"stream, value",
(
(StringIO.StringIO, "Test"),
(BytesIO, b"Test"),
pytest.param(
cStringIO, "Test", marks=pytest.mark.skipif("cStringIO is None")
),
),
)
def test_io_streams(self, stream, value):
"""Ensures that we properly deal with different kinds of IO streams."""
assert super_len(stream()) == 0
assert super_len(stream(value)) == 4
def test_super_len_correctly_calculates_len_of_partially_read_file(self):
"""Ensure that we handle partially consumed file like objects."""
s = StringIO.StringIO()
s.write("foobarbogus")
assert super_len(s) == 0
@pytest.mark.parametrize("error", [IOError, OSError])
def test_super_len_handles_files_raising_weird_errors_in_tell(self, error):
"""If tell() raises errors, assume the cursor is at position zero."""
class BoomFile:
def __len__(self):
return 5
def tell(self):
raise error()
assert super_len(BoomFile()) == 0
@pytest.mark.parametrize("error", [IOError, OSError])
def test_super_len_tell_ioerror(self, error):
"""Ensure that if tell gives an IOError super_len doesn't fail"""
class NoLenBoomFile:
def tell(self):
raise error()
def seek(self, offset, whence):
pass
assert super_len(NoLenBoomFile()) == 0
def test_string(self):
assert super_len("Test") == 4
@pytest.mark.parametrize(
"mode, warnings_num",
(
("r", 1),
("rb", 0),
),
)
def test_file(self, tmpdir, mode, warnings_num, recwarn):
file_obj = tmpdir.join("test.txt")
file_obj.write("Test")
with file_obj.open(mode) as fd:
assert super_len(fd) == 4
assert len(recwarn) == warnings_num
def test_tarfile_member(self, tmpdir):
file_obj = tmpdir.join("test.txt")
file_obj.write("Test")
tar_obj = str(tmpdir.join("test.tar"))
with tarfile.open(tar_obj, "w") as tar:
tar.add(str(file_obj), arcname="test.txt")
with tarfile.open(tar_obj) as tar:
member = tar.extractfile("test.txt")
assert super_len(member) == 4
def test_super_len_with__len__(self):
foo = [1, 2, 3, 4]
len_foo = super_len(foo)
assert len_foo == 4
def test_super_len_with_no__len__(self):
class LenFile:
def __init__(self):
self.len = 5
assert super_len(LenFile()) == 5
def test_super_len_with_tell(self):
foo = StringIO.StringIO("12345")
assert super_len(foo) == 5
foo.read(2)
assert super_len(foo) == 3
def test_super_len_with_fileno(self):
with open(__file__, "rb") as f:
length = super_len(f)
file_data = f.read()
assert length == len(file_data)
def test_super_len_with_no_matches(self):
"""Ensure that objects without any length methods default to 0"""
assert super_len(object()) == 0
class TestGetNetrcAuth:
def test_works(self, tmp_path, monkeypatch):
netrc_path = tmp_path / ".netrc"
monkeypatch.setenv("NETRC", str(netrc_path))
with open(netrc_path, "w") as f:
f.write("machine example.com login aaaa password bbbb\n")
auth = get_netrc_auth("http://example.com/thing")
assert auth == ("aaaa", "bbbb")
def test_not_vulnerable_to_bad_url_parsing(self, tmp_path, monkeypatch):
netrc_path = tmp_path / ".netrc"
monkeypatch.setenv("NETRC", str(netrc_path))
with open(netrc_path, "w") as f:
f.write("machine example.com login aaaa password bbbb\n")
auth = get_netrc_auth("http://example.com:@evil.com/'")
assert auth is None
class TestToKeyValList:
@pytest.mark.parametrize(
"value, expected",
(
([("key", "val")], [("key", "val")]),
((("key", "val"),), [("key", "val")]),
({"key": "val"}, [("key", "val")]),
(None, None),
),
)
def test_valid(self, value, expected):
assert to_key_val_list(value) == expected
def test_invalid(self):
with pytest.raises(ValueError):
to_key_val_list("string")
class TestUnquoteHeaderValue:
@pytest.mark.parametrize(
"value, expected",
(
(None, None),
("Test", "Test"),
('"Test"', "Test"),
('"Test\\\\"', "Test\\"),
('"\\\\Comp\\Res"', "\\Comp\\Res"),
),
)
def test_valid(self, value, expected):
assert unquote_header_value(value) == expected
def test_is_filename(self):
assert unquote_header_value('"\\\\Comp\\Res"', True) == "\\\\Comp\\Res"
class TestGetEnvironProxies:
"""Ensures that IP addresses are correctly matches with ranges
in no_proxy variable.
"""
@pytest.fixture(autouse=True, params=["no_proxy", "NO_PROXY"])
def no_proxy(self, request, monkeypatch):
monkeypatch.setenv(
request.param, "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1"
)
@pytest.mark.parametrize(
"url",
(
"http://192.168.0.1:5000/",
"http://192.168.0.1/",
"http://172.16.1.1/",
"http://172.16.1.1:5000/",
"http://localhost.localdomain:5000/v1.0/",
),
)
def test_bypass(self, url):
assert get_environ_proxies(url, no_proxy=None) == {}
@pytest.mark.parametrize(
"url",
(
"http://192.168.1.1:5000/",
"http://192.168.1.1/",
"http://www.requests.com/",
),
)
def test_not_bypass(self, url):
assert get_environ_proxies(url, no_proxy=None) != {}
@pytest.mark.parametrize(
"url",
(
"http://192.168.1.1:5000/",
"http://192.168.1.1/",
"http://www.requests.com/",
),
)
def test_bypass_no_proxy_keyword(self, url):
no_proxy = "192.168.1.1,requests.com"
assert get_environ_proxies(url, no_proxy=no_proxy) == {}
@pytest.mark.parametrize(
"url",
(
"http://192.168.0.1:5000/",
"http://192.168.0.1/",
"http://172.16.1.1/",
"http://172.16.1.1:5000/",
"http://localhost.localdomain:5000/v1.0/",
),
)
def test_not_bypass_no_proxy_keyword(self, url, monkeypatch):
# This is testing that the 'no_proxy' argument overrides the
# environment variable 'no_proxy'
monkeypatch.setenv("http_proxy", "http://proxy.example.com:3128/")
no_proxy = "192.168.1.1,requests.com"
assert get_environ_proxies(url, no_proxy=no_proxy) != {}
class TestIsIPv4Address:
def test_valid(self):
assert is_ipv4_address("8.8.8.8")
@pytest.mark.parametrize("value", ("8.8.8.8.8", "localhost.localdomain"))
def test_invalid(self, value):
assert not is_ipv4_address(value)
class TestIsValidCIDR:
def test_valid(self):
assert is_valid_cidr("192.168.1.0/24")
@pytest.mark.parametrize(
"value",
(
"8.8.8.8",
"192.168.1.0/a",
"192.168.1.0/128",
"192.168.1.0/-1",
"192.168.1.999/24",
),
)
def test_invalid(self, value):
assert not is_valid_cidr(value)
class TestAddressInNetwork:
def test_valid(self):
assert address_in_network("192.168.1.1", "192.168.1.0/24")
def test_invalid(self):
assert not address_in_network("172.16.0.1", "192.168.1.0/24")
class TestGuessFilename:
@pytest.mark.parametrize(
"value",
(1, type("Fake", (object,), {"name": 1})()),
)
def test_guess_filename_invalid(self, value):
assert guess_filename(value) is None
@pytest.mark.parametrize(
"value, expected_type",
(
(b"value", compat.bytes),
(b"value".decode("utf-8"), compat.str),
),
)
def test_guess_filename_valid(self, value, expected_type):
obj = type("Fake", (object,), {"name": value})()
result = guess_filename(obj)
assert result == value
assert isinstance(result, expected_type)
class TestExtractZippedPaths:
@pytest.mark.parametrize(
"path",
(
"/",
__file__,
pytest.__file__,
"/etc/invalid/location",
),
)
def test_unzipped_paths_unchanged(self, path):
assert path == extract_zipped_paths(path)
def test_zipped_paths_extracted(self, tmpdir):
zipped_py = tmpdir.join("test.zip")
with zipfile.ZipFile(zipped_py.strpath, "w") as f:
f.write(__file__)
_, name = os.path.splitdrive(__file__)
zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r"\/"))
extracted_path = extract_zipped_paths(zipped_path)
assert extracted_path != zipped_path
assert os.path.exists(extracted_path)
assert filecmp.cmp(extracted_path, __file__)
def test_invalid_unc_path(self):
path = r"\\localhost\invalid\location"
assert extract_zipped_paths(path) == path
class TestContentEncodingDetection:
def test_none(self):
encodings = get_encodings_from_content("")
assert not len(encodings)
@pytest.mark.parametrize(
"content",
(
# HTML5 meta charset attribute
'<meta charset="UTF-8">',
# HTML4 pragma directive
'<meta http-equiv="Content-type" content="text/html;charset=UTF-8">',
# XHTML 1.x served with text/html MIME type
'<meta http-equiv="Content-type" content="text/html;charset=UTF-8" />',
# XHTML 1.x served as XML
'<?xml version="1.0" encoding="UTF-8"?>',
),
)
def test_pragmas(self, content):
encodings = get_encodings_from_content(content)
assert len(encodings) == 1
assert encodings[0] == "UTF-8"
def test_precedence(self):
content = """
<?xml version="1.0" encoding="XML"?>
<meta charset="HTML5">
<meta http-equiv="Content-type" content="text/html;charset=HTML4" />
""".strip()
assert get_encodings_from_content(content) == ["HTML5", "HTML4", "XML"]
class TestGuessJSONUTF:
@pytest.mark.parametrize(
"encoding",
(
"utf-32",
"utf-8-sig",
"utf-16",
"utf-8",
"utf-16-be",
"utf-16-le",
"utf-32-be",
"utf-32-le",
),
)
def test_encoded(self, encoding):
data = "{}".encode(encoding)
assert guess_json_utf(data) == encoding
def test_bad_utf_like_encoding(self):
assert guess_json_utf(b"\x00\x00\x00\x00") is None
@pytest.mark.parametrize(
("encoding", "expected"),
(
("utf-16-be", "utf-16"),
("utf-16-le", "utf-16"),
("utf-32-be", "utf-32"),
("utf-32-le", "utf-32"),
),
)
def test_guess_by_bom(self, encoding, expected):
data = "\ufeff{}".encode(encoding)
assert guess_json_utf(data) == expected
USER = PASSWORD = "%!*'();:@&=+$,/?#[] "
ENCODED_USER = compat.quote(USER, "")
ENCODED_PASSWORD = compat.quote(PASSWORD, "")
@pytest.mark.parametrize(
"url, auth",
(
(
f"http://{ENCODED_USER}:{ENCODED_PASSWORD}@request.com/url.html#test",
(USER, PASSWORD),
),
("http://user:[email protected]/path?query=yes", ("user", "pass")),
(
"http://user:pass%[email protected]/path?query=yes",
("user", "pass pass"),
),
("http://user:pass [email protected]/path?query=yes", ("user", "pass pass")),
(
"http://user%25user:[email protected]/path?query=yes",
("user%user", "pass"),
),
(
"http://user:pass%[email protected]/path?query=yes",
("user", "pass#pass"),
),
("http://complex.url.com/path?query=yes", ("", "")),
),
)
def test_get_auth_from_url(url, auth):
assert get_auth_from_url(url) == auth
@pytest.mark.parametrize(
"uri, expected",
(
(
# Ensure requoting doesn't break expectations
"http://example.com/fiz?buz=%25ppicture",
"http://example.com/fiz?buz=%25ppicture",
),
(
# Ensure we handle unquoted percent signs in redirects
"http://example.com/fiz?buz=%ppicture",
"http://example.com/fiz?buz=%25ppicture",
),
),
)
def test_requote_uri_with_unquoted_percents(uri, expected):
"""See: https://github.com/psf/requests/issues/2356"""
assert requote_uri(uri) == expected
@pytest.mark.parametrize(
"uri, expected",
(
(
# Illegal bytes
"http://example.com/?a=%--",
"http://example.com/?a=%--",
),
(
# Reserved characters
"http://example.com/?a=%300",
"http://example.com/?a=00",
),
),
)
def test_unquote_unreserved(uri, expected):
assert unquote_unreserved(uri) == expected
@pytest.mark.parametrize(
"mask, expected",
(
(8, "255.0.0.0"),
(24, "255.255.255.0"),
(25, "255.255.255.128"),
),
)
def test_dotted_netmask(mask, expected):
assert dotted_netmask(mask) == expected
http_proxies = {
"http": "http://http.proxy",
"http://some.host": "http://some.host.proxy",
}
all_proxies = {
"all": "socks5://http.proxy",
"all://some.host": "socks5://some.host.proxy",
}
mixed_proxies = {
"http": "http://http.proxy",
"http://some.host": "http://some.host.proxy",
"all": "socks5://http.proxy",
}
@pytest.mark.parametrize(
"url, expected, proxies",
(
("hTTp://u:[email protected]/path", "http://some.host.proxy", http_proxies),
("hTTp://u:[email protected]/path", "http://http.proxy", http_proxies),
("hTTp:///path", "http://http.proxy", http_proxies),
("hTTps://Other.Host", None, http_proxies),
("file:///etc/motd", None, http_proxies),
("hTTp://u:[email protected]/path", "socks5://some.host.proxy", all_proxies),
("hTTp://u:[email protected]/path", "socks5://http.proxy", all_proxies),
("hTTp:///path", "socks5://http.proxy", all_proxies),
("hTTps://Other.Host", "socks5://http.proxy", all_proxies),
("http://u:[email protected]/path", "http://http.proxy", mixed_proxies),
("http://u:[email protected]/path", "http://some.host.proxy", mixed_proxies),
("https://u:[email protected]/path", "socks5://http.proxy", mixed_proxies),
("https://u:[email protected]/path", "socks5://http.proxy", mixed_proxies),
("https://", "socks5://http.proxy", mixed_proxies),
# XXX: unsure whether this is reasonable behavior
("file:///etc/motd", "socks5://http.proxy", all_proxies),
),
)
def test_select_proxies(url, expected, proxies):
"""Make sure we can select per-host proxies correctly."""
assert select_proxy(url, proxies) == expected
@pytest.mark.parametrize(
"value, expected",
(
('foo="is a fish", bar="as well"', {"foo": "is a fish", "bar": "as well"}),
("key_without_value", {"key_without_value": None}),
),
)
def test_parse_dict_header(value, expected):
assert parse_dict_header(value) == expected
@pytest.mark.parametrize(
"value, expected",
(
("application/xml", ("application/xml", {})),
(
"application/json ; charset=utf-8",
("application/json", {"charset": "utf-8"}),
),
(
"application/json ; Charset=utf-8",
("application/json", {"charset": "utf-8"}),
),
("text/plain", ("text/plain", {})),
(
"multipart/form-data; boundary = something ; boundary2='something_else' ; no_equals ",
(
"multipart/form-data",
{
"boundary": "something",
"boundary2": "something_else",
"no_equals": True,
},
),
),
(
'multipart/form-data; boundary = something ; boundary2="something_else" ; no_equals ',
(
"multipart/form-data",
{
"boundary": "something",
"boundary2": "something_else",
"no_equals": True,
},
),
),
(
"multipart/form-data; boundary = something ; 'boundary2=something_else' ; no_equals ",
(
"multipart/form-data",
{
"boundary": "something",
"boundary2": "something_else",
"no_equals": True,
},
),
),
(
'multipart/form-data; boundary = something ; "boundary2=something_else" ; no_equals ',
(
"multipart/form-data",
{
"boundary": "something",
"boundary2": "something_else",
"no_equals": True,
},
),
),
("application/json ; ; ", ("application/json", {})),
),
)
def test__parse_content_type_header(value, expected):
assert _parse_content_type_header(value) == expected
@pytest.mark.parametrize(
"value, expected",
(
(CaseInsensitiveDict(), None),
(
CaseInsensitiveDict({"content-type": "application/json; charset=utf-8"}),
"utf-8",
),
(CaseInsensitiveDict({"content-type": "text/plain"}), "ISO-8859-1"),
),
)
def test_get_encoding_from_headers(value, expected):
assert get_encoding_from_headers(value) == expected
@pytest.mark.parametrize(
"value, length",
(
("", 0),
("T", 1),
("Test", 4),
("Cont", 0),
("Other", -5),
("Content", None),
),
)
def test_iter_slices(value, length):
if length is None or (length <= 0 and len(value) > 0):
# Reads all content at once
assert len(list(iter_slices(value, length))) == 1
else:
assert len(list(iter_slices(value, 1))) == length
@pytest.mark.parametrize(
"value, expected",
(
(
'<http:/.../front.jpeg>; rel=front; type="image/jpeg"',
[{"url": "http:/.../front.jpeg", "rel": "front", "type": "image/jpeg"}],
),
("<http:/.../front.jpeg>", [{"url": "http:/.../front.jpeg"}]),
("<http:/.../front.jpeg>;", [{"url": "http:/.../front.jpeg"}]),
(
'<http:/.../front.jpeg>; type="image/jpeg",<http://.../back.jpeg>;',
[
{"url": "http:/.../front.jpeg", "type": "image/jpeg"},
{"url": "http://.../back.jpeg"},
],
),
("", []),
),
)
def test_parse_header_links(value, expected):
assert parse_header_links(value) == expected
@pytest.mark.parametrize(
"value, expected",
(
("example.com/path", "http://example.com/path"),
("//example.com/path", "http://example.com/path"),
("example.com:80", "http://example.com:80"),
(
"http://user:[email protected]/path?query",
"http://user:[email protected]/path?query",
),
("http://[email protected]/path?query", "http://[email protected]/path?query"),
),
)
def test_prepend_scheme_if_needed(value, expected):
assert prepend_scheme_if_needed(value, "http") == expected
@pytest.mark.parametrize(
"value, expected",
(
("T", "T"),
(b"T", "T"),
("T", "T"),
),
)
def test_to_native_string(value, expected):
assert to_native_string(value) == expected
@pytest.mark.parametrize(
"url, expected",
(
("http://u:[email protected]/path?a=1#test", "http://example.com/path?a=1"),
("http://example.com/path", "http://example.com/path"),
("//u:[email protected]/path", "//example.com/path"),
("//example.com/path", "//example.com/path"),
("example.com/path", "//example.com/path"),
("scheme:u:[email protected]/path", "scheme://example.com/path"),
),
)
def test_urldefragauth(url, expected):
assert urldefragauth(url) == expected
@pytest.mark.parametrize(
"url, expected",
(
("http://192.168.0.1:5000/", True),
("http://192.168.0.1/", True),
("http://172.16.1.1/", True),
("http://172.16.1.1:5000/", True),
("http://localhost.localdomain:5000/v1.0/", True),
("http://google.com:6000/", True),
("http://172.16.1.12/", False),
("http://172.16.1.12:5000/", False),
("http://google.com:5000/v1.0/", False),
("file:///some/path/on/disk", True),
),
)
def test_should_bypass_proxies(url, expected, monkeypatch):
"""Tests for function should_bypass_proxies to check if proxy
can be bypassed or not
"""
monkeypatch.setenv(
"no_proxy",
"192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000",
)
monkeypatch.setenv(
"NO_PROXY",
"192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000",
)
assert should_bypass_proxies(url, no_proxy=None) == expected
@pytest.mark.parametrize(
"url, expected",
(
("http://172.16.1.1/", "172.16.1.1"),
("http://172.16.1.1:5000/", "172.16.1.1"),
("http://user:[email protected]", "172.16.1.1"),
("http://user:[email protected]:5000", "172.16.1.1"),
("http://hostname/", "hostname"),
("http://hostname:5000/", "hostname"),
("http://user:pass@hostname", "hostname"),
("http://user:pass@hostname:5000", "hostname"),
),
)
def test_should_bypass_proxies_pass_only_hostname(url, expected):
"""The proxy_bypass function should be called with a hostname or IP without
a port number or auth credentials.
"""
with mock.patch("requests.utils.proxy_bypass") as proxy_bypass:
should_bypass_proxies(url, no_proxy=None)
proxy_bypass.assert_called_once_with(expected)
@pytest.mark.parametrize(
"cookiejar",
(
compat.cookielib.CookieJar(),
RequestsCookieJar(),
),
)
def test_add_dict_to_cookiejar(cookiejar):
"""Ensure add_dict_to_cookiejar works for
non-RequestsCookieJar CookieJars
"""
cookiedict = {"test": "cookies", "good": "cookies"}
cj = add_dict_to_cookiejar(cookiejar, cookiedict)
cookies = {cookie.name: cookie.value for cookie in cj}
assert cookiedict == cookies
@pytest.mark.parametrize(
"value, expected",
(
("test", True),
("æíöû", False),
("ジェーピーニック", False),
),
)
def test_unicode_is_ascii(value, expected):
assert unicode_is_ascii(value) is expected
@pytest.mark.parametrize(
"url, expected",
(
("http://192.168.0.1:5000/", True),
("http://192.168.0.1/", True),
("http://172.16.1.1/", True),
("http://172.16.1.1:5000/", True),
("http://localhost.localdomain:5000/v1.0/", True),
("http://172.16.1.12/", False),
("http://172.16.1.12:5000/", False),
("http://google.com:5000/v1.0/", False),
),
)
def test_should_bypass_proxies_no_proxy(url, expected, monkeypatch):
"""Tests for function should_bypass_proxies to check if proxy
can be bypassed or not using the 'no_proxy' argument
"""
no_proxy = "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1"
# Test 'no_proxy' argument
assert should_bypass_proxies(url, no_proxy=no_proxy) == expected
@pytest.mark.skipif(os.name != "nt", reason="Test only on Windows")
@pytest.mark.parametrize(
"url, expected, override",
(
("http://192.168.0.1:5000/", True, None),
("http://192.168.0.1/", True, None),
("http://172.16.1.1/", True, None),
("http://172.16.1.1:5000/", True, None),
("http://localhost.localdomain:5000/v1.0/", True, None),
("http://172.16.1.22/", False, None),
("http://172.16.1.22:5000/", False, None),
("http://google.com:5000/v1.0/", False, None),
("http://mylocalhostname:5000/v1.0/", True, "<local>"),
("http://192.168.0.1/", False, ""),
),
)
def test_should_bypass_proxies_win_registry(url, expected, override, monkeypatch):
"""Tests for function should_bypass_proxies to check if proxy
can be bypassed or not with Windows registry settings
"""
if override is None:
override = "192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1"
import winreg
class RegHandle:
def Close(self):
pass
ie_settings = RegHandle()
proxyEnableValues = deque([1, "1"])
def OpenKey(key, subkey):
return ie_settings
def QueryValueEx(key, value_name):
if key is ie_settings:
if value_name == "ProxyEnable":
# this could be a string (REG_SZ) or a 32-bit number (REG_DWORD)
proxyEnableValues.rotate()
return [proxyEnableValues[0]]
elif value_name == "ProxyOverride":
return [override]
monkeypatch.setenv("http_proxy", "")
monkeypatch.setenv("https_proxy", "")
monkeypatch.setenv("ftp_proxy", "")
monkeypatch.setenv("no_proxy", "")
monkeypatch.setenv("NO_PROXY", "")
monkeypatch.setattr(winreg, "OpenKey", OpenKey)
monkeypatch.setattr(winreg, "QueryValueEx", QueryValueEx)
assert should_bypass_proxies(url, None) == expected
@pytest.mark.skipif(os.name != "nt", reason="Test only on Windows")
def test_should_bypass_proxies_win_registry_bad_values(monkeypatch):
"""Tests for function should_bypass_proxies to check if proxy
can be bypassed or not with Windows invalid registry settings.
"""
import winreg
class RegHandle:
def Close(self):
pass
ie_settings = RegHandle()
def OpenKey(key, subkey):
return ie_settings
def QueryValueEx(key, value_name):
if key is ie_settings:
if value_name == "ProxyEnable":
# Invalid response; Should be an int or int-y value
return [""]
elif value_name == "ProxyOverride":
return ["192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1"]
monkeypatch.setenv("http_proxy", "")
monkeypatch.setenv("https_proxy", "")
monkeypatch.setenv("no_proxy", "")
monkeypatch.setenv("NO_PROXY", "")
monkeypatch.setattr(winreg, "OpenKey", OpenKey)
monkeypatch.setattr(winreg, "QueryValueEx", QueryValueEx)
assert should_bypass_proxies("http://172.16.1.1/", None) is False
@pytest.mark.parametrize(
"env_name, value",
(
("no_proxy", "192.168.0.0/24,127.0.0.1,localhost.localdomain"),
("no_proxy", None),
("a_new_key", "192.168.0.0/24,127.0.0.1,localhost.localdomain"),
("a_new_key", None),
),
)
def test_set_environ(env_name, value):
"""Tests set_environ will set environ values and will restore the environ."""
environ_copy = copy.deepcopy(os.environ)
with set_environ(env_name, value):
assert os.environ.get(env_name) == value
assert os.environ == environ_copy
def test_set_environ_raises_exception():
"""Tests set_environ will raise exceptions in context when the
value parameter is None."""
with pytest.raises(Exception) as exception:
with set_environ("test1", None):
raise Exception("Expected exception")
assert "Expected exception" in str(exception.value)
@pytest.mark.skipif(os.name != "nt", reason="Test only on Windows")
def test_should_bypass_proxies_win_registry_ProxyOverride_value(monkeypatch):
"""Tests for function should_bypass_proxies to check if proxy
can be bypassed or not with Windows ProxyOverride registry value ending with a semicolon.
"""
import winreg
class RegHandle:
def Close(self):
pass
ie_settings = RegHandle()
def OpenKey(key, subkey):
return ie_settings
def QueryValueEx(key, value_name):
if key is ie_settings:
if value_name == "ProxyEnable":
return [1]
elif value_name == "ProxyOverride":
return [
"192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1;<-loopback>;"
]
monkeypatch.setenv("NO_PROXY", "")
monkeypatch.setenv("no_proxy", "")
monkeypatch.setattr(winreg, "OpenKey", OpenKey)
monkeypatch.setattr(winreg, "QueryValueEx", QueryValueEx)
assert should_bypass_proxies("http://example.com/", None) is False
|
./temp_repos/requests/tests/utils.py
|
./temp_repos/requests/tests/test_utils.py
|
requests
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: os, contextlib
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
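One minimal shape such a test could take (a sketch only: the class name 'Unknown' is a placeholder, so this targets the override_environ helper from the source column and assumes it is importable, e.g. from tests.utils):
import os
import unittest
from unittest import mock
from tests.utils import override_environ  # assumed import path
class OverrideEnvironTests(unittest.TestCase):
    def test_sets_and_restores_values(self):
        # Arrange: start from a known, isolated environment.
        with mock.patch.dict(os.environ, {"EXISTING": "old"}, clear=True):
            # Act
            with override_environ(EXISTING="new", ADDED="x"):
                # Assert: values are overridden inside the context.
                self.assertEqual(os.environ["EXISTING"], "new")
                self.assertEqual(os.environ["ADDED"], "x")
            # Assert: the original environment is restored on exit.
            self.assertEqual(os.environ["EXISTING"], "old")
            self.assertNotIn("ADDED", os.environ)
    def test_deleting_missing_key_raises(self):
        # Arrange: an empty environment, so deletion must fail.
        with mock.patch.dict(os.environ, {}, clear=True):
            # Act / Assert: None means "delete", which raises for absent keys.
            with self.assertRaises(KeyError):
                with override_environ(MISSING=None):
                    pass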
|
Unknown
|
python
|
"""
requests.hooks
~~~~~~~~~~~~~~
This module provides the capabilities for the Requests hooks system.
Available hooks:
``response``:
The response generated from a Request.
"""
HOOKS = ["response"]
def default_hooks():
return {event: [] for event in HOOKS}
# TODO: response is the only one
def dispatch_hook(key, hooks, hook_data, **kwargs):
"""Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or {}
hooks = hooks.get(key)
if hooks:
if hasattr(hooks, "__call__"):
hooks = [hooks]
for hook in hooks:
_hook_data = hook(hook_data, **kwargs)
if _hook_data is not None:
hook_data = _hook_data
return hook_data
|
import pytest
from requests import hooks
def hook(value):
return value[1:]
@pytest.mark.parametrize(
"hooks_list, result",
(
(hook, "ata"),
([hook, lambda x: None, hook], "ta"),
),
)
def test_hooks(hooks_list, result):
assert hooks.dispatch_hook("response", {"response": hooks_list}, "Data") == result
def test_default_hooks():
assert hooks.default_hooks() == {"response": []}
|
./temp_repos/requests/src/requests/hooks.py
|
./temp_repos/requests/tests/test_hooks.py
|
requests
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports:
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
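A possible sketch (the class name 'Unknown' is a placeholder, so this exercises dispatch_hook from the source column with MagicMock hooks):
import unittest
from unittest import mock
from requests import hooks
class DispatchHookTests(unittest.TestCase):
    def test_hook_return_value_replaces_data(self):
        # Arrange
        hook = mock.MagicMock(return_value="changed")
        # Act
        result = hooks.dispatch_hook("response", {"response": hook}, "data", timeout=1)
        # Assert
        hook.assert_called_once_with("data", timeout=1)
        self.assertEqual(result, "changed")
    def test_hook_returning_none_keeps_data(self):
        # Arrange: a hook returning None must not replace the data.
        hook = mock.MagicMock(return_value=None)
        # Act
        result = hooks.dispatch_hook("response", {"response": [hook]}, "data")
        # Assert
        self.assertEqual(result, "data")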
|
Unknown
|
python
|
"""Module containing bug report helper(s)."""
import json
import platform
import ssl
import sys
import idna
import urllib3
from . import __version__ as requests_version
try:
import charset_normalizer
except ImportError:
charset_normalizer = None
try:
import chardet
except ImportError:
chardet = None
try:
from urllib3.contrib import pyopenssl
except ImportError:
pyopenssl = None
OpenSSL = None
cryptography = None
else:
import cryptography
import OpenSSL
def _implementation():
"""Return a dict with the Python implementation and version.
Provide both the name and the version of the Python implementation
currently running. For example, on CPython 3.10.3 it will return
{'name': 'CPython', 'version': '3.10.3'}.
This function works best on CPython and PyPy: in particular, it probably
doesn't work for Jython or IronPython. Future investigation should be done
to work out the correct shape of the code for those platforms.
"""
implementation = platform.python_implementation()
if implementation == "CPython":
implementation_version = platform.python_version()
elif implementation == "PyPy":
implementation_version = "{}.{}.{}".format(
sys.pypy_version_info.major,
sys.pypy_version_info.minor,
sys.pypy_version_info.micro,
)
if sys.pypy_version_info.releaselevel != "final":
implementation_version = "".join(
[implementation_version, sys.pypy_version_info.releaselevel]
)
elif implementation == "Jython":
implementation_version = platform.python_version() # Complete Guess
elif implementation == "IronPython":
implementation_version = platform.python_version() # Complete Guess
else:
implementation_version = "Unknown"
return {"name": implementation, "version": implementation_version}
def info():
"""Generate information for a bug report."""
try:
platform_info = {
"system": platform.system(),
"release": platform.release(),
}
except OSError:
platform_info = {
"system": "Unknown",
"release": "Unknown",
}
implementation_info = _implementation()
urllib3_info = {"version": urllib3.__version__}
charset_normalizer_info = {"version": None}
chardet_info = {"version": None}
if charset_normalizer:
charset_normalizer_info = {"version": charset_normalizer.__version__}
if chardet:
chardet_info = {"version": chardet.__version__}
pyopenssl_info = {
"version": None,
"openssl_version": "",
}
if OpenSSL:
pyopenssl_info = {
"version": OpenSSL.__version__,
"openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}",
}
cryptography_info = {
"version": getattr(cryptography, "__version__", ""),
}
idna_info = {
"version": getattr(idna, "__version__", ""),
}
system_ssl = ssl.OPENSSL_VERSION_NUMBER
system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""}
return {
"platform": platform_info,
"implementation": implementation_info,
"system_ssl": system_ssl_info,
"using_pyopenssl": pyopenssl is not None,
"using_charset_normalizer": chardet is None,
"pyOpenSSL": pyopenssl_info,
"urllib3": urllib3_info,
"chardet": chardet_info,
"charset_normalizer": charset_normalizer_info,
"cryptography": cryptography_info,
"idna": idna_info,
"requests": {
"version": requests_version,
},
}
def main():
"""Pretty-print the bug information as JSON."""
print(json.dumps(info(), sort_keys=True, indent=2))
if __name__ == "__main__":
main()
|
from unittest import mock
from requests.help import info
def test_system_ssl():
"""Verify we're actually setting system_ssl when it should be available."""
assert info()["system_ssl"]["version"] != ""
class VersionedPackage:
def __init__(self, version):
self.__version__ = version
def test_idna_without_version_attribute():
"""Older versions of IDNA don't provide a __version__ attribute, verify
that if we have such a package, we don't blow up.
"""
with mock.patch("requests.help.idna", new=None):
assert info()["idna"] == {"version": ""}
def test_idna_with_version_attribute():
"""Verify we're actually setting idna version when it should be available."""
with mock.patch("requests.help.idna", new=VersionedPackage("2.6")):
assert info()["idna"] == {"version": "2.6"}
|
./temp_repos/requests/src/requests/help.py
|
./temp_repos/requests/tests/test_help.py
|
requests
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: OpenSSL, platform, urllib3.contrib, idna, ssl, urllib3, chardet, json, charset_normalizer, cryptography
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
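A possible sketch (the class name 'Unknown' is a placeholder, so this targets info() and patches the optional chardet dependency; the version string "5.0.0" is arbitrary):
import unittest
from unittest import mock
from requests.help import info
class FakePackage:
    def __init__(self, version):
        self.__version__ = version
class InfoChardetTests(unittest.TestCase):
    def test_chardet_version_reported_when_present(self):
        # Arrange: pretend chardet is installed.
        with mock.patch("requests.help.chardet", new=FakePackage("5.0.0")):
            # Act
            result = info()
        # Assert
        self.assertEqual(result["chardet"], {"version": "5.0.0"})
    def test_chardet_version_is_none_when_missing(self):
        # Arrange: pretend chardet is not installed.
        with mock.patch("requests.help.chardet", new=None):
            # Act
            result = info()
        # Assert
        self.assertEqual(result["chardet"], {"version": None})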
|
Unknown
|
python
|
"""
Built-in, globally-available admin actions.
"""
from django.contrib import messages
from django.contrib.admin import helpers
from django.contrib.admin.decorators import action
from django.contrib.admin.utils import model_ngettext
from django.core.exceptions import PermissionDenied
from django.template.response import TemplateResponse
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
@action(
permissions=["delete"],
description=gettext_lazy("Delete selected %(verbose_name_plural)s"),
)
def delete_selected(modeladmin, request, queryset):
"""
Default action which deletes the selected objects.
This action first displays a confirmation page which shows all the
deletable objects, or, if the user lacks permission for one of the related
children (foreign keys), a "permission denied" message.
Next, it deletes all selected objects and redirects back to the change
list.
"""
opts = modeladmin.model._meta
app_label = opts.app_label
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
(
deletable_objects,
model_count,
perms_needed,
protected,
) = modeladmin.get_deleted_objects(queryset, request)
# The user has already confirmed the deletion.
# Do the deletion and return None to display the change list view again.
if request.POST.get("post") and not protected:
if perms_needed:
raise PermissionDenied
n = len(queryset)
if n:
modeladmin.log_deletions(request, queryset)
modeladmin.delete_queryset(request, queryset)
modeladmin.message_user(
request,
_("Successfully deleted %(count)d %(items)s.")
% {"count": n, "items": model_ngettext(modeladmin.opts, n)},
messages.SUCCESS,
)
# Return None to display the change list page again.
return None
objects_name = model_ngettext(queryset)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": objects_name}
else:
title = _("Delete multiple objects")
context = {
**modeladmin.admin_site.each_context(request),
"title": title,
"subtitle": None,
"objects_name": str(objects_name),
"deletable_objects": [deletable_objects],
"model_count": dict(model_count).items(),
"queryset": queryset,
"perms_lacking": perms_needed,
"protected": protected,
"opts": opts,
"action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
"media": modeladmin.media,
}
request.current_app = modeladmin.admin_site.name
# Display the confirmation page
return TemplateResponse(
request,
modeladmin.delete_selected_confirmation_template
or [
"admin/%s/%s/delete_selected_confirmation.html"
% (app_label, opts.model_name),
"admin/%s/delete_selected_confirmation.html" % app_label,
"admin/delete_selected_confirmation.html",
],
context,
)
|
from django.contrib import admin
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from .models import Band
class AdminActionsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
content_type = ContentType.objects.get_for_model(Band)
Permission.objects.create(
name="custom", codename="custom_band", content_type=content_type
)
for user_type in ("view", "add", "change", "delete", "custom"):
username = "%suser" % user_type
user = User.objects.create_user(
username=username, password="secret", is_staff=True
)
permission = Permission.objects.get(
codename="%s_band" % user_type, content_type=content_type
)
user.user_permissions.add(permission)
setattr(cls, username, user)
def test_get_actions_respects_permissions(self):
class MockRequest:
pass
class BandAdmin(admin.ModelAdmin):
actions = ["custom_action"]
@admin.action
def custom_action(modeladmin, request, queryset):
pass
def has_custom_permission(self, request):
return request.user.has_perm("%s.custom_band" % self.opts.app_label)
ma = BandAdmin(Band, admin.AdminSite())
mock_request = MockRequest()
mock_request.GET = {}
cases = [
(None, self.viewuser, ["custom_action"]),
("view", self.superuser, ["delete_selected", "custom_action"]),
("view", self.viewuser, ["custom_action"]),
("add", self.adduser, ["custom_action"]),
("change", self.changeuser, ["custom_action"]),
("delete", self.deleteuser, ["delete_selected", "custom_action"]),
("custom", self.customuser, ["custom_action"]),
]
for permission, user, expected in cases:
with self.subTest(permission=permission, user=user):
if permission is None:
if hasattr(BandAdmin.custom_action, "allowed_permissions"):
del BandAdmin.custom_action.allowed_permissions
else:
BandAdmin.custom_action.allowed_permissions = (permission,)
mock_request.user = user
actions = ma.get_actions(mock_request)
self.assertEqual(list(actions.keys()), expected)
def test_actions_inheritance(self):
class AdminBase(admin.ModelAdmin):
actions = ["custom_action"]
@admin.action
def custom_action(modeladmin, request, queryset):
pass
class AdminA(AdminBase):
pass
class AdminB(AdminBase):
actions = None
ma1 = AdminA(Band, admin.AdminSite())
action_names = [name for _, name, _ in ma1._get_base_actions()]
self.assertEqual(action_names, ["delete_selected", "custom_action"])
# `actions = None` removes actions from superclasses.
ma2 = AdminB(Band, admin.AdminSite())
action_names = [name for _, name, _ in ma2._get_base_actions()]
self.assertEqual(action_names, ["delete_selected"])
def test_global_actions_description(self):
@admin.action(description="Site-wide admin action 1.")
def global_action_1(modeladmin, request, queryset):
pass
@admin.action
def global_action_2(modeladmin, request, queryset):
pass
admin_site = admin.AdminSite()
admin_site.add_action(global_action_1)
admin_site.add_action(global_action_2)
class BandAdmin(admin.ModelAdmin):
pass
ma = BandAdmin(Band, admin_site)
self.assertEqual(
[description for _, _, description in ma._get_base_actions()],
[
"Delete selected %(verbose_name_plural)s",
"Site-wide admin action 1.",
"Global action 2",
],
)
def test_actions_replace_global_action(self):
@admin.action(description="Site-wide admin action 1.")
def global_action_1(modeladmin, request, queryset):
pass
@admin.action(description="Site-wide admin action 2.")
def global_action_2(modeladmin, request, queryset):
pass
admin.site.add_action(global_action_1, name="custom_action_1")
admin.site.add_action(global_action_2, name="custom_action_2")
@admin.action(description="Local admin action 1.")
def custom_action_1(modeladmin, request, queryset):
pass
class BandAdmin(admin.ModelAdmin):
actions = [custom_action_1, "custom_action_2"]
@admin.action(description="Local admin action 2.")
def custom_action_2(self, request, queryset):
pass
ma = BandAdmin(Band, admin.site)
self.assertEqual(ma.check(), [])
self.assertEqual(
[
desc
for _, name, desc in ma._get_base_actions()
if name.startswith("custom_action")
],
[
"Local admin action 1.",
"Local admin action 2.",
],
)
|
./temp_repos/django/django/contrib/admin/actions.py
|
./temp_repos/django/tests/modeladmin/test_actions.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.contrib.admin.decorators, django.contrib.admin, django.core.exceptions, django.contrib.admin.utils, django.contrib, django.utils.translation, django.template.response
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
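A possible sketch (hedged: assumes a configured Django environment; the modeladmin, request, and queryset are MagicMocks, and model_ngettext plus the module's gettext alias are patched inside django.contrib.admin.actions so no database or translations are needed):
import unittest
from unittest import mock
from django.contrib.admin.actions import delete_selected
from django.core.exceptions import PermissionDenied
class DeleteSelectedTests(unittest.TestCase):
    def _modeladmin(self, perms_needed=frozenset(), protected=()):
        ma = mock.MagicMock()
        ma.get_deleted_objects.return_value = ([], {}, perms_needed, protected)
        return ma
    def test_confirmed_post_deletes_queryset(self):
        # Arrange
        ma = self._modeladmin()
        request = mock.MagicMock()
        request.POST.get.return_value = "yes"  # the user confirmed deletion
        queryset = mock.MagicMock()
        queryset.__len__.return_value = 2
        # Act
        with mock.patch("django.contrib.admin.actions.model_ngettext", return_value="objects"), \
             mock.patch("django.contrib.admin.actions._", side_effect=lambda s: s):
            result = delete_selected(ma, request, queryset)
        # Assert: deletion ran and the change list is redisplayed (None).
        self.assertIsNone(result)
        ma.delete_queryset.assert_called_once_with(request, queryset)
        ma.message_user.assert_called_once()
    def test_missing_permissions_raise(self):
        # Arrange: confirmation posted, but permissions are lacking.
        ma = self._modeladmin(perms_needed={"band"})
        request = mock.MagicMock()
        request.POST.get.return_value = "yes"
        # Act / Assert: raised before any deletion is attempted.
        with self.assertRaises(PermissionDenied):
            delete_selected(ma, request, mock.MagicMock())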
|
Unknown
|
python
|
from ctypes import c_void_p
class CPointerBase:
"""
Base class for objects that have a pointer access property
that controls access to the underlying C pointer.
"""
_ptr = None # Initially the pointer is NULL.
ptr_type = c_void_p
destructor = None
null_ptr_exception_class = AttributeError
@property
def ptr(self):
# Raise an exception if the pointer isn't valid so that NULL pointers
# aren't passed to routines -- that's very bad.
if self._ptr:
return self._ptr
raise self.null_ptr_exception_class(
"NULL %s pointer encountered." % self.__class__.__name__
)
@ptr.setter
def ptr(self, ptr):
# Only allow the pointer to be set with pointers of the compatible
# type or None (NULL).
if not (ptr is None or isinstance(ptr, self.ptr_type)):
raise TypeError("Incompatible pointer type: %s." % type(ptr))
self._ptr = ptr
def __del__(self):
"""
Free the memory used by the C++ object.
"""
if self.destructor and self._ptr:
try:
self.destructor(self.ptr)
except (AttributeError, ImportError, TypeError):
pass # Some part might already have been garbage collected
|
import ctypes
from unittest import mock
from django.contrib.gis.ptr import CPointerBase
from django.test import SimpleTestCase
class CPointerBaseTests(SimpleTestCase):
def test(self):
destructor_mock = mock.Mock()
class NullPointerException(Exception):
pass
class FakeGeom1(CPointerBase):
null_ptr_exception_class = NullPointerException
class FakeGeom2(FakeGeom1):
ptr_type = ctypes.POINTER(ctypes.c_float)
destructor = destructor_mock
fg1 = FakeGeom1()
fg2 = FakeGeom2()
# These assignments are OK. None is allowed because it's equivalent
# to the NULL pointer.
fg1.ptr = fg1.ptr_type()
fg1.ptr = None
fg2.ptr = fg2.ptr_type(ctypes.c_float(5.23))
fg2.ptr = None
# Because pointers have been set to NULL, an exception is raised on
# access. Raising an exception is preferable to a segmentation fault
# that commonly occurs when a C method is given a NULL reference.
for fg in (fg1, fg2):
with self.assertRaises(NullPointerException):
fg.ptr
# Anything that's either not None or the acceptable pointer type
# results in a TypeError when trying to assign it to the `ptr`
# property. Thus, memory addresses (integers) and pointers of the
# incorrect type (in `bad_ptrs`) aren't allowed.
bad_ptrs = (5, ctypes.c_char_p(b"foobar"))
for bad_ptr in bad_ptrs:
for fg in (fg1, fg2):
with self.assertRaisesMessage(TypeError, "Incompatible pointer type"):
fg.ptr = bad_ptr
# Object can be deleted without a destructor set.
fg = FakeGeom1()
fg.ptr = fg.ptr_type(1)
del fg
# A NULL pointer isn't passed to the destructor.
fg = FakeGeom2()
fg.ptr = None
del fg
self.assertFalse(destructor_mock.called)
# The destructor is called if set.
fg = FakeGeom2()
ptr = fg.ptr_type(ctypes.c_float(1.0))
fg.ptr = ptr
del fg
destructor_mock.assert_called_with(ptr)
def test_destructor_catches_importerror(self):
class FakeGeom(CPointerBase):
destructor = mock.Mock(side_effect=ImportError)
fg = FakeGeom()
fg.ptr = fg.ptr_type(1)
del fg
|
./temp_repos/django/django/contrib/gis/ptr.py
|
./temp_repos/django/tests/gis_tests/test_ptr.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'CPointerBase'.
Context:
- Class Name: CPointerBase
- Dependencies to Mock: None detected
- Key Imports: ctypes
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
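A possible sketch (assumes django.contrib.gis.ptr is importable; CPointerBase itself needs no GIS libraries, and the destructor is replaced with a Mock):
import ctypes
import unittest
from unittest import mock
from django.contrib.gis.ptr import CPointerBase
class CPointerBaseSketchTests(unittest.TestCase):
    def test_valid_pointer_roundtrip(self):
        # Arrange
        obj = CPointerBase()
        ptr = ctypes.c_void_p(1)
        # Act
        obj.ptr = ptr
        # Assert
        self.assertIs(obj.ptr, ptr)
    def test_null_pointer_access_raises(self):
        # Arrange: a NULL (None) pointer.
        obj = CPointerBase()
        obj.ptr = None
        # Act / Assert: access raises the configured exception class.
        with self.assertRaises(AttributeError):
            obj.ptr
    def test_destructor_called_on_delete(self):
        # Arrange: a subclass with a mocked destructor.
        destructor_mock = mock.Mock()
        class FakeGeom(CPointerBase):
            destructor = destructor_mock
        obj = FakeGeom()
        ptr = ctypes.c_void_p(1)
        obj.ptr = ptr
        # Act
        del obj
        # Assert
        destructor_mock.assert_called_once_with(ptr)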
|
CPointerBase
|
python
|
"""
This module houses the GeoIP2 object, a wrapper for the MaxMind GeoIP2(R)
Python API (https://geoip2.readthedocs.io/). This is an alternative to the
Python GeoIP2 interface provided by MaxMind.
GeoIP(R) is a registered trademark of MaxMind, Inc.
For IP-based geolocation, this module requires the GeoLite2 Country and City
datasets, in binary format (CSV will not work!). The datasets may be
downloaded from MaxMind at https://dev.maxmind.com/geoip/geoip2/geolite2/.
Grab GeoLite2-Country.mmdb.gz and GeoLite2-City.mmdb.gz, and unzip them in the
directory corresponding to settings.GEOIP_PATH.
"""
import ipaddress
import socket
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import validate_ipv46_address
from django.utils._os import to_path
from django.utils.functional import cached_property
__all__ = ["HAS_GEOIP2"]
try:
import geoip2.database
except ImportError: # pragma: no cover
HAS_GEOIP2 = False
else:
HAS_GEOIP2 = True
__all__ += ["GeoIP2", "GeoIP2Exception"]
# These are the values stored in the `database_type` field of the metadata.
# See https://maxmind.github.io/MaxMind-DB/#database_type for details.
SUPPORTED_DATABASE_TYPES = {
"DBIP-City-Lite",
"DBIP-Country-Lite",
"GeoIP2-City",
"GeoIP2-Country",
"GeoLite2-City",
"GeoLite2-Country",
}
class GeoIP2Exception(Exception):
pass
class GeoIP2:
# The flags for GeoIP memory caching.
# Try MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that order.
MODE_AUTO = 0
# Use the C extension with memory map.
MODE_MMAP_EXT = 1
# Read from memory map. Pure Python.
MODE_MMAP = 2
# Read database as standard file. Pure Python.
MODE_FILE = 4
# Load database into memory. Pure Python.
MODE_MEMORY = 8
cache_options = frozenset(
(MODE_AUTO, MODE_MMAP_EXT, MODE_MMAP, MODE_FILE, MODE_MEMORY)
)
_path = None
_reader = None
def __init__(self, path=None, cache=0, country=None, city=None):
"""
Initialize the GeoIP object. No parameters are required to use default
settings. Keyword arguments may be passed in to customize the locations
of the GeoIP datasets.
* path: Base directory to where GeoIP data is located or the full path
to where the city or country data files (*.mmdb) are located.
Assumes that both the city and country data sets are located in
this directory; overrides the GEOIP_PATH setting.
* cache: The cache settings when opening up the GeoIP datasets. May be
an integer in (0, 1, 2, 4, 8) corresponding to the MODE_AUTO,
MODE_MMAP_EXT, MODE_MMAP, MODE_FILE, and MODE_MEMORY,
`GeoIPOptions` C API settings, respectively. Defaults to 0,
meaning MODE_AUTO.
* country: The name of the GeoIP country data file. Defaults to
'GeoLite2-Country.mmdb'; overrides the GEOIP_COUNTRY setting.
* city: The name of the GeoIP city data file. Defaults to
'GeoLite2-City.mmdb'; overrides the GEOIP_CITY setting.
"""
if cache not in self.cache_options:
raise GeoIP2Exception("Invalid GeoIP caching option: %s" % cache)
path = path or getattr(settings, "GEOIP_PATH", None)
city = city or getattr(settings, "GEOIP_CITY", "GeoLite2-City.mmdb")
country = country or getattr(settings, "GEOIP_COUNTRY", "GeoLite2-Country.mmdb")
if not path:
raise GeoIP2Exception(
"GeoIP path must be provided via parameter or the GEOIP_PATH setting."
)
path = to_path(path)
# Try the path first in case it is the full path to a database.
for path in (path, path / city, path / country):
if path.is_file():
self._path = path
self._reader = geoip2.database.Reader(path, mode=cache)
break
else:
raise GeoIP2Exception(
"Path must be a valid database or directory containing databases."
)
database_type = self._metadata.database_type
if database_type not in SUPPORTED_DATABASE_TYPES:
raise GeoIP2Exception(f"Unable to handle database edition: {database_type}")
def __del__(self):
# Cleanup any GeoIP file handles lying around.
if self._reader:
self._reader.close()
def __repr__(self):
m = self._metadata
version = f"v{m.binary_format_major_version}.{m.binary_format_minor_version}"
return f"<{self.__class__.__name__} [{version}] _path='{self._path}'>"
@cached_property
def _metadata(self):
return self._reader.metadata()
@cached_property
def is_city(self):
return "City" in self._metadata.database_type
@cached_property
def is_country(self):
return "Country" in self._metadata.database_type
def _query(self, query, *, require_city=False):
if not isinstance(query, (str, ipaddress.IPv4Address, ipaddress.IPv6Address)):
raise TypeError(
"GeoIP query must be a string or instance of IPv4Address or "
"IPv6Address, not type %s" % type(query).__name__,
)
if require_city and not self.is_city:
raise GeoIP2Exception(f"Invalid GeoIP city data file: {self._path}")
if isinstance(query, str):
try:
validate_ipv46_address(query)
except ValidationError:
# GeoIP2 only takes IP addresses, so try to resolve a hostname.
query = socket.gethostbyname(query)
function = self._reader.city if self.is_city else self._reader.country
return function(query)
def city(self, query):
"""
Return a dictionary of city information for the given IP address or
Fully Qualified Domain Name (FQDN). Some information in the dictionary
may be undefined (None).
"""
response = self._query(query, require_city=True)
region = response.subdivisions[0] if response.subdivisions else None
return {
"accuracy_radius": response.location.accuracy_radius,
"city": response.city.name,
"continent_code": response.continent.code,
"continent_name": response.continent.name,
"country_code": response.country.iso_code,
"country_name": response.country.name,
"is_in_european_union": response.country.is_in_european_union,
"latitude": response.location.latitude,
"longitude": response.location.longitude,
"metro_code": response.location.metro_code,
"postal_code": response.postal.code,
"region_code": region.iso_code if region else None,
"region_name": region.name if region else None,
"time_zone": response.location.time_zone,
# Kept for backward compatibility.
"dma_code": response.location.metro_code,
"region": region.iso_code if region else None,
}
def country_code(self, query):
"Return the country code for the given IP Address or FQDN."
return self.country(query)["country_code"]
def country_name(self, query):
"Return the country name for the given IP Address or FQDN."
return self.country(query)["country_name"]
def country(self, query):
"""
Return a dictionary with the country code and name when given an
IP address or a Fully Qualified Domain Name (FQDN). For example, both
'24.124.1.80' and 'djangoproject.com' are valid parameters.
"""
response = self._query(query, require_city=False)
return {
"continent_code": response.continent.code,
"continent_name": response.continent.name,
"country_code": response.country.iso_code,
"country_name": response.country.name,
"is_in_european_union": response.country.is_in_european_union,
}
def lon_lat(self, query):
"Return a tuple of the (longitude, latitude) for the given query."
data = self.city(query)
return data["longitude"], data["latitude"]
def lat_lon(self, query):
"Return a tuple of the (latitude, longitude) for the given query."
data = self.city(query)
return data["latitude"], data["longitude"]
def geos(self, query):
"Return a GEOS Point object for the given query."
# Allows importing and using GeoIP2() when GEOS is not installed.
from django.contrib.gis.geos import Point
return Point(self.lon_lat(query), srid=4326)
|
import ipaddress
import itertools
import pathlib
from unittest import mock, skipUnless
from django.conf import settings
from django.contrib.gis.geoip2 import HAS_GEOIP2
from django.contrib.gis.geos import GEOSGeometry
from django.test import SimpleTestCase, override_settings
if HAS_GEOIP2:
import geoip2
from django.contrib.gis.geoip2 import GeoIP2, GeoIP2Exception
def build_geoip_path(*parts):
return pathlib.Path(__file__).parent.joinpath("data/geoip2", *parts).resolve()
@skipUnless(HAS_GEOIP2, "GeoIP2 is required.")
@override_settings(
GEOIP_CITY="GeoLite2-City-Test.mmdb",
GEOIP_COUNTRY="GeoLite2-Country-Test.mmdb",
)
class GeoLite2Test(SimpleTestCase):
fqdn = "sky.uk"
ipv4_str = "2.125.160.216"
ipv6_str = "::ffff:027d:a0d8"
ipv4_addr = ipaddress.ip_address(ipv4_str)
ipv6_addr = ipaddress.ip_address(ipv6_str)
query_values = (fqdn, ipv4_str, ipv6_str, ipv4_addr, ipv6_addr)
expected_city = {
"accuracy_radius": 100,
"city": "Boxford",
"continent_code": "EU",
"continent_name": "Europe",
"country_code": "GB",
"country_name": "United Kingdom",
"is_in_european_union": False,
"latitude": 51.75,
"longitude": -1.25,
"metro_code": None,
"postal_code": "OX1",
"region_code": "ENG",
"region_name": "England",
"time_zone": "Europe/London",
# Kept for backward compatibility.
"dma_code": None,
"region": "ENG",
}
expected_country = {
"continent_code": "EU",
"continent_name": "Europe",
"country_code": "GB",
"country_name": "United Kingdom",
"is_in_european_union": False,
}
@classmethod
def setUpClass(cls):
# Avoid referencing __file__ at module level.
cls.enterClassContext(override_settings(GEOIP_PATH=build_geoip_path()))
# Always mock host lookup to avoid test breakage if DNS changes.
cls.enterClassContext(
mock.patch("socket.gethostbyname", return_value=cls.ipv4_str)
)
super().setUpClass()
def test_init(self):
# Everything inferred from GeoIP path.
g1 = GeoIP2()
# Path passed explicitly.
g2 = GeoIP2(settings.GEOIP_PATH, GeoIP2.MODE_AUTO)
# Path provided as a string.
g3 = GeoIP2(str(settings.GEOIP_PATH))
# Only passing in the location of one database.
g4 = GeoIP2(settings.GEOIP_PATH / settings.GEOIP_CITY, country="")
g5 = GeoIP2(settings.GEOIP_PATH / settings.GEOIP_COUNTRY, city="")
for g in (g1, g2, g3, g4, g5):
self.assertTrue(g._reader)
# Improper parameters.
bad_params = (23, "foo", 15.23)
for bad in bad_params:
with self.assertRaises(GeoIP2Exception):
GeoIP2(cache=bad)
if isinstance(bad, str):
e = GeoIP2Exception
else:
e = TypeError
with self.assertRaises(e):
GeoIP2(bad, GeoIP2.MODE_AUTO)
def test_no_database_file(self):
invalid_path = pathlib.Path(__file__).parent.joinpath("data/invalid").resolve()
msg = "Path must be a valid database or directory containing databases."
with self.assertRaisesMessage(GeoIP2Exception, msg):
GeoIP2(invalid_path)
def test_bad_query(self):
g = GeoIP2(city="<invalid>")
functions = (g.city, g.geos, g.lat_lon, g.lon_lat)
msg = "Invalid GeoIP city data file: "
for function in functions:
with self.subTest(function=function.__qualname__):
with self.assertRaisesMessage(GeoIP2Exception, msg):
function("example.com")
functions += (g.country, g.country_code, g.country_name)
values = (123, 123.45, b"", (), [], {}, set(), frozenset(), GeoIP2)
msg = (
"GeoIP query must be a string or instance of IPv4Address or IPv6Address, "
"not type"
)
for function, value in itertools.product(functions, values):
with self.subTest(function=function.__qualname__, type=type(value)):
with self.assertRaisesMessage(TypeError, msg):
function(value)
def test_country(self):
g = GeoIP2(city="<invalid>")
self.assertIs(g.is_city, False)
self.assertIs(g.is_country, True)
for query in self.query_values:
with self.subTest(query=query):
self.assertEqual(g.country(query), self.expected_country)
self.assertEqual(
g.country_code(query), self.expected_country["country_code"]
)
self.assertEqual(
g.country_name(query), self.expected_country["country_name"]
)
def test_country_using_city_database(self):
g = GeoIP2(country="<invalid>")
self.assertIs(g.is_city, True)
self.assertIs(g.is_country, False)
for query in self.query_values:
with self.subTest(query=query):
self.assertEqual(g.country(query), self.expected_country)
self.assertEqual(
g.country_code(query), self.expected_country["country_code"]
)
self.assertEqual(
g.country_name(query), self.expected_country["country_name"]
)
def test_city(self):
g = GeoIP2(country="<invalid>")
self.assertIs(g.is_city, True)
self.assertIs(g.is_country, False)
for query in self.query_values:
with self.subTest(query=query):
self.assertEqual(g.city(query), self.expected_city)
geom = g.geos(query)
self.assertIsInstance(geom, GEOSGeometry)
self.assertEqual(geom.srid, 4326)
expected_lat = self.expected_city["latitude"]
expected_lon = self.expected_city["longitude"]
self.assertEqual(geom.tuple, (expected_lon, expected_lat))
self.assertEqual(g.lat_lon(query), (expected_lat, expected_lon))
self.assertEqual(g.lon_lat(query), (expected_lon, expected_lat))
# Country queries should still work.
self.assertEqual(g.country(query), self.expected_country)
self.assertEqual(
g.country_code(query), self.expected_country["country_code"]
)
self.assertEqual(
g.country_name(query), self.expected_country["country_name"]
)
def test_not_found(self):
g1 = GeoIP2(city="<invalid>")
g2 = GeoIP2(country="<invalid>")
for function, query in itertools.product(
(g1.country, g2.city), ("127.0.0.1", "::1")
):
with self.subTest(function=function.__qualname__, query=query):
msg = f"The address {query} is not in the database."
with self.assertRaisesMessage(geoip2.errors.AddressNotFoundError, msg):
function(query)
def test_del(self):
g = GeoIP2()
reader = g._reader
self.assertIs(reader._db_reader.closed, False)
del g
self.assertIs(reader._db_reader.closed, True)
def test_repr(self):
g = GeoIP2()
m = g._metadata
version = f"{m.binary_format_major_version}.{m.binary_format_minor_version}"
self.assertEqual(repr(g), f"<GeoIP2 [v{version}] _path='{g._path}'>")
@skipUnless(HAS_GEOIP2, "GeoIP2 is required.")
@override_settings(
GEOIP_CITY="GeoIP2-City-Test.mmdb",
GEOIP_COUNTRY="GeoIP2-Country-Test.mmdb",
)
class GeoIP2Test(GeoLite2Test):
"""Non-free GeoIP2 databases are supported."""
@skipUnless(HAS_GEOIP2, "GeoIP2 is required.")
@override_settings(
GEOIP_CITY="dbip-city-lite-test.mmdb",
GEOIP_COUNTRY="dbip-country-lite-test.mmdb",
)
class DBIPLiteTest(GeoLite2Test):
"""DB-IP Lite databases are supported."""
expected_city = GeoLite2Test.expected_city | {
"accuracy_radius": None,
"city": "London (Shadwell)",
"latitude": 51.5181,
"longitude": -0.0714189,
"postal_code": None,
"region_code": None,
"time_zone": None,
# Kept for backward compatibility.
"region": None,
}
@skipUnless(HAS_GEOIP2, "GeoIP2 is required.")
class ErrorTest(SimpleTestCase):
def test_missing_path(self):
msg = "GeoIP path must be provided via parameter or the GEOIP_PATH setting."
with self.settings(GEOIP_PATH=None):
with self.assertRaisesMessage(GeoIP2Exception, msg):
GeoIP2()
def test_unsupported_database(self):
msg = "Unable to handle database edition: GeoLite2-ASN"
with self.settings(GEOIP_PATH=build_geoip_path("GeoLite2-ASN-Test.mmdb")):
with self.assertRaisesMessage(GeoIP2Exception, msg):
GeoIP2()
|
./temp_repos/django/django/contrib/gis/geoip2.py
|
./temp_repos/django/tests/gis_tests/test_geoip2.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'GeoIP2Exception'.
Context:
- Class Name: GeoIP2Exception
- Dependencies to Mock: cache, path, country, city
- Key Imports: django.utils.functional, django.conf, django.contrib.gis.geos, django.core.validators, ipaddress, django.core.exceptions, django.utils._os, geoip2.database, socket
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
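A minimal sketch of such a test, not a reference solution: it assumes Django
and the geoip2 package are importable, and it patches the module-level
settings object (rather than the listed cache/path/country/city attributes)
so no configured settings module is needed. The message text comes from the
source file shown in this row.
import unittest
from unittest import mock
from django.contrib.gis.geoip2 import GeoIP2, GeoIP2Exception
class GeoIP2ExceptionSketch(unittest.TestCase):
    def test_success_behaves_like_plain_exception(self):
        # Arrange
        msg = "GeoIP path must be provided via parameter or the GEOIP_PATH setting."
        # Act / Assert
        with self.assertRaises(GeoIP2Exception) as cm:
            raise GeoIP2Exception(msg)
        self.assertEqual(str(cm.exception), msg)
    @mock.patch("django.contrib.gis.geoip2.settings")
    def test_failure_missing_geoip_path(self, mocked_settings):
        # Arrange: simulate an unset GEOIP_PATH setting.
        mocked_settings.GEOIP_PATH = None
        # Act / Assert: GeoIP2() surfaces the problem as GeoIP2Exception.
        with self.assertRaises(GeoIP2Exception):
            GeoIP2()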
|
GeoIP2Exception
|
python
|
"""
******** Models for test_data.py ***********
The following classes are for testing basic data marshalling, including
NULL values, where allowed.
The basic idea is to have a model for each Django data type.
"""
import uuid
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models
from .base import BaseModel
try:
from PIL import Image # NOQA
except ImportError:
ImageData = None
else:
class ImageData(models.Model):
data = models.ImageField(null=True)
class BinaryData(models.Model):
data = models.BinaryField(null=True)
class BooleanData(models.Model):
data = models.BooleanField(default=False, null=True)
class CharData(models.Model):
data = models.CharField(max_length=30, null=True)
class DateData(models.Model):
data = models.DateField(null=True)
class DateTimeData(models.Model):
data = models.DateTimeField(null=True)
class DecimalData(models.Model):
data = models.DecimalField(null=True, decimal_places=3, max_digits=5)
class EmailData(models.Model):
data = models.EmailField(null=True)
class FileData(models.Model):
data = models.FileField(null=True)
class FilePathData(models.Model):
data = models.FilePathField(null=True)
class FloatData(models.Model):
data = models.FloatField(null=True)
class IntegerData(models.Model):
data = models.IntegerField(null=True)
class BigIntegerData(models.Model):
data = models.BigIntegerField(null=True)
class GenericIPAddressData(models.Model):
data = models.GenericIPAddressField(null=True)
class PositiveBigIntegerData(models.Model):
data = models.PositiveBigIntegerField(null=True)
class PositiveIntegerData(models.Model):
data = models.PositiveIntegerField(null=True)
class PositiveSmallIntegerData(models.Model):
data = models.PositiveSmallIntegerField(null=True)
class SlugData(models.Model):
data = models.SlugField(null=True)
class SmallData(models.Model):
data = models.SmallIntegerField(null=True)
class TextData(models.Model):
data = models.TextField(null=True)
class TimeData(models.Model):
data = models.TimeField(null=True)
class Tag(models.Model):
"""A tag on an item."""
data = models.SlugField()
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
class Meta:
ordering = ["data"]
class GenericData(models.Model):
data = models.CharField(max_length=30)
tags = GenericRelation(Tag)
# The following test classes are all for validation
# of related objects; in particular, forward, backward,
# and self references.
class Anchor(models.Model):
"""This is a model that can be used as
something for other models to point at"""
data = models.CharField(max_length=30)
class Meta:
ordering = ("id",)
class UniqueAnchor(models.Model):
"""This is a model that can be used as
something for other models to point at"""
data = models.CharField(unique=True, max_length=30)
class FKData(models.Model):
data = models.ForeignKey(Anchor, models.SET_NULL, null=True)
class M2MData(models.Model):
data = models.ManyToManyField(Anchor)
class O2OData(models.Model):
# One to one field can't be null here, since it is a PK.
data = models.OneToOneField(Anchor, models.CASCADE, primary_key=True)
class FKSelfData(models.Model):
data = models.ForeignKey("self", models.CASCADE, null=True)
class M2MSelfData(models.Model):
data = models.ManyToManyField("self", symmetrical=False)
class FKDataToField(models.Model):
data = models.ForeignKey(UniqueAnchor, models.SET_NULL, null=True, to_field="data")
class FKDataToO2O(models.Model):
data = models.ForeignKey(O2OData, models.SET_NULL, null=True)
class M2MIntermediateData(models.Model):
data = models.ManyToManyField(Anchor, through="Intermediate")
class Intermediate(models.Model):
left = models.ForeignKey(M2MIntermediateData, models.CASCADE)
right = models.ForeignKey(Anchor, models.CASCADE)
extra = models.CharField(max_length=30, blank=True, default="doesn't matter")
# The following test classes are for validating the
# deserialization of objects that use a user-defined
# field as the primary key.
# Some of these data types have been commented out
# because they can't be used as a primary key on one
# or all database backends.
class BooleanPKData(models.Model):
data = models.BooleanField(primary_key=True, default=False)
class CharPKData(models.Model):
data = models.CharField(max_length=30, primary_key=True)
class DatePKData(models.Model):
data = models.DateField(primary_key=True)
class DateTimePKData(models.Model):
data = models.DateTimeField(primary_key=True)
class DecimalPKData(models.Model):
data = models.DecimalField(primary_key=True, decimal_places=3, max_digits=5)
class EmailPKData(models.Model):
data = models.EmailField(primary_key=True)
class FilePathPKData(models.Model):
data = models.FilePathField(primary_key=True)
class FloatPKData(models.Model):
data = models.FloatField(primary_key=True)
class IntegerPKData(models.Model):
data = models.IntegerField(primary_key=True)
class GenericIPAddressPKData(models.Model):
data = models.GenericIPAddressField(primary_key=True)
class PositiveIntegerPKData(models.Model):
data = models.PositiveIntegerField(primary_key=True)
class PositiveSmallIntegerPKData(models.Model):
data = models.PositiveSmallIntegerField(primary_key=True)
class SlugPKData(models.Model):
data = models.SlugField(primary_key=True)
class SmallPKData(models.Model):
data = models.SmallIntegerField(primary_key=True)
class TextPKData(models.Model):
data = models.TextField(primary_key=True)
class Meta:
required_db_features = ["supports_index_on_text_field"]
class TimePKData(models.Model):
data = models.TimeField(primary_key=True)
class UUIDData(models.Model):
data = models.UUIDField(primary_key=True)
class UUIDDefaultData(models.Model):
data = models.UUIDField(primary_key=True, default=uuid.uuid4)
class FKToUUID(models.Model):
data = models.ForeignKey(UUIDData, models.CASCADE)
# Tests for handling fields with pre_save functions, or
# models with save functions that modify data
class AutoNowDateTimeData(models.Model):
data = models.DateTimeField(null=True, auto_now=True)
class ModifyingSaveData(models.Model):
data = models.IntegerField(null=True)
def save(self, *args, **kwargs):
"""
A save method that modifies the data in the object.
A user-defined save() method isn't called when objects are deserialized
(#4459).
"""
self.data = 666
super().save(*args, **kwargs)
# Tests for serialization of models using inheritance.
# Regression for #7202, #7350
class AbstractBaseModel(models.Model):
parent_data = models.IntegerField()
class Meta:
abstract = True
class InheritAbstractModel(AbstractBaseModel):
child_data = models.IntegerField()
class InheritBaseModel(BaseModel):
child_data = models.IntegerField()
class ExplicitInheritBaseModel(BaseModel):
parent = models.OneToOneField(BaseModel, models.CASCADE, parent_link=True)
child_data = models.IntegerField()
class LengthModel(models.Model):
data = models.IntegerField()
def __len__(self):
return self.data
|
"""
This module has the mock object definitions used to hold reference geometry
for the GEOS and GDAL tests.
"""
import json
import os
from django.utils.functional import cached_property
# Path where reference test data is located.
TEST_DATA = os.path.join(os.path.dirname(__file__), "data")
def tuplize(seq):
"Turn all nested sequences to tuples in given sequence."
if isinstance(seq, (list, tuple)):
return tuple(tuplize(i) for i in seq)
return seq
def strconvert(d):
"Converts all keys in dictionary to str type."
return {str(k): v for k, v in d.items()}
def get_ds_file(name, ext):
return os.path.join(TEST_DATA, name, name + ".%s" % ext)
class TestObj:
"""
    Base testing object; turns keyword args into attributes.
"""
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
class TestDS(TestObj):
"""
Object for testing GDAL data sources.
"""
def __init__(self, name, *, ext="shp", **kwargs):
        # Shapefile is the default extension, unless specified otherwise.
self.name = name
self.ds = get_ds_file(name, ext)
super().__init__(**kwargs)
class TestGeom(TestObj):
"""
Testing object used for wrapping reference geometry data
in GEOS/GDAL tests.
"""
def __init__(self, *, coords=None, centroid=None, ext_ring_cs=None, **kwargs):
        # Convert certain keyword args from lists to tuples
        # so coordinate test cases will match (JSON has no
        # concept of a tuple).
if coords:
self.coords = tuplize(coords)
if centroid:
self.centroid = tuple(centroid)
self.ext_ring_cs = ext_ring_cs and tuplize(ext_ring_cs)
super().__init__(**kwargs)
class TestGeomSet:
"""
Each attribute of this object is a list of `TestGeom` instances.
"""
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, [TestGeom(**strconvert(kw)) for kw in value])
class TestDataMixin:
"""
Mixin used for GEOS/GDAL test cases that defines a `geometries`
property, which returns and/or loads the reference geometry data.
"""
@cached_property
def geometries(self):
        # Load the test geometry data from the fixture file.
with open(os.path.join(TEST_DATA, "geometries.json")) as f:
geometries = json.load(f)
return TestGeomSet(**strconvert(geometries))
|
./temp_repos/django/tests/serializers/models/data.py
|
./temp_repos/django/tests/gis_tests/test_data.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'BinaryData'.
Context:
- Class Name: BinaryData
- Dependencies to Mock: None detected
- Key Imports: base, django.contrib.contenttypes.fields, django.db, django.contrib.contenttypes.models, uuid, PIL
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
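A minimal sketch of such a test, not a reference solution: the row detects no
dependencies to mock, so plain assertions suffice. It assumes a configured
Django test environment with the serializers test app installed; the import
path below mirrors the test-suite layout and may differ.
from django.test import SimpleTestCase
from serializers.models.data import BinaryData
class BinaryDataSketch(SimpleTestCase):
    def test_success_null_allowed_and_round_trip(self):
        # Arrange
        payload = b"\x00\x01binary"
        # Act
        obj = BinaryData(data=payload)
        # Assert: neither check touches the database.
        self.assertEqual(obj.data, payload)
        self.assertTrue(BinaryData._meta.get_field("data").null)
    def test_failure_unknown_keyword_rejected(self):
        # Act / Assert: model constructors reject unexpected keyword arguments.
        with self.assertRaises(TypeError):
            BinaryData(nonexistent=b"x")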
|
BinaryData
|
python
|
from ctypes import c_void_p
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.libgdal import GDAL_VERSION
from django.contrib.gis.gdal.prototypes import ds as capi
from django.utils.encoding import force_bytes, force_str
class Driver(GDALBase):
"""
Wrap a GDAL/OGR Data Source Driver.
For more information, see the C API documentation:
https://gdal.org/api/vector_c_api.html
https://gdal.org/api/raster_c_api.html
"""
# Case-insensitive aliases for some GDAL/OGR Drivers.
# For a complete list of original driver names see
# https://gdal.org/drivers/vector/
# https://gdal.org/drivers/raster/
_alias = {
# vector
"esri": "ESRI Shapefile",
"shp": "ESRI Shapefile",
"shape": "ESRI Shapefile",
# raster
"tiff": "GTiff",
"tif": "GTiff",
"jpeg": "JPEG",
"jpg": "JPEG",
}
if GDAL_VERSION[:2] <= (3, 10):
_alias.update(
{
"tiger": "TIGER",
"tiger/line": "TIGER",
}
)
def __init__(self, dr_input):
"""
        Initialize a GDAL/OGR driver on either a string or integer input.
"""
if isinstance(dr_input, str):
# If a string name of the driver was passed in
self.ensure_registered()
# Checking the alias dictionary (case-insensitive) to see if an
# alias exists for the given driver.
if dr_input.lower() in self._alias:
name = self._alias[dr_input.lower()]
else:
name = dr_input
# Attempting to get the GDAL/OGR driver by the string name.
driver = c_void_p(capi.get_driver_by_name(force_bytes(name)))
elif isinstance(dr_input, int):
self.ensure_registered()
driver = capi.get_driver(dr_input)
elif isinstance(dr_input, c_void_p):
driver = dr_input
else:
raise GDALException(
"Unrecognized input type for GDAL/OGR Driver: %s" % type(dr_input)
)
# Making sure we get a valid pointer to the OGR Driver
if not driver:
raise GDALException(
"Could not initialize GDAL/OGR Driver on input: %s" % dr_input
)
self.ptr = driver
def __str__(self):
return self.name
@classmethod
def ensure_registered(cls):
"""
Attempt to register all the data source drivers.
"""
# Only register all if the driver count is 0 (or else all drivers will
# be registered over and over again).
if not capi.get_driver_count():
capi.register_all()
@classmethod
def driver_count(cls):
"""
Return the number of GDAL/OGR data source drivers registered.
"""
return capi.get_driver_count()
@property
def name(self):
"""
Return description/name string for this driver.
"""
return force_str(capi.get_driver_description(self.ptr))
|
import unittest
from unittest import mock
from django.contrib.gis.gdal import GDAL_VERSION, Driver, GDALException
valid_drivers = (
# vector
"ESRI Shapefile",
"MapInfo File",
"S57",
"DGN",
"Memory",
"CSV",
"GML",
"KML",
# raster
"GTiff",
"JPEG",
"MEM",
"PNG",
)
invalid_drivers = ("Foo baz", "clucka", "ESRI Shp", "ESRI rast")
aliases = {
"eSrI": "ESRI Shapefile",
"SHAPE": "ESRI Shapefile",
"sHp": "ESRI Shapefile",
"tiFf": "GTiff",
"tIf": "GTiff",
"jPEg": "JPEG",
"jpG": "JPEG",
}
if GDAL_VERSION[:2] <= (3, 10):
aliases.update(
{
"tiger": "TIGER",
"tiger/line": "TIGER",
}
)
class DriverTest(unittest.TestCase):
def test01_valid_driver(self):
"Testing valid GDAL/OGR Data Source Drivers."
for d in valid_drivers:
dr = Driver(d)
self.assertEqual(d, str(dr))
def test02_invalid_driver(self):
"Testing invalid GDAL/OGR Data Source Drivers."
for i in invalid_drivers:
with self.assertRaises(GDALException):
Driver(i)
def test03_aliases(self):
"Testing driver aliases."
for alias, full_name in aliases.items():
dr = Driver(alias)
self.assertEqual(full_name, str(dr))
@mock.patch("django.contrib.gis.gdal.driver.capi.get_driver_count")
@mock.patch("django.contrib.gis.gdal.driver.capi.register_all")
def test_registered(self, reg, count):
"""
Prototypes are registered only if the driver count is zero.
"""
def check(count_val):
reg.reset_mock()
count.return_value = count_val
Driver.ensure_registered()
if count_val:
self.assertFalse(reg.called)
else:
reg.assert_called_once_with()
check(0)
check(120)
|
./temp_repos/django/django/contrib/gis/gdal/driver.py
|
./temp_repos/django/tests/gis_tests/gdal_tests/test_driver.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Driver'.
Context:
- Class Name: Driver
- Dependencies to Mock: dr_input
- Key Imports: django.contrib.gis.gdal.libgdal, django.contrib.gis.gdal.prototypes, django.contrib.gis.gdal.error, django.utils.encoding, ctypes, django.contrib.gis.gdal.base
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
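A minimal sketch of such a test, not a reference solution: it assumes the GDAL
shared library is available so the module imports, and patches the ctypes-level
capi functions so no real driver lookup happens; the return value 1 is an
arbitrary non-NULL pointer stand-in.
import unittest
from unittest import mock
from django.contrib.gis.gdal.driver import Driver
from django.contrib.gis.gdal.error import GDALException
class DriverSketch(unittest.TestCase):
    @mock.patch("django.contrib.gis.gdal.driver.capi")
    def test_success_string_input_expands_alias(self, capi):
        # Arrange: a non-zero driver count makes ensure_registered() a no-op.
        capi.get_driver_count.return_value = 1
        capi.get_driver_by_name.return_value = 1
        # Act
        Driver("shp")
        # Assert: the alias was expanded and encoded before the C call.
        capi.get_driver_by_name.assert_called_once_with(b"ESRI Shapefile")
    def test_failure_unrecognized_input_type(self):
        # Act / Assert: unsupported input types raise GDALException.
        with self.assertRaises(GDALException):
            Driver(3.14)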
|
Driver
|
python
|
"""
This module houses the ctypes function prototypes for GDAL DataSource (raster)
related data structures.
"""
from ctypes import POINTER, c_bool, c_char_p, c_double, c_int, c_void_p
from functools import partial
from django.contrib.gis.gdal.libgdal import std_call
from django.contrib.gis.gdal.prototypes.generation import (
chararray_output,
const_string_output,
double_output,
int_output,
void_output,
voidptr_output,
)
# For more detail about C function names and definitions, see
# https://gdal.org/api/raster_c_api.html
# https://gdal.org/doxygen/gdalwarper_8h.html
# https://gdal.org/api/gdal_utils.html
# Prepare partial functions that use cpl error codes
void_output = partial(void_output, cpl=True)
const_string_output = partial(const_string_output, cpl=True)
double_output = partial(double_output, cpl=True)
# Raster Data Source Routines
create_ds = voidptr_output(
std_call("GDALCreate"), [c_void_p, c_char_p, c_int, c_int, c_int, c_int, c_void_p]
)
open_ds = voidptr_output(std_call("GDALOpen"), [c_char_p, c_int])
close_ds = void_output(std_call("GDALClose"), [c_void_p], errcheck=False)
flush_ds = int_output(std_call("GDALFlushCache"), [c_void_p])
copy_ds = voidptr_output(
std_call("GDALCreateCopy"),
[c_void_p, c_char_p, c_void_p, c_int, POINTER(c_char_p), c_void_p, c_void_p],
)
add_band_ds = void_output(std_call("GDALAddBand"), [c_void_p, c_int])
get_ds_description = const_string_output(std_call("GDALGetDescription"), [c_void_p])
get_ds_driver = voidptr_output(std_call("GDALGetDatasetDriver"), [c_void_p])
get_ds_info = const_string_output(std_call("GDALInfo"), [c_void_p, c_void_p])
get_ds_xsize = int_output(std_call("GDALGetRasterXSize"), [c_void_p])
get_ds_ysize = int_output(std_call("GDALGetRasterYSize"), [c_void_p])
get_ds_raster_count = int_output(std_call("GDALGetRasterCount"), [c_void_p])
get_ds_raster_band = voidptr_output(std_call("GDALGetRasterBand"), [c_void_p, c_int])
get_ds_projection_ref = const_string_output(
std_call("GDALGetProjectionRef"), [c_void_p]
)
set_ds_projection_ref = void_output(std_call("GDALSetProjection"), [c_void_p, c_char_p])
get_ds_geotransform = void_output(
std_call("GDALGetGeoTransform"), [c_void_p, POINTER(c_double * 6)], errcheck=False
)
set_ds_geotransform = void_output(
std_call("GDALSetGeoTransform"), [c_void_p, POINTER(c_double * 6)]
)
get_ds_metadata = chararray_output(
std_call("GDALGetMetadata"), [c_void_p, c_char_p], errcheck=False
)
set_ds_metadata = void_output(
std_call("GDALSetMetadata"), [c_void_p, POINTER(c_char_p), c_char_p]
)
get_ds_metadata_domain_list = chararray_output(
std_call("GDALGetMetadataDomainList"), [c_void_p], errcheck=False
)
get_ds_metadata_item = const_string_output(
std_call("GDALGetMetadataItem"), [c_void_p, c_char_p, c_char_p]
)
set_ds_metadata_item = const_string_output(
std_call("GDALSetMetadataItem"), [c_void_p, c_char_p, c_char_p, c_char_p]
)
free_dsl = void_output(std_call("CSLDestroy"), [POINTER(c_char_p)], errcheck=False)
# Raster Band Routines
band_io = void_output(
std_call("GDALRasterIO"),
[
c_void_p,
c_int,
c_int,
c_int,
c_int,
c_int,
c_void_p,
c_int,
c_int,
c_int,
c_int,
c_int,
],
)
get_band_xsize = int_output(std_call("GDALGetRasterBandXSize"), [c_void_p])
get_band_ysize = int_output(std_call("GDALGetRasterBandYSize"), [c_void_p])
get_band_index = int_output(std_call("GDALGetBandNumber"), [c_void_p])
get_band_description = const_string_output(std_call("GDALGetDescription"), [c_void_p])
get_band_ds = voidptr_output(std_call("GDALGetBandDataset"), [c_void_p])
get_band_datatype = int_output(std_call("GDALGetRasterDataType"), [c_void_p])
get_band_color_interp = int_output(
std_call("GDALGetRasterColorInterpretation"), [c_void_p]
)
get_band_nodata_value = double_output(
std_call("GDALGetRasterNoDataValue"), [c_void_p, POINTER(c_int)]
)
set_band_nodata_value = void_output(
std_call("GDALSetRasterNoDataValue"), [c_void_p, c_double]
)
delete_band_nodata_value = void_output(
std_call("GDALDeleteRasterNoDataValue"), [c_void_p]
)
get_band_statistics = void_output(
std_call("GDALGetRasterStatistics"),
[
c_void_p,
c_int,
c_int,
POINTER(c_double),
POINTER(c_double),
POINTER(c_double),
POINTER(c_double),
c_void_p,
c_void_p,
],
)
compute_band_statistics = void_output(
std_call("GDALComputeRasterStatistics"),
[
c_void_p,
c_int,
POINTER(c_double),
POINTER(c_double),
POINTER(c_double),
POINTER(c_double),
c_void_p,
c_void_p,
],
)
# Reprojection routine
reproject_image = void_output(
std_call("GDALReprojectImage"),
[
c_void_p,
c_char_p,
c_void_p,
c_char_p,
c_int,
c_double,
c_double,
c_void_p,
c_void_p,
c_void_p,
],
)
auto_create_warped_vrt = voidptr_output(
std_call("GDALAutoCreateWarpedVRT"),
[c_void_p, c_char_p, c_char_p, c_int, c_double, c_void_p],
)
# Create VSI gdal raster files from in-memory buffers.
# https://gdal.org/api/cpl.html#cpl-vsi-h
create_vsi_file_from_mem_buffer = voidptr_output(
std_call("VSIFileFromMemBuffer"), [c_char_p, c_void_p, c_int, c_int]
)
get_mem_buffer_from_vsi_file = voidptr_output(
std_call("VSIGetMemFileBuffer"), [c_char_p, POINTER(c_int), c_bool]
)
unlink_vsi_file = int_output(std_call("VSIUnlink"), [c_char_p])
|
import os
import shutil
import struct
import tempfile
import zipfile
from pathlib import Path
from unittest import mock
from django.contrib.gis.gdal import GDAL_VERSION, GDALRaster, SpatialReference
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.raster.band import GDALBand
from django.contrib.gis.shortcuts import numpy
from django.core.files.temp import NamedTemporaryFile
from django.test import SimpleTestCase
from ..data.rasters.textrasters import JSON_RASTER
class GDALRasterTests(SimpleTestCase):
"""
Test a GDALRaster instance created from a file (GeoTiff).
"""
def setUp(self):
self.rs_path = os.path.join(
os.path.dirname(__file__), "../data/rasters/raster.tif"
)
self.rs = GDALRaster(self.rs_path)
def test_gdalraster_input_as_path(self):
rs_path = Path(__file__).parent.parent / "data" / "rasters" / "raster.tif"
rs = GDALRaster(rs_path)
self.assertEqual(str(rs_path), rs.name)
def test_rs_name_repr(self):
self.assertEqual(self.rs_path, self.rs.name)
self.assertRegex(repr(self.rs), r"<Raster object at 0x\w+>")
def test_rs_driver(self):
self.assertEqual(self.rs.driver.name, "GTiff")
def test_rs_size(self):
self.assertEqual(self.rs.width, 163)
self.assertEqual(self.rs.height, 174)
def test_rs_srs(self):
self.assertEqual(self.rs.srs.srid, 3086)
self.assertEqual(self.rs.srs.units, (1.0, "metre"))
def test_rs_srid(self):
rast = GDALRaster(
{
"width": 16,
"height": 16,
"srid": 4326,
}
)
self.assertEqual(rast.srid, 4326)
rast.srid = 3086
self.assertEqual(rast.srid, 3086)
def test_geotransform_and_friends(self):
# Assert correct values for file based raster
self.assertEqual(
self.rs.geotransform,
[511700.4680706557, 100.0, 0.0, 435103.3771231986, 0.0, -100.0],
)
self.assertEqual(self.rs.origin, [511700.4680706557, 435103.3771231986])
self.assertEqual(self.rs.origin.x, 511700.4680706557)
self.assertEqual(self.rs.origin.y, 435103.3771231986)
self.assertEqual(self.rs.scale, [100.0, -100.0])
self.assertEqual(self.rs.scale.x, 100.0)
self.assertEqual(self.rs.scale.y, -100.0)
self.assertEqual(self.rs.skew, [0, 0])
self.assertEqual(self.rs.skew.x, 0)
self.assertEqual(self.rs.skew.y, 0)
# Create in-memory rasters and change gtvalues
rsmem = GDALRaster(JSON_RASTER)
# geotransform accepts both floats and ints
rsmem.geotransform = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
self.assertEqual(rsmem.geotransform, [0.0, 1.0, 2.0, 3.0, 4.0, 5.0])
rsmem.geotransform = range(6)
self.assertEqual(rsmem.geotransform, [float(x) for x in range(6)])
self.assertEqual(rsmem.origin, [0, 3])
self.assertEqual(rsmem.origin.x, 0)
self.assertEqual(rsmem.origin.y, 3)
self.assertEqual(rsmem.scale, [1, 5])
self.assertEqual(rsmem.scale.x, 1)
self.assertEqual(rsmem.scale.y, 5)
self.assertEqual(rsmem.skew, [2, 4])
self.assertEqual(rsmem.skew.x, 2)
self.assertEqual(rsmem.skew.y, 4)
self.assertEqual(rsmem.width, 5)
self.assertEqual(rsmem.height, 5)
def test_geotransform_bad_inputs(self):
rsmem = GDALRaster(JSON_RASTER)
error_geotransforms = [
[1, 2],
[1, 2, 3, 4, 5, "foo"],
[1, 2, 3, 4, 5, 6, "foo"],
]
msg = "Geotransform must consist of 6 numeric values."
for geotransform in error_geotransforms:
with (
self.subTest(i=geotransform),
self.assertRaisesMessage(ValueError, msg),
):
rsmem.geotransform = geotransform
def test_rs_extent(self):
self.assertEqual(
self.rs.extent,
(
511700.4680706557,
417703.3771231986,
528000.4680706557,
435103.3771231986,
),
)
def test_rs_bands(self):
self.assertEqual(len(self.rs.bands), 1)
self.assertIsInstance(self.rs.bands[0], GDALBand)
def test_memory_based_raster_creation(self):
# Create uint8 raster with full pixel data range (0-255)
rast = GDALRaster(
{
"datatype": 1,
"width": 16,
"height": 16,
"srid": 4326,
"bands": [
{
"data": range(256),
"nodata_value": 255,
}
],
}
)
# Get array from raster
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
# Assert data is same as original input
self.assertEqual(result, list(range(256)))
def test_file_based_raster_creation(self):
# Prepare tempfile
rstfile = NamedTemporaryFile(suffix=".tif")
# Create file-based raster from scratch
GDALRaster(
{
"datatype": self.rs.bands[0].datatype(),
"driver": "tif",
"name": rstfile.name,
"width": 163,
"height": 174,
"nr_of_bands": 1,
"srid": self.rs.srs.wkt,
"origin": (self.rs.origin.x, self.rs.origin.y),
"scale": (self.rs.scale.x, self.rs.scale.y),
"skew": (self.rs.skew.x, self.rs.skew.y),
"bands": [
{
"data": self.rs.bands[0].data(),
"nodata_value": self.rs.bands[0].nodata_value,
}
],
}
)
# Reload newly created raster from file
restored_raster = GDALRaster(rstfile.name)
        # Presence of TOWGS84 depends on GDAL/Proj versions.
self.assertEqual(
restored_raster.srs.wkt.replace("TOWGS84[0,0,0,0,0,0,0],", ""),
self.rs.srs.wkt.replace("TOWGS84[0,0,0,0,0,0,0],", ""),
)
self.assertEqual(restored_raster.geotransform, self.rs.geotransform)
if numpy:
numpy.testing.assert_equal(
restored_raster.bands[0].data(), self.rs.bands[0].data()
)
else:
self.assertEqual(restored_raster.bands[0].data(), self.rs.bands[0].data())
def test_nonexistent_file(self):
msg = 'Unable to read raster source input "nonexistent.tif".'
with self.assertRaisesMessage(GDALException, msg):
GDALRaster("nonexistent.tif")
def test_vsi_raster_creation(self):
# Open a raster as a file object.
with open(self.rs_path, "rb") as dat:
# Instantiate a raster from the file binary buffer.
vsimem = GDALRaster(dat.read())
# The data of the in-memory file is equal to the source file.
result = vsimem.bands[0].data()
target = self.rs.bands[0].data()
if numpy:
result = result.flatten().tolist()
target = target.flatten().tolist()
self.assertEqual(result, target)
def test_vsi_raster_deletion(self):
path = "/vsimem/raster.tif"
# Create a vsi-based raster from scratch.
vsimem = GDALRaster(
{
"name": path,
"driver": "tif",
"width": 4,
"height": 4,
"srid": 4326,
"bands": [
{
"data": range(16),
}
],
}
)
# The virtual file exists.
rst = GDALRaster(path)
self.assertEqual(rst.width, 4)
# Delete GDALRaster.
del vsimem
del rst
# The virtual file has been removed.
msg = 'Could not open the datasource at "/vsimem/raster.tif"'
with self.assertRaisesMessage(GDALException, msg):
GDALRaster(path)
def test_vsi_invalid_buffer_error(self):
msg = "Failed creating VSI raster from the input buffer."
with self.assertRaisesMessage(GDALException, msg):
GDALRaster(b"not-a-raster-buffer")
def test_vsi_buffer_property(self):
# Create a vsi-based raster from scratch.
rast = GDALRaster(
{
"name": "/vsimem/raster.tif",
"driver": "tif",
"width": 4,
"height": 4,
"srid": 4326,
"bands": [
{
"data": range(16),
}
],
}
)
# Do a round trip from raster to buffer to raster.
result = GDALRaster(rast.vsi_buffer).bands[0].data()
if numpy:
result = result.flatten().tolist()
        # The buffer round trip preserves the original band data.
self.assertEqual(result, list(range(16)))
# The vsi buffer is None for rasters that are not vsi based.
self.assertIsNone(self.rs.vsi_buffer)
def test_vsi_vsizip_filesystem(self):
rst_zipfile = NamedTemporaryFile(suffix=".zip")
with zipfile.ZipFile(rst_zipfile, mode="w") as zf:
zf.write(self.rs_path, "raster.tif")
rst_path = "/vsizip/" + os.path.join(rst_zipfile.name, "raster.tif")
rst = GDALRaster(rst_path)
self.assertEqual(rst.driver.name, self.rs.driver.name)
self.assertEqual(rst.name, rst_path)
self.assertIs(rst.is_vsi_based, True)
self.assertIsNone(rst.vsi_buffer)
def test_offset_size_and_shape_on_raster_creation(self):
rast = GDALRaster(
{
"datatype": 1,
"width": 4,
"height": 4,
"srid": 4326,
"bands": [
{
"data": (1,),
"offset": (1, 1),
"size": (2, 2),
"shape": (1, 1),
"nodata_value": 2,
}
],
}
)
# Get array from raster.
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
# Band data is equal to nodata value except on input block of ones.
self.assertEqual(result, [2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2])
def test_set_nodata_value_on_raster_creation(self):
# Create raster filled with nodata values.
rast = GDALRaster(
{
"datatype": 1,
"width": 2,
"height": 2,
"srid": 4326,
"bands": [{"nodata_value": 23}],
}
)
# Get array from raster.
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
# All band data is equal to nodata value.
self.assertEqual(result, [23] * 4)
def test_set_nodata_none_on_raster_creation(self):
# Create raster without data and without nodata value.
rast = GDALRaster(
{
"datatype": 1,
"width": 2,
"height": 2,
"srid": 4326,
"bands": [{"nodata_value": None}],
}
)
# Get array from raster.
result = rast.bands[0].data()
if numpy:
result = result.flatten().tolist()
# Band data is equal to zero because no nodata value has been
# specified.
self.assertEqual(result, [0] * 4)
def test_raster_metadata_property(self):
data = self.rs.metadata
self.assertEqual(data["DEFAULT"], {"AREA_OR_POINT": "Area"})
self.assertEqual(data["IMAGE_STRUCTURE"], {"INTERLEAVE": "BAND"})
# Create file-based raster from scratch
source = GDALRaster(
{
"datatype": 1,
"width": 2,
"height": 2,
"srid": 4326,
"bands": [{"data": range(4), "nodata_value": 99}],
}
)
# Set metadata on raster and on a band.
metadata = {
"DEFAULT": {"OWNER": "Django", "VERSION": "1.0", "AREA_OR_POINT": "Point"},
}
source.metadata = metadata
source.bands[0].metadata = metadata
self.assertEqual(source.metadata["DEFAULT"], metadata["DEFAULT"])
self.assertEqual(source.bands[0].metadata["DEFAULT"], metadata["DEFAULT"])
# Update metadata on raster.
metadata = {
"DEFAULT": {"VERSION": "2.0"},
}
source.metadata = metadata
self.assertEqual(source.metadata["DEFAULT"]["VERSION"], "2.0")
# Remove metadata on raster.
metadata = {
"DEFAULT": {"OWNER": None},
}
source.metadata = metadata
self.assertNotIn("OWNER", source.metadata["DEFAULT"])
def test_raster_info_accessor(self):
infos = self.rs.info
# Data
info_lines = [line.strip() for line in infos.split("\n") if line.strip() != ""]
for line in [
"Driver: GTiff/GeoTIFF",
"Files: {}".format(self.rs_path),
"Size is 163, 174",
"Origin = (511700.468070655711927,435103.377123198588379)",
"Pixel Size = (100.000000000000000,-100.000000000000000)",
"Metadata:",
"AREA_OR_POINT=Area",
"Image Structure Metadata:",
"INTERLEAVE=BAND",
"Band 1 Block=163x50 Type=Byte, ColorInterp=Gray",
"NoData Value=15",
]:
self.assertIn(line, info_lines)
for line in [
r"Upper Left \( 511700.468, 435103.377\) "
r'\( 82d51\'46.1\d"W, 27d55\' 1.5\d"N\)',
r"Lower Left \( 511700.468, 417703.377\) "
r'\( 82d51\'52.0\d"W, 27d45\'37.5\d"N\)',
r"Upper Right \( 528000.468, 435103.377\) "
r'\( 82d41\'48.8\d"W, 27d54\'56.3\d"N\)',
r"Lower Right \( 528000.468, 417703.377\) "
r'\( 82d41\'55.5\d"W, 27d45\'32.2\d"N\)',
r"Center \( 519850.468, 426403.377\) "
r'\( 82d46\'50.6\d"W, 27d50\'16.9\d"N\)',
]:
self.assertRegex(infos, line)
        # CRS (skip the name because the string depends on GDAL/Proj versions).
self.assertIn("NAD83 / Florida GDL Albers", infos)
def test_compressed_file_based_raster_creation(self):
rstfile = NamedTemporaryFile(suffix=".tif")
# Make a compressed copy of an existing raster.
compressed = self.rs.warp(
{"papsz_options": {"compress": "packbits"}, "name": rstfile.name}
)
        # Check the file size on disk to confirm compression worked.
self.assertLess(os.path.getsize(compressed.name), os.path.getsize(self.rs.name))
# Create file-based raster with options from scratch.
papsz_options = {
"compress": "packbits",
"blockxsize": 23,
"blockysize": 23,
}
if GDAL_VERSION < (3, 7):
datatype = 1
papsz_options["pixeltype"] = "signedbyte"
else:
datatype = 14
compressed = GDALRaster(
{
"datatype": datatype,
"driver": "tif",
"name": rstfile.name,
"width": 40,
"height": 40,
"srid": 3086,
"origin": (500000, 400000),
"scale": (100, -100),
"skew": (0, 0),
"bands": [
{
"data": range(40 ^ 2),
"nodata_value": 255,
}
],
"papsz_options": papsz_options,
}
)
# Check if options used on creation are stored in metadata.
# Reopening the raster ensures that all metadata has been written
# to the file.
compressed = GDALRaster(compressed.name)
self.assertEqual(
compressed.metadata["IMAGE_STRUCTURE"]["COMPRESSION"],
"PACKBITS",
)
self.assertEqual(compressed.bands[0].datatype(), datatype)
if GDAL_VERSION < (3, 7):
self.assertEqual(
compressed.bands[0].metadata["IMAGE_STRUCTURE"]["PIXELTYPE"],
"SIGNEDBYTE",
)
self.assertIn("Block=40x23", compressed.info)
def test_raster_warp(self):
# Create in memory raster
source = GDALRaster(
{
"datatype": 1,
"driver": "MEM",
"name": "sourceraster",
"width": 4,
"height": 4,
"nr_of_bands": 1,
"srid": 3086,
"origin": (500000, 400000),
"scale": (100, -100),
"skew": (0, 0),
"bands": [
{
"data": range(16),
"nodata_value": 255,
}
],
}
)
# Test altering the scale, width, and height of a raster
data = {
"scale": [200, -200],
"width": 2,
"height": 2,
}
target = source.warp(data)
self.assertEqual(target.width, data["width"])
self.assertEqual(target.height, data["height"])
self.assertEqual(target.scale, data["scale"])
self.assertEqual(target.bands[0].datatype(), source.bands[0].datatype())
self.assertEqual(target.name, "sourceraster_copy.MEM")
result = target.bands[0].data()
if numpy:
result = result.flatten().tolist()
self.assertEqual(result, [5, 7, 13, 15])
# Test altering the name and datatype (to float)
data = {
"name": "/path/to/targetraster.tif",
"datatype": 6,
}
target = source.warp(data)
self.assertEqual(target.bands[0].datatype(), 6)
self.assertEqual(target.name, "/path/to/targetraster.tif")
self.assertEqual(target.driver.name, "MEM")
result = target.bands[0].data()
if numpy:
result = result.flatten().tolist()
self.assertEqual(
result,
[
0.0,
1.0,
2.0,
3.0,
4.0,
5.0,
6.0,
7.0,
8.0,
9.0,
10.0,
11.0,
12.0,
13.0,
14.0,
15.0,
],
)
def test_raster_warp_nodata_zone(self):
# Create in memory raster.
source = GDALRaster(
{
"datatype": 1,
"driver": "MEM",
"width": 4,
"height": 4,
"srid": 3086,
"origin": (500000, 400000),
"scale": (100, -100),
"skew": (0, 0),
"bands": [
{
"data": range(16),
"nodata_value": 23,
}
],
}
)
# Warp raster onto a location that does not cover any pixels of the
# original.
result = source.warp({"origin": (200000, 200000)}).bands[0].data()
if numpy:
result = result.flatten().tolist()
# The result is an empty raster filled with the correct nodata value.
self.assertEqual(result, [23] * 16)
def test_raster_clone(self):
rstfile = NamedTemporaryFile(suffix=".tif")
tests = [
("MEM", "", 23), # In memory raster.
("tif", rstfile.name, 99), # In file based raster.
]
for driver, name, nodata_value in tests:
with self.subTest(driver=driver):
source = GDALRaster(
{
"datatype": 1,
"driver": driver,
"name": name,
"width": 4,
"height": 4,
"srid": 3086,
"origin": (500000, 400000),
"scale": (100, -100),
"skew": (0, 0),
"bands": [
{
"data": range(16),
"nodata_value": nodata_value,
}
],
}
)
clone = source.clone()
self.assertNotEqual(clone.name, source.name)
self.assertEqual(clone._write, source._write)
self.assertEqual(clone.srs.srid, source.srs.srid)
self.assertEqual(clone.width, source.width)
self.assertEqual(clone.height, source.height)
self.assertEqual(clone.origin, source.origin)
self.assertEqual(clone.scale, source.scale)
self.assertEqual(clone.skew, source.skew)
self.assertIsNot(clone, source)
def test_raster_transform(self):
tests = [
3086,
"3086",
SpatialReference(3086),
]
for srs in tests:
with self.subTest(srs=srs):
# Prepare tempfile and nodata value.
rstfile = NamedTemporaryFile(suffix=".tif")
ndv = 99
                # Create a file-based raster.
source = GDALRaster(
{
"datatype": 1,
"driver": "tif",
"name": rstfile.name,
"width": 5,
"height": 5,
"nr_of_bands": 1,
"srid": 4326,
"origin": (-5, 5),
"scale": (2, -2),
"skew": (0, 0),
"bands": [
{
"data": range(25),
"nodata_value": ndv,
}
],
}
)
target = source.transform(srs)
# Reload data from disk.
target = GDALRaster(target.name)
self.assertEqual(target.srs.srid, 3086)
self.assertEqual(target.width, 7)
self.assertEqual(target.height, 7)
self.assertEqual(target.bands[0].datatype(), source.bands[0].datatype())
self.assertAlmostEqual(target.origin[0], 9124842.791079799, 3)
self.assertAlmostEqual(target.origin[1], 1589911.6476407414, 3)
self.assertAlmostEqual(target.scale[0], 223824.82664250192, 3)
self.assertAlmostEqual(target.scale[1], -223824.82664250192, 3)
self.assertEqual(target.skew, [0, 0])
result = target.bands[0].data()
if numpy:
result = result.flatten().tolist()
# The reprojection of a raster that spans over a large area
# skews the data matrix and might introduce nodata values.
self.assertEqual(
result,
[
ndv,
ndv,
ndv,
ndv,
4,
ndv,
ndv,
ndv,
ndv,
2,
3,
9,
ndv,
ndv,
ndv,
1,
2,
8,
13,
19,
ndv,
0,
6,
6,
12,
18,
18,
24,
ndv,
10,
11,
16,
22,
23,
ndv,
ndv,
ndv,
15,
21,
22,
ndv,
ndv,
ndv,
ndv,
20,
ndv,
ndv,
ndv,
ndv,
],
)
def test_raster_transform_clone(self):
with mock.patch.object(GDALRaster, "clone") as mocked_clone:
            # Create a file-based raster.
rstfile = NamedTemporaryFile(suffix=".tif")
source = GDALRaster(
{
"datatype": 1,
"driver": "tif",
"name": rstfile.name,
"width": 5,
"height": 5,
"nr_of_bands": 1,
"srid": 4326,
"origin": (-5, 5),
"scale": (2, -2),
"skew": (0, 0),
"bands": [
{
"data": range(25),
"nodata_value": 99,
}
],
}
)
# transform() returns a clone because it is the same SRID and
# driver.
source.transform(4326)
self.assertEqual(mocked_clone.call_count, 1)
def test_raster_transform_clone_name(self):
        # Create a file-based raster.
rstfile = NamedTemporaryFile(suffix=".tif")
source = GDALRaster(
{
"datatype": 1,
"driver": "tif",
"name": rstfile.name,
"width": 5,
"height": 5,
"nr_of_bands": 1,
"srid": 4326,
"origin": (-5, 5),
"scale": (2, -2),
"skew": (0, 0),
"bands": [
{
"data": range(25),
"nodata_value": 99,
}
],
}
)
clone_name = rstfile.name + "_respect_name.GTiff"
target = source.transform(4326, name=clone_name)
self.assertEqual(target.name, clone_name)
class GDALBandTests(SimpleTestCase):
rs_path = os.path.join(os.path.dirname(__file__), "../data/rasters/raster.tif")
def test_band_data(self):
rs = GDALRaster(self.rs_path)
band = rs.bands[0]
self.assertEqual(band.width, 163)
self.assertEqual(band.height, 174)
self.assertEqual(band.description, "")
self.assertEqual(band.datatype(), 1)
self.assertEqual(band.datatype(as_string=True), "GDT_Byte")
self.assertEqual(band.color_interp(), 1)
self.assertEqual(band.color_interp(as_string=True), "GCI_GrayIndex")
self.assertEqual(band.nodata_value, 15)
if numpy:
data = band.data()
assert_array = numpy.loadtxt(
os.path.join(
os.path.dirname(__file__), "../data/rasters/raster.numpy.txt"
)
)
numpy.testing.assert_equal(data, assert_array)
self.assertEqual(data.shape, (band.height, band.width))
def test_band_statistics(self):
with tempfile.TemporaryDirectory() as tmp_dir:
rs_path = os.path.join(tmp_dir, "raster.tif")
shutil.copyfile(self.rs_path, rs_path)
rs = GDALRaster(rs_path)
band = rs.bands[0]
pam_file = rs_path + ".aux.xml"
smin, smax, smean, sstd = band.statistics(approximate=True)
self.assertEqual(smin, 0)
self.assertEqual(smax, 9)
self.assertAlmostEqual(smean, 2.842331288343558)
self.assertAlmostEqual(sstd, 2.3965567248965356)
smin, smax, smean, sstd = band.statistics(approximate=False, refresh=True)
self.assertEqual(smin, 0)
self.assertEqual(smax, 9)
self.assertAlmostEqual(smean, 2.828326634228898)
self.assertAlmostEqual(sstd, 2.4260526986669095)
self.assertEqual(band.min, 0)
self.assertEqual(band.max, 9)
self.assertAlmostEqual(band.mean, 2.828326634228898)
self.assertAlmostEqual(band.std, 2.4260526986669095)
# Statistics are persisted into PAM file on band close
rs = band = None
self.assertTrue(os.path.isfile(pam_file))
def _remove_aux_file(self):
pam_file = self.rs_path + ".aux.xml"
if os.path.isfile(pam_file):
os.remove(pam_file)
def test_read_mode_error(self):
# Open raster in read mode
rs = GDALRaster(self.rs_path, write=False)
band = rs.bands[0]
self.addCleanup(self._remove_aux_file)
        # Setting attributes on a read-only raster raises an exception in the
        # _flush method.
with self.assertRaises(GDALException):
setattr(band, "nodata_value", 10)
def test_band_data_setters(self):
# Create in-memory raster and get band
rsmem = GDALRaster(
{
"datatype": 1,
"driver": "MEM",
"name": "mem_rst",
"width": 10,
"height": 10,
"nr_of_bands": 1,
"srid": 4326,
}
)
bandmem = rsmem.bands[0]
# Set nodata value
bandmem.nodata_value = 99
self.assertEqual(bandmem.nodata_value, 99)
# Set data for entire dataset
bandmem.data(range(100))
if numpy:
numpy.testing.assert_equal(
bandmem.data(), numpy.arange(100).reshape(10, 10)
)
else:
self.assertEqual(bandmem.data(), list(range(100)))
# Prepare data for setting values in subsequent tests
block = list(range(100, 104))
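        # "<" = little-endian; "B B B B" = four unsigned bytes matching block.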
packed_block = struct.pack("<" + "B B B B", *block)
# Set data from list
bandmem.data(block, (1, 1), (2, 2))
result = bandmem.data(offset=(1, 1), size=(2, 2))
if numpy:
numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
else:
self.assertEqual(result, block)
# Set data from packed block
bandmem.data(packed_block, (1, 1), (2, 2))
result = bandmem.data(offset=(1, 1), size=(2, 2))
if numpy:
numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
else:
self.assertEqual(result, block)
# Set data from bytes
bandmem.data(bytes(packed_block), (1, 1), (2, 2))
result = bandmem.data(offset=(1, 1), size=(2, 2))
if numpy:
numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
else:
self.assertEqual(result, block)
# Set data from bytearray
bandmem.data(bytearray(packed_block), (1, 1), (2, 2))
result = bandmem.data(offset=(1, 1), size=(2, 2))
if numpy:
numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
else:
self.assertEqual(result, block)
# Set data from memoryview
bandmem.data(memoryview(packed_block), (1, 1), (2, 2))
result = bandmem.data(offset=(1, 1), size=(2, 2))
if numpy:
numpy.testing.assert_equal(result, numpy.array(block).reshape(2, 2))
else:
self.assertEqual(result, block)
# Set data from numpy array
if numpy:
bandmem.data(numpy.array(block, dtype="int8").reshape(2, 2), (1, 1), (2, 2))
numpy.testing.assert_equal(
bandmem.data(offset=(1, 1), size=(2, 2)),
numpy.array(block).reshape(2, 2),
)
# Test json input data
rsmemjson = GDALRaster(JSON_RASTER)
bandmemjson = rsmemjson.bands[0]
if numpy:
numpy.testing.assert_equal(
bandmemjson.data(), numpy.array(range(25)).reshape(5, 5)
)
else:
self.assertEqual(bandmemjson.data(), list(range(25)))
def test_band_statistics_automatic_refresh(self):
rsmem = GDALRaster(
{
"srid": 4326,
"width": 2,
"height": 2,
"bands": [{"data": [0] * 4, "nodata_value": 99}],
}
)
band = rsmem.bands[0]
# Populate statistics cache
self.assertEqual(band.statistics(), (0, 0, 0, 0))
# Change data
band.data([1, 1, 0, 0])
# Statistics are properly updated
self.assertEqual(band.statistics(), (0.0, 1.0, 0.5, 0.5))
# Change nodata_value
band.nodata_value = 0
# Statistics are properly updated
self.assertEqual(band.statistics(), (1.0, 1.0, 1.0, 0.0))
def test_band_statistics_empty_band(self):
rsmem = GDALRaster(
{
"srid": 4326,
"width": 1,
"height": 1,
"bands": [{"data": [0], "nodata_value": 0}],
}
)
self.assertEqual(rsmem.bands[0].statistics(), (None, None, None, None))
def test_band_delete_nodata(self):
rsmem = GDALRaster(
{
"srid": 4326,
"width": 1,
"height": 1,
"bands": [{"data": [0], "nodata_value": 1}],
}
)
rsmem.bands[0].nodata_value = None
self.assertIsNone(rsmem.bands[0].nodata_value)
def test_band_data_replication(self):
band = GDALRaster(
{
"srid": 4326,
"width": 3,
"height": 3,
"bands": [{"data": range(10, 19), "nodata_value": 0}],
}
).bands[0]
# Variations for input (data, shape, expected result).
combos = (
([1], (1, 1), [1] * 9),
(range(3), (1, 3), [0, 0, 0, 1, 1, 1, 2, 2, 2]),
(range(3), (3, 1), [0, 1, 2, 0, 1, 2, 0, 1, 2]),
)
for combo in combos:
band.data(combo[0], shape=combo[1])
if numpy:
numpy.testing.assert_equal(
band.data(), numpy.array(combo[2]).reshape(3, 3)
)
else:
self.assertEqual(band.data(), list(combo[2]))
|
./temp_repos/django/django/contrib/gis/gdal/prototypes/raster.py
|
./temp_repos/django/tests/gis_tests/gdal_tests/test_raster.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.contrib.gis.gdal.libgdal, django.contrib.gis.gdal.prototypes.generation, functools, ctypes
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
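The source module in this row defines no class named 'Unknown', so a faithful
test cannot target it directly. A minimal sketch under that caveat instead
verifies the module-level wiring the source shows: the cpl-flavoured partials
and the absence of never-generated prototypes. It assumes the GDAL shared
library is available so the module imports.
import unittest
from functools import partial
from django.contrib.gis.gdal.prototypes import raster as capi
class RasterPrototypesSketch(unittest.TestCase):
    def test_success_cpl_partials_are_wired(self):
        # Arrange / Act: the module builds these bindings at import time.
        wrapped = capi.void_output
        # Assert: partial(void_output, cpl=True), as shown in the source.
        self.assertIsInstance(wrapped, partial)
        self.assertIs(wrapped.keywords.get("cpl"), True)
    def test_failure_missing_prototype(self):
        # Act / Assert: a prototype that was never generated is absent.
        with self.assertRaises(AttributeError):
            capi.missing_prototype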
|
Unknown
|
python
|
"""
Creates the default Site object.
"""
from django.apps import apps as global_apps
from django.conf import settings
from django.core.management.color import no_style
from django.db import DEFAULT_DB_ALIAS, connections, router
def create_default_site(
app_config,
verbosity=2,
interactive=True,
using=DEFAULT_DB_ALIAS,
apps=global_apps,
**kwargs,
):
try:
Site = apps.get_model("sites", "Site")
except LookupError:
return
if not router.allow_migrate_model(using, Site):
return
if not Site.objects.using(using).exists():
# The default settings set SITE_ID = 1, and some tests in Django's test
# suite rely on this value. However, if database sequences are reused
# (e.g. in the test suite after flush/syncdb), it isn't guaranteed that
# the next id will be 1, so we coerce it. See #15573 and #16353. This
# can also crop up outside of tests - see #15346.
if verbosity >= 2:
print("Creating example.com Site object")
Site(
pk=getattr(settings, "SITE_ID", 1), domain="example.com", name="example.com"
).save(using=using)
# We set an explicit pk instead of relying on auto-incrementation,
# so we need to reset the database sequence. See #17415.
sequence_sql = connections[using].ops.sequence_reset_sql(no_style(), [Site])
if sequence_sql:
if verbosity >= 2:
print("Resetting sequence")
with connections[using].cursor() as cursor:
for command in sequence_sql:
cursor.execute(command)
|
import datetime
import os
import shutil
import tempfile
import unittest
from io import StringIO
from pathlib import Path
from unittest import mock
from admin_scripts.tests import AdminScriptTestCase
from django.conf import STATICFILES_STORAGE_ALIAS, settings
from django.contrib.staticfiles import storage
from django.contrib.staticfiles.management.commands import collectstatic, runserver
from django.core.exceptions import ImproperlyConfigured
from django.core.management import CommandError, call_command
from django.core.management.base import SystemCheckError
from django.test import RequestFactory, override_settings
from django.test.utils import extend_sys_path
from django.utils._os import symlinks_supported
from django.utils.functional import empty
from .cases import CollectionTestCase, StaticFilesTestCase, TestDefaults
from .settings import TEST_ROOT, TEST_SETTINGS
from .storage import DummyStorage
class TestNoFilesCreated:
def test_no_files_created(self):
"""
        Make sure no files were created in the destination directory.
"""
self.assertEqual(os.listdir(settings.STATIC_ROOT), [])
class TestRunserver(StaticFilesTestCase):
@override_settings(MIDDLEWARE=["django.middleware.common.CommonMiddleware"])
def test_middleware_loaded_only_once(self):
command = runserver.Command()
with mock.patch("django.middleware.common.CommonMiddleware") as mocked:
command.get_handler(use_static_handler=True, insecure_serving=True)
self.assertEqual(mocked.call_count, 1)
def test_404_response(self):
command = runserver.Command()
handler = command.get_handler(use_static_handler=True, insecure_serving=True)
missing_static_file = os.path.join(settings.STATIC_URL, "unknown.css")
req = RequestFactory().get(missing_static_file)
with override_settings(DEBUG=False):
response = handler.get_response(req)
self.assertEqual(response.status_code, 404)
with override_settings(DEBUG=True):
response = handler.get_response(req)
self.assertEqual(response.status_code, 404)
class TestFindStatic(TestDefaults, CollectionTestCase):
"""
Test ``findstatic`` management command.
"""
def _get_file(self, filepath):
path = call_command(
"findstatic", filepath, all=False, verbosity=0, stdout=StringIO()
)
with open(path, encoding="utf-8") as f:
return f.read()
def test_all_files(self):
"""
        findstatic returns all candidate files if run without --first and with -v1.
"""
result = call_command(
"findstatic", "test/file.txt", verbosity=1, stdout=StringIO()
)
lines = [line.strip() for line in result.split("\n")]
self.assertEqual(
len(lines), 3
) # three because there is also the "Found <file> here" line
self.assertIn("project", lines[1])
self.assertIn("apps", lines[2])
def test_all_files_less_verbose(self):
"""
        findstatic returns all candidate files if run without --first and with -v0.
"""
result = call_command(
"findstatic", "test/file.txt", verbosity=0, stdout=StringIO()
)
lines = [line.strip() for line in result.split("\n")]
self.assertEqual(len(lines), 2)
self.assertIn("project", lines[0])
self.assertIn("apps", lines[1])
def test_all_files_more_verbose(self):
"""
        findstatic returns all candidate files if run without --first and with -v2.
Also, test that findstatic returns the searched locations with -v2.
"""
result = call_command(
"findstatic", "test/file.txt", verbosity=2, stdout=StringIO()
)
lines = [line.strip() for line in result.split("\n")]
self.assertIn("project", lines[1])
self.assertIn("apps", lines[2])
self.assertIn("Looking in the following locations:", lines[3])
searched_locations = ", ".join(lines[4:])
# AppDirectoriesFinder searched locations
self.assertIn(
os.path.join("staticfiles_tests", "apps", "test", "static"),
searched_locations,
)
self.assertIn(
os.path.join("staticfiles_tests", "apps", "no_label", "static"),
searched_locations,
)
# FileSystemFinder searched locations
self.assertIn(TEST_SETTINGS["STATICFILES_DIRS"][1][1], searched_locations)
self.assertIn(TEST_SETTINGS["STATICFILES_DIRS"][0], searched_locations)
self.assertIn(str(TEST_SETTINGS["STATICFILES_DIRS"][2]), searched_locations)
# DefaultStorageFinder searched locations
self.assertIn(
os.path.join("staticfiles_tests", "project", "site_media", "media"),
searched_locations,
)
def test_missing_args_message(self):
msg = "Enter at least one staticfile."
with self.assertRaisesMessage(CommandError, msg):
call_command("findstatic")
class TestConfiguration(StaticFilesTestCase):
def test_location_empty(self):
msg = "without having set the STATIC_ROOT setting to a filesystem path"
err = StringIO()
for root in ["", None]:
with override_settings(STATIC_ROOT=root):
with self.assertRaisesMessage(ImproperlyConfigured, msg):
call_command(
"collectstatic", interactive=False, verbosity=0, stderr=err
)
def test_local_storage_detection_helper(self):
staticfiles_storage = storage.staticfiles_storage
try:
storage.staticfiles_storage._wrapped = empty
with self.settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": (
"django.contrib.staticfiles.storage.StaticFilesStorage"
)
},
}
):
command = collectstatic.Command()
self.assertTrue(command.is_local_storage())
storage.staticfiles_storage._wrapped = empty
with self.settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "staticfiles_tests.storage.DummyStorage"
},
}
):
command = collectstatic.Command()
self.assertFalse(command.is_local_storage())
collectstatic.staticfiles_storage = storage.FileSystemStorage()
command = collectstatic.Command()
self.assertTrue(command.is_local_storage())
collectstatic.staticfiles_storage = DummyStorage()
command = collectstatic.Command()
self.assertFalse(command.is_local_storage())
finally:
staticfiles_storage._wrapped = empty
collectstatic.staticfiles_storage = staticfiles_storage
storage.staticfiles_storage = staticfiles_storage
@override_settings(STATICFILES_DIRS=("test"))
def test_collectstatis_check(self):
msg = "The STATICFILES_DIRS setting is not a tuple or list."
with self.assertRaisesMessage(SystemCheckError, msg):
call_command("collectstatic", skip_checks=False)
class TestCollectionHelpSubcommand(AdminScriptTestCase):
@override_settings(STATIC_ROOT=None)
def test_missing_settings_dont_prevent_help(self):
"""
Even if the STATIC_ROOT setting is not set, one can still call the
`manage.py help collectstatic` command.
"""
self.write_settings("settings.py", apps=["django.contrib.staticfiles"])
out, err = self.run_manage(["help", "collectstatic"])
self.assertNoOutput(err)
class TestCollection(TestDefaults, CollectionTestCase):
"""
Test ``collectstatic`` management command.
"""
def test_ignore(self):
"""
-i patterns are ignored.
"""
self.assertFileNotFound("test/test.ignoreme")
def test_common_ignore_patterns(self):
"""
Common ignore patterns (*~, .*, CVS) are ignored.
"""
self.assertFileNotFound("test/.hidden")
self.assertFileNotFound("test/backup~")
self.assertFileNotFound("test/CVS")
def test_pathlib(self):
self.assertFileContains("pathlib.txt", "pathlib")
class TestCollectionPathLib(TestCollection):
def mkdtemp(self):
tmp_dir = super().mkdtemp()
return Path(tmp_dir)
class TestCollectionVerbosity(CollectionTestCase):
copying_msg = "Copying "
run_collectstatic_in_setUp = False
post_process_msg = "Post-processed"
staticfiles_copied_msg = "static files copied to"
def test_verbosity_0(self):
stdout = StringIO()
self.run_collectstatic(verbosity=0, stdout=stdout)
self.assertEqual(stdout.getvalue(), "")
def test_verbosity_1(self):
stdout = StringIO()
self.run_collectstatic(verbosity=1, stdout=stdout)
output = stdout.getvalue()
self.assertIn(self.staticfiles_copied_msg, output)
self.assertNotIn(self.copying_msg, output)
def test_verbosity_2(self):
stdout = StringIO()
self.run_collectstatic(verbosity=2, stdout=stdout)
output = stdout.getvalue()
self.assertIn(self.staticfiles_copied_msg, output)
self.assertIn(self.copying_msg, output)
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": (
"django.contrib.staticfiles.storage.ManifestStaticFilesStorage"
)
},
}
)
def test_verbosity_1_with_post_process(self):
stdout = StringIO()
self.run_collectstatic(verbosity=1, stdout=stdout, post_process=True)
self.assertNotIn(self.post_process_msg, stdout.getvalue())
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": (
"django.contrib.staticfiles.storage.ManifestStaticFilesStorage"
)
},
}
)
def test_verbosity_2_with_post_process(self):
stdout = StringIO()
self.run_collectstatic(verbosity=2, stdout=stdout, post_process=True)
self.assertIn(self.post_process_msg, stdout.getvalue())
class TestCollectionClear(CollectionTestCase):
"""
Test the ``--clear`` option of the ``collectstatic`` management command.
"""
run_collectstatic_in_setUp = False
def run_collectstatic(self, **kwargs):
clear_filepath = os.path.join(settings.STATIC_ROOT, "cleared.txt")
with open(clear_filepath, "w") as f:
f.write("should be cleared")
super().run_collectstatic(clear=True, **kwargs)
def test_cleared_not_found(self):
self.assertFileNotFound("cleared.txt")
def test_dir_not_exists(self, **kwargs):
shutil.rmtree(settings.STATIC_ROOT)
super().run_collectstatic(clear=True)
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "staticfiles_tests.storage.PathNotImplementedStorage"
},
}
)
def test_handle_path_notimplemented(self):
self.run_collectstatic()
self.assertFileNotFound("cleared.txt")
def test_verbosity_0(self):
for kwargs in [{}, {"dry_run": True}]:
with self.subTest(kwargs=kwargs):
stdout = StringIO()
self.run_collectstatic(verbosity=0, stdout=stdout, **kwargs)
self.assertEqual(stdout.getvalue(), "")
def test_verbosity_1(self):
for deletion_message, kwargs in [
("Deleting", {}),
("Pretending to delete", {"dry_run": True}),
]:
with self.subTest(kwargs=kwargs):
stdout = StringIO()
self.run_collectstatic(verbosity=1, stdout=stdout, **kwargs)
output = stdout.getvalue()
self.assertIn("static file", output)
self.assertIn("deleted", output)
self.assertNotIn(deletion_message, output)
def test_verbosity_2(self):
for deletion_message, kwargs in [
("Deleting", {}),
("Pretending to delete", {"dry_run": True}),
]:
with self.subTest(kwargs=kwargs):
stdout = StringIO()
self.run_collectstatic(verbosity=2, stdout=stdout, **kwargs)
output = stdout.getvalue()
self.assertIn("static file", output)
self.assertIn("deleted", output)
self.assertIn(deletion_message, output)
class TestInteractiveMessages(CollectionTestCase):
overwrite_warning_msg = "This will overwrite existing files!"
delete_warning_msg = "This will DELETE ALL FILES in this location!"
files_copied_msg = "static files copied"
@staticmethod
def mock_input(stdout):
def _input(msg):
stdout.write(msg)
return "yes"
return _input
def test_warning_when_clearing_staticdir(self):
stdout = StringIO()
self.run_collectstatic()
with mock.patch("builtins.input", side_effect=self.mock_input(stdout)):
call_command("collectstatic", interactive=True, clear=True, stdout=stdout)
output = stdout.getvalue()
self.assertNotIn(self.overwrite_warning_msg, output)
self.assertIn(self.delete_warning_msg, output)
def test_warning_when_overwriting_files_in_staticdir(self):
stdout = StringIO()
self.run_collectstatic()
with mock.patch("builtins.input", side_effect=self.mock_input(stdout)):
call_command("collectstatic", interactive=True, stdout=stdout)
output = stdout.getvalue()
self.assertIn(self.overwrite_warning_msg, output)
self.assertNotIn(self.delete_warning_msg, output)
def test_no_warning_when_staticdir_does_not_exist(self):
stdout = StringIO()
shutil.rmtree(settings.STATIC_ROOT)
call_command("collectstatic", interactive=True, stdout=stdout)
output = stdout.getvalue()
self.assertNotIn(self.overwrite_warning_msg, output)
self.assertNotIn(self.delete_warning_msg, output)
self.assertIn(self.files_copied_msg, output)
def test_no_warning_for_empty_staticdir(self):
stdout = StringIO()
with tempfile.TemporaryDirectory(
prefix="collectstatic_empty_staticdir_test"
) as static_dir:
with override_settings(STATIC_ROOT=static_dir):
call_command("collectstatic", interactive=True, stdout=stdout)
output = stdout.getvalue()
self.assertNotIn(self.overwrite_warning_msg, output)
self.assertNotIn(self.delete_warning_msg, output)
self.assertIn(self.files_copied_msg, output)
def test_cancelled(self):
self.run_collectstatic()
with mock.patch("builtins.input", side_effect=lambda _: "no"):
with self.assertRaisesMessage(
CommandError, "Collecting static files cancelled"
):
call_command("collectstatic", interactive=True)
class TestCollectionNoDefaultIgnore(TestDefaults, CollectionTestCase):
"""
The ``--no-default-ignore`` option of the ``collectstatic`` management
command.
"""
def run_collectstatic(self):
super().run_collectstatic(use_default_ignore_patterns=False)
def test_no_common_ignore_patterns(self):
"""
With --no-default-ignore, common ignore patterns (*~, .*, CVS)
are not ignored.
"""
self.assertFileContains("test/.hidden", "should be ignored")
self.assertFileContains("test/backup~", "should be ignored")
self.assertFileContains("test/CVS", "should be ignored")
@override_settings(
INSTALLED_APPS=[
"staticfiles_tests.apps.staticfiles_config.IgnorePatternsAppConfig",
"staticfiles_tests.apps.test",
]
)
class TestCollectionCustomIgnorePatterns(CollectionTestCase):
def test_custom_ignore_patterns(self):
"""
A custom ignore_patterns list, ['*.css', '*/vendor/*.js'] in this case,
can be specified in an AppConfig definition.
"""
self.assertFileNotFound("test/nonascii.css")
self.assertFileContains("test/.hidden", "should be ignored")
self.assertFileNotFound(os.path.join("test", "vendor", "module.js"))
class TestCollectionDryRun(TestNoFilesCreated, CollectionTestCase):
"""
Test ``--dry-run`` option for ``collectstatic`` management command.
"""
def run_collectstatic(self):
super().run_collectstatic(dry_run=True)
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage"
},
}
)
class TestCollectionDryRunManifestStaticFilesStorage(TestCollectionDryRun):
pass
class TestCollectionFilesOverride(CollectionTestCase):
"""
    Test that the ``collectstatic`` management command overrides duplicated
    files correctly. Check that the app order in INSTALLED_APPS is respected
    even if file modification dates suggest a different order:
'staticfiles_test_app',
'staticfiles_tests.apps.no_label',
"""
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.temp_dir)
# get modification and access times for no_label/static/file2.txt
self.orig_path = os.path.join(
TEST_ROOT, "apps", "no_label", "static", "file2.txt"
)
self.orig_mtime = os.path.getmtime(self.orig_path)
self.orig_atime = os.path.getatime(self.orig_path)
        # Prepare a duplicate of file2.txt from a temporary app. This file
        # has a modification time older than no_label/static/file2.txt, but
        # it should still be collected to STATIC_ROOT because the temporary
        # app comes before the 'no_label' app in INSTALLED_APPS.
self.temp_app_path = os.path.join(self.temp_dir, "staticfiles_test_app")
self.testfile_path = os.path.join(self.temp_app_path, "static", "file2.txt")
os.makedirs(self.temp_app_path)
with open(os.path.join(self.temp_app_path, "__init__.py"), "w+"):
pass
os.makedirs(os.path.dirname(self.testfile_path))
with open(self.testfile_path, "w+") as f:
f.write("duplicate of file2.txt")
os.utime(self.testfile_path, (self.orig_atime - 1, self.orig_mtime - 1))
settings_with_test_app = self.modify_settings(
INSTALLED_APPS={"prepend": "staticfiles_test_app"},
)
with extend_sys_path(self.temp_dir):
settings_with_test_app.enable()
self.addCleanup(settings_with_test_app.disable)
super().setUp()
def test_ordering_override(self):
"""
        Test that collectstatic collects files in the proper app order.
"""
self.assertFileContains("file2.txt", "duplicate of file2.txt")
# run collectstatic again
self.run_collectstatic()
self.assertFileContains("file2.txt", "duplicate of file2.txt")
# The collectstatic test suite already has conflicting files since both
# project/test/file.txt and apps/test/static/test/file.txt are collected. To
# properly test for the warning not happening unless we tell it to explicitly,
# we remove the project directory and will add back a conflicting file later.
@override_settings(STATICFILES_DIRS=[])
class TestCollectionOverwriteWarning(CollectionTestCase):
"""
Test warning in ``collectstatic`` output when a file is skipped because a
previous file was already written to the same path.
"""
# If this string is in the collectstatic output, it means the warning we're
# looking for was emitted.
warning_string = "Found another file"
def _collectstatic_output(self, verbosity=3, **kwargs):
"""
Run collectstatic, and capture and return the output.
"""
out = StringIO()
call_command(
"collectstatic",
interactive=False,
verbosity=verbosity,
stdout=out,
**kwargs,
)
return out.getvalue()
def test_no_warning(self):
"""
There isn't a warning if there isn't a duplicate destination.
"""
output = self._collectstatic_output(clear=True)
self.assertNotIn(self.warning_string, output)
def test_warning_at_verbosity_2(self):
"""
There is a warning when there are duplicate destinations at verbosity
2+.
"""
with tempfile.TemporaryDirectory() as static_dir:
duplicate = os.path.join(static_dir, "test", "file.txt")
os.mkdir(os.path.dirname(duplicate))
with open(duplicate, "w+") as f:
f.write("duplicate of file.txt")
with self.settings(STATICFILES_DIRS=[static_dir]):
output = self._collectstatic_output(clear=True, verbosity=2)
self.assertIn(self.warning_string, output)
def test_no_warning_at_verbosity_1(self):
"""
        There is no individual warning at verbosity 1, but a summary is shown.
"""
with tempfile.TemporaryDirectory() as static_dir:
duplicate = os.path.join(static_dir, "test", "file.txt")
os.mkdir(os.path.dirname(duplicate))
with open(duplicate, "w+") as f:
f.write("duplicate of file.txt")
with self.settings(STATICFILES_DIRS=[static_dir]):
output = self._collectstatic_output(clear=True, verbosity=1)
self.assertNotIn(self.warning_string, output)
self.assertIn("1 skipped due to conflict", output)
def test_summary_multiple_conflicts(self):
"""
        The summary shows the correct count for multiple conflicts.
"""
with tempfile.TemporaryDirectory() as static_dir:
duplicate1 = os.path.join(static_dir, "test", "file.txt")
os.makedirs(os.path.dirname(duplicate1))
with open(duplicate1, "w+") as f:
f.write("duplicate of file.txt")
duplicate2 = os.path.join(static_dir, "test", "file1.txt")
with open(duplicate2, "w+") as f:
f.write("duplicate of file1.txt")
duplicate3 = os.path.join(static_dir, "test", "nonascii.css")
shutil.copy2(duplicate1, duplicate3)
with self.settings(STATICFILES_DIRS=[static_dir]):
output = self._collectstatic_output(clear=True, verbosity=1)
self.assertIn("3 skipped due to conflict", output)
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "staticfiles_tests.storage.DummyStorage"
},
}
)
class TestCollectionNonLocalStorage(TestNoFilesCreated, CollectionTestCase):
"""
Tests for a Storage that implements get_modified_time() but not path()
(#15035).
"""
def test_storage_properties(self):
# Properties of the Storage as described in the ticket.
storage = DummyStorage()
self.assertEqual(
storage.get_modified_time("name"),
datetime.datetime(1970, 1, 1, tzinfo=datetime.UTC),
)
with self.assertRaisesMessage(
NotImplementedError, "This backend doesn't support absolute paths."
):
storage.path("name")
class TestCollectionNeverCopyStorage(CollectionTestCase):
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "staticfiles_tests.storage.NeverCopyRemoteStorage"
},
}
)
def test_skips_newer_files_in_remote_storage(self):
"""
collectstatic skips newer files in a remote storage.
run_collectstatic() in setUp() copies the static files, then files are
always skipped after NeverCopyRemoteStorage is activated since
NeverCopyRemoteStorage.get_modified_time() returns a datetime in the
future to simulate an unmodified file.
"""
stdout = StringIO()
self.run_collectstatic(stdout=stdout, verbosity=2)
output = stdout.getvalue()
self.assertIn("Skipping 'test.txt' (not modified)", output)
@unittest.skipUnless(symlinks_supported(), "Must be able to symlink to run this test.")
class TestCollectionLinks(TestDefaults, CollectionTestCase):
"""
Test ``--link`` option for ``collectstatic`` management command.
Note that by inheriting ``TestDefaults`` we repeat all
the standard file resolving tests here, to make sure using
``--link`` does not change the file-selection semantics.
"""
def run_collectstatic(self, clear=False, link=True, **kwargs):
super().run_collectstatic(link=link, clear=clear, **kwargs)
def test_links_created(self):
"""
With ``--link``, symbolic links are created.
"""
self.assertTrue(os.path.islink(os.path.join(settings.STATIC_ROOT, "test.txt")))
def test_broken_symlink(self):
"""
Test broken symlink gets deleted.
"""
path = os.path.join(settings.STATIC_ROOT, "test.txt")
os.unlink(path)
self.run_collectstatic()
self.assertTrue(os.path.islink(path))
def test_symlinks_and_files_replaced(self):
"""
Running collectstatic in non-symlink mode replaces symlinks with files,
while symlink mode replaces files with symlinks.
"""
path = os.path.join(settings.STATIC_ROOT, "test.txt")
self.assertTrue(os.path.islink(path))
self.run_collectstatic(link=False)
self.assertFalse(os.path.islink(path))
self.run_collectstatic(link=True)
self.assertTrue(os.path.islink(path))
def test_clear_broken_symlink(self):
"""
With ``--clear``, broken symbolic links are deleted.
"""
nonexistent_file_path = os.path.join(settings.STATIC_ROOT, "nonexistent.txt")
broken_symlink_path = os.path.join(settings.STATIC_ROOT, "symlink.txt")
os.symlink(nonexistent_file_path, broken_symlink_path)
self.run_collectstatic(clear=True)
self.assertFalse(os.path.lexists(broken_symlink_path))
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "staticfiles_tests.storage.PathNotImplementedStorage"
},
}
)
def test_no_remote_link(self):
with self.assertRaisesMessage(
CommandError, "Can't symlink to a remote destination."
):
self.run_collectstatic()
|
./temp_repos/django/django/contrib/sites/management.py
|
./temp_repos/django/tests/staticfiles_tests/test_management.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.db, django.conf, django.apps, django.core.management.color
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
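For reference, a minimal sketch of one possible answer follows. Since the row's meta_class is 'Unknown', the concrete target is an assumption: it treats the target as the create_default_site() signal handler in the django/contrib/sites/management.py path shown above, and patches that module's collaborators (settings, router, connections) so no database or configured settings are needed. All mock names are illustrative.
import unittest
from unittest import mock

from django.contrib.sites import management as site_management


class CreateDefaultSiteTests(unittest.TestCase):
    def _make_apps(self, site_exists):
        # Fake app registry whose get_model() returns a mocked Site model.
        apps = mock.MagicMock()
        site_model = apps.get_model.return_value
        site_model.objects.using.return_value.exists.return_value = site_exists
        return apps, site_model

    @mock.patch.object(site_management, "settings")
    @mock.patch.object(site_management, "connections")
    @mock.patch.object(site_management, "router")
    def test_creates_site_when_table_is_empty(self, router, connections, settings):
        # Arrange
        apps, site_model = self._make_apps(site_exists=False)
        router.allow_migrate_model.return_value = True
        # Return no SQL so the sequence-reset branch is skipped.
        connections.__getitem__.return_value.ops.sequence_reset_sql.return_value = []
        # Act
        site_management.create_default_site(
            app_config=None, verbosity=0, interactive=False, apps=apps
        )
        # Assert: a Site instance was constructed and saved.
        site_model.return_value.save.assert_called_once()

    @mock.patch.object(site_management, "router")
    def test_skips_creation_when_router_disallows(self, router):
        # Arrange
        apps, site_model = self._make_apps(site_exists=False)
        router.allow_migrate_model.return_value = False
        # Act
        site_management.create_default_site(
            app_config=None, verbosity=0, interactive=False, apps=apps
        )
        # Assert: nothing was saved.
        site_model.return_value.save.assert_not_called()


if __name__ == "__main__":
    unittest.main()
Patching at the module boundary keeps the test hermetic: the fake app registry stands in for django.apps, while the router mock drives the failure path.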
|
Unknown
|
python
|
import collections
from itertools import chain
from django.apps import apps
from django.conf import settings
from django.contrib.admin.exceptions import NotRegistered
from django.contrib.admin.utils import NotRelationField, flatten, get_fields_from_path
from django.core import checks
from django.core.exceptions import FieldDoesNotExist
from django.db import models
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import Combinable
from django.forms.models import BaseModelForm, BaseModelFormSet, _get_foreign_key
from django.template import engines
from django.template.backends.django import DjangoTemplates
from django.utils.module_loading import import_string
def _issubclass(cls, classinfo):
"""
issubclass() variant that doesn't raise an exception if cls isn't a
class.
"""
try:
return issubclass(cls, classinfo)
except TypeError:
return False
def _contains_subclass(class_path, candidate_paths):
"""
Return whether or not a dotted class path (or a subclass of that class) is
found in a list of candidate paths.
"""
cls = import_string(class_path)
for path in candidate_paths:
try:
candidate_cls = import_string(path)
except ImportError:
# ImportErrors are raised elsewhere.
continue
if _issubclass(candidate_cls, cls):
return True
return False
def check_admin_app(app_configs, **kwargs):
from django.contrib.admin.sites import all_sites
errors = []
for site in all_sites:
errors.extend(site.check(app_configs))
return errors
def check_dependencies(**kwargs):
"""
Check that the admin's dependencies are correctly installed.
"""
from django.contrib.admin.sites import all_sites
if not apps.is_installed("django.contrib.admin"):
return []
errors = []
app_dependencies = (
("django.contrib.contenttypes", 401),
("django.contrib.auth", 405),
("django.contrib.messages", 406),
)
for app_name, error_code in app_dependencies:
if not apps.is_installed(app_name):
errors.append(
checks.Error(
"'%s' must be in INSTALLED_APPS in order to use the admin "
"application." % app_name,
id="admin.E%d" % error_code,
)
)
for engine in engines.all():
if isinstance(engine, DjangoTemplates):
django_templates_instance = engine.engine
break
else:
django_templates_instance = None
if not django_templates_instance:
errors.append(
checks.Error(
"A 'django.template.backends.django.DjangoTemplates' instance "
"must be configured in TEMPLATES in order to use the admin "
"application.",
id="admin.E403",
)
)
else:
if (
"django.contrib.auth.context_processors.auth"
not in django_templates_instance.context_processors
and _contains_subclass(
"django.contrib.auth.backends.ModelBackend",
settings.AUTHENTICATION_BACKENDS,
)
):
errors.append(
checks.Error(
"'django.contrib.auth.context_processors.auth' must be "
"enabled in DjangoTemplates (TEMPLATES) if using the default "
"auth backend in order to use the admin application.",
id="admin.E402",
)
)
if (
"django.contrib.messages.context_processors.messages"
not in django_templates_instance.context_processors
):
errors.append(
checks.Error(
"'django.contrib.messages.context_processors.messages' must "
"be enabled in DjangoTemplates (TEMPLATES) in order to use "
"the admin application.",
id="admin.E404",
)
)
sidebar_enabled = any(site.enable_nav_sidebar for site in all_sites)
if (
sidebar_enabled
and "django.template.context_processors.request"
not in django_templates_instance.context_processors
):
errors.append(
checks.Warning(
"'django.template.context_processors.request' must be enabled "
"in DjangoTemplates (TEMPLATES) in order to use the admin "
"navigation sidebar.",
id="admin.W411",
)
)
if not _contains_subclass(
"django.contrib.auth.middleware.AuthenticationMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.auth.middleware.AuthenticationMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
id="admin.E408",
)
)
if not _contains_subclass(
"django.contrib.messages.middleware.MessageMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.messages.middleware.MessageMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
id="admin.E409",
)
)
if not _contains_subclass(
"django.contrib.sessions.middleware.SessionMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.sessions.middleware.SessionMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
hint=(
"Insert "
"'django.contrib.sessions.middleware.SessionMiddleware' "
"before "
"'django.contrib.auth.middleware.AuthenticationMiddleware'."
),
id="admin.E410",
)
)
return errors
class BaseModelAdminChecks:
def check(self, admin_obj, **kwargs):
return [
*self._check_autocomplete_fields(admin_obj),
*self._check_raw_id_fields(admin_obj),
*self._check_fields(admin_obj),
*self._check_fieldsets(admin_obj),
*self._check_exclude(admin_obj),
*self._check_form(admin_obj),
*self._check_filter_vertical(admin_obj),
*self._check_filter_horizontal(admin_obj),
*self._check_radio_fields(admin_obj),
*self._check_prepopulated_fields(admin_obj),
*self._check_view_on_site_url(admin_obj),
*self._check_ordering(admin_obj),
*self._check_readonly_fields(admin_obj),
]
def _check_autocomplete_fields(self, obj):
"""
Check that `autocomplete_fields` is a list or tuple of model fields.
"""
if not isinstance(obj.autocomplete_fields, (list, tuple)):
return must_be(
"a list or tuple",
option="autocomplete_fields",
obj=obj,
id="admin.E036",
)
else:
return list(
chain.from_iterable(
[
self._check_autocomplete_fields_item(
obj, field_name, "autocomplete_fields[%d]" % index
)
for index, field_name in enumerate(obj.autocomplete_fields)
]
)
)
def _check_autocomplete_fields_item(self, obj, field_name, label):
"""
Check that an item in `autocomplete_fields` is a ForeignKey or a
ManyToManyField and that the item has a related ModelAdmin with
search_fields defined.
"""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E037"
)
else:
if not field.many_to_many and not isinstance(field, models.ForeignKey):
return must_be(
"a foreign key or a many-to-many field",
option=label,
obj=obj,
id="admin.E038",
)
try:
related_admin = obj.admin_site.get_model_admin(field.remote_field.model)
except NotRegistered:
return [
checks.Error(
'An admin for model "%s" has to be registered '
"to be referenced by %s.autocomplete_fields."
% (
field.remote_field.model.__name__,
type(obj).__name__,
),
obj=obj.__class__,
id="admin.E039",
)
]
else:
if not related_admin.search_fields:
return [
checks.Error(
'%s must define "search_fields", because it\'s '
"referenced by %s.autocomplete_fields."
% (
related_admin.__class__.__name__,
type(obj).__name__,
),
obj=obj.__class__,
id="admin.E040",
)
]
return []
def _check_raw_id_fields(self, obj):
"""Check that `raw_id_fields` only contains field names that are listed
on the model."""
if not isinstance(obj.raw_id_fields, (list, tuple)):
return must_be(
"a list or tuple", option="raw_id_fields", obj=obj, id="admin.E001"
)
else:
return list(
chain.from_iterable(
self._check_raw_id_fields_item(
obj, field_name, "raw_id_fields[%d]" % index
)
for index, field_name in enumerate(obj.raw_id_fields)
)
)
def _check_raw_id_fields_item(self, obj, field_name, label):
"""Check an item of `raw_id_fields`, i.e. check that field named
`field_name` exists in model `model` and is a ForeignKey or a
ManyToManyField."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E002"
)
else:
# Using attname is not supported.
if field.name != field_name:
return refer_to_missing_field(
field=field_name,
option=label,
obj=obj,
id="admin.E002",
)
if not field.many_to_many and not isinstance(field, models.ForeignKey):
return must_be(
"a foreign key or a many-to-many field",
option=label,
obj=obj,
id="admin.E003",
)
else:
return []
def _check_fields(self, obj):
"""Check that `fields` only refer to existing fields, doesn't contain
duplicates. Check if at most one of `fields` and `fieldsets` is
defined.
"""
if obj.fields is None:
return []
elif not isinstance(obj.fields, (list, tuple)):
return must_be("a list or tuple", option="fields", obj=obj, id="admin.E004")
elif obj.fieldsets:
return [
checks.Error(
"Both 'fieldsets' and 'fields' are specified.",
obj=obj.__class__,
id="admin.E005",
)
]
field_counts = collections.Counter(flatten(obj.fields))
if duplicate_fields := [
field for field, count in field_counts.items() if count > 1
]:
return [
checks.Error(
"The value of 'fields' contains duplicate field(s).",
hint="Remove duplicates of %s."
% ", ".join(map(repr, duplicate_fields)),
obj=obj.__class__,
id="admin.E006",
)
]
return list(
chain.from_iterable(
self._check_field_spec(obj, field_name, "fields")
for field_name in obj.fields
)
)
def _check_fieldsets(self, obj):
"""Check that fieldsets is properly formatted and doesn't contain
duplicates."""
if obj.fieldsets is None:
return []
elif not isinstance(obj.fieldsets, (list, tuple)):
return must_be(
"a list or tuple", option="fieldsets", obj=obj, id="admin.E007"
)
else:
seen_fields = []
return list(
chain.from_iterable(
self._check_fieldsets_item(
obj, fieldset, "fieldsets[%d]" % index, seen_fields
)
for index, fieldset in enumerate(obj.fieldsets)
)
)
def _check_fieldsets_item(self, obj, fieldset, label, seen_fields):
"""Check an item of `fieldsets`, i.e. check that this is a pair of a
        set name and a dictionary containing a "fields" key."""
if not isinstance(fieldset, (list, tuple)):
return must_be("a list or tuple", option=label, obj=obj, id="admin.E008")
elif len(fieldset) != 2:
return must_be("of length 2", option=label, obj=obj, id="admin.E009")
elif not isinstance(fieldset[1], dict):
return must_be(
"a dictionary", option="%s[1]" % label, obj=obj, id="admin.E010"
)
elif "fields" not in fieldset[1]:
return [
checks.Error(
"The value of '%s[1]' must contain the key 'fields'." % label,
obj=obj.__class__,
id="admin.E011",
)
]
elif not isinstance(fieldset[1]["fields"], (list, tuple)):
return must_be(
"a list or tuple",
option="%s[1]['fields']" % label,
obj=obj,
id="admin.E008",
)
fieldset_fields = flatten(fieldset[1]["fields"])
seen_fields.extend(fieldset_fields)
field_counts = collections.Counter(seen_fields)
fieldset_fields_set = set(fieldset_fields)
if duplicate_fields := [
field
for field, count in field_counts.items()
if count > 1 and field in fieldset_fields_set
]:
return [
checks.Error(
"There are duplicate field(s) in '%s[1]'." % label,
hint="Remove duplicates of %s."
% ", ".join(map(repr, duplicate_fields)),
obj=obj.__class__,
id="admin.E012",
)
]
return list(
chain.from_iterable(
self._check_field_spec(obj, fieldset_fields, '%s[1]["fields"]' % label)
for fieldset_fields in fieldset[1]["fields"]
)
)
def _check_field_spec(self, obj, fields, label):
"""`fields` should be an item of `fields` or an item of
fieldset[1]['fields'] for any `fieldset` in `fieldsets`. It should be a
field name or a tuple of field names."""
if isinstance(fields, tuple):
return list(
chain.from_iterable(
self._check_field_spec_item(
obj, field_name, "%s[%d]" % (label, index)
)
for index, field_name in enumerate(fields)
)
)
else:
return self._check_field_spec_item(obj, fields, label)
def _check_field_spec_item(self, obj, field_name, label):
if field_name in obj.readonly_fields:
# Stuff can be put in fields that isn't actually a model field if
# it's in readonly_fields, readonly_fields will handle the
# validation of such things.
return []
else:
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
# If we can't find a field on the model that matches, it could
# be an extra field on the form.
return []
else:
if (
isinstance(field, models.ManyToManyField)
and not field.remote_field.through._meta.auto_created
):
return [
checks.Error(
"The value of '%s' cannot include the ManyToManyField "
"'%s', because that field manually specifies a "
"relationship model." % (label, field_name),
obj=obj.__class__,
id="admin.E013",
)
]
else:
return []
def _check_exclude(self, obj):
"""Check that exclude is a sequence without duplicates."""
if obj.exclude is None: # default value is None
return []
elif not isinstance(obj.exclude, (list, tuple)):
return must_be(
"a list or tuple", option="exclude", obj=obj, id="admin.E014"
)
field_counts = collections.Counter(obj.exclude)
if duplicate_fields := [
field for field, count in field_counts.items() if count > 1
]:
return [
checks.Error(
"The value of 'exclude' contains duplicate field(s).",
hint="Remove duplicates of %s."
% ", ".join(map(repr, duplicate_fields)),
obj=obj.__class__,
id="admin.E015",
)
]
else:
return []
def _check_form(self, obj):
"""Check that form subclasses BaseModelForm."""
if not _issubclass(obj.form, BaseModelForm):
return must_inherit_from(
parent="BaseModelForm", option="form", obj=obj, id="admin.E016"
)
else:
return []
def _check_filter_vertical(self, obj):
"""Check that filter_vertical is a sequence of field names."""
if not isinstance(obj.filter_vertical, (list, tuple)):
return must_be(
"a list or tuple", option="filter_vertical", obj=obj, id="admin.E017"
)
else:
return list(
chain.from_iterable(
self._check_filter_item(
obj, field_name, "filter_vertical[%d]" % index
)
for index, field_name in enumerate(obj.filter_vertical)
)
)
def _check_filter_horizontal(self, obj):
"""Check that filter_horizontal is a sequence of field names."""
if not isinstance(obj.filter_horizontal, (list, tuple)):
return must_be(
"a list or tuple", option="filter_horizontal", obj=obj, id="admin.E018"
)
else:
return list(
chain.from_iterable(
self._check_filter_item(
obj, field_name, "filter_horizontal[%d]" % index
)
for index, field_name in enumerate(obj.filter_horizontal)
)
)
def _check_filter_item(self, obj, field_name, label):
"""Check one item of `filter_vertical` or `filter_horizontal`, i.e.
        check that the given field exists and is a ManyToManyField."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E019"
)
else:
if not field.many_to_many or isinstance(field, models.ManyToManyRel):
return must_be(
"a many-to-many field", option=label, obj=obj, id="admin.E020"
)
elif not field.remote_field.through._meta.auto_created:
return [
checks.Error(
f"The value of '{label}' cannot include the ManyToManyField "
f"'{field_name}', because that field manually specifies a "
f"relationship model.",
obj=obj.__class__,
id="admin.E013",
)
]
else:
return []
def _check_radio_fields(self, obj):
"""Check that `radio_fields` is a dictionary."""
if not isinstance(obj.radio_fields, dict):
return must_be(
"a dictionary", option="radio_fields", obj=obj, id="admin.E021"
)
else:
return list(
chain.from_iterable(
self._check_radio_fields_key(obj, field_name, "radio_fields")
+ self._check_radio_fields_value(
obj, val, 'radio_fields["%s"]' % field_name
)
for field_name, val in obj.radio_fields.items()
)
)
def _check_radio_fields_key(self, obj, field_name, label):
"""Check that a key of `radio_fields` dictionary is name of existing
field and that the field is a ForeignKey or has `choices` defined."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E022"
)
else:
if not (isinstance(field, models.ForeignKey) or field.choices):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not an "
"instance of ForeignKey, and does not have a 'choices' "
"definition." % (label, field_name),
obj=obj.__class__,
id="admin.E023",
)
]
else:
return []
def _check_radio_fields_value(self, obj, val, label):
"""Check type of a value of `radio_fields` dictionary."""
from django.contrib.admin.options import HORIZONTAL, VERTICAL
if val not in (HORIZONTAL, VERTICAL):
return [
checks.Error(
"The value of '%s' must be either admin.HORIZONTAL or "
"admin.VERTICAL." % label,
obj=obj.__class__,
id="admin.E024",
)
]
else:
return []
def _check_view_on_site_url(self, obj):
if not callable(obj.view_on_site) and not isinstance(obj.view_on_site, bool):
return [
checks.Error(
"The value of 'view_on_site' must be a callable or a boolean "
"value.",
obj=obj.__class__,
id="admin.E025",
)
]
else:
return []
def _check_prepopulated_fields(self, obj):
"""Check that `prepopulated_fields` is a dictionary containing allowed
field types."""
if not isinstance(obj.prepopulated_fields, dict):
return must_be(
"a dictionary", option="prepopulated_fields", obj=obj, id="admin.E026"
)
else:
return list(
chain.from_iterable(
self._check_prepopulated_fields_key(
obj, field_name, "prepopulated_fields"
)
+ self._check_prepopulated_fields_value(
obj, val, 'prepopulated_fields["%s"]' % field_name
)
for field_name, val in obj.prepopulated_fields.items()
)
)
def _check_prepopulated_fields_key(self, obj, field_name, label):
"""Check a key of `prepopulated_fields` dictionary, i.e. check that it
is a name of existing field and the field is one of the allowed types.
"""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E027"
)
else:
if isinstance(
field, (models.DateTimeField, models.ForeignKey, models.ManyToManyField)
):
return [
checks.Error(
"The value of '%s' refers to '%s', which must not be a "
"DateTimeField, a ForeignKey, a OneToOneField, or a "
"ManyToManyField." % (label, field_name),
obj=obj.__class__,
id="admin.E028",
)
]
else:
return []
def _check_prepopulated_fields_value(self, obj, val, label):
"""Check a value of `prepopulated_fields` dictionary, i.e. it's an
iterable of existing fields."""
if not isinstance(val, (list, tuple)):
return must_be("a list or tuple", option=label, obj=obj, id="admin.E029")
else:
return list(
chain.from_iterable(
self._check_prepopulated_fields_value_item(
obj, subfield_name, "%s[%r]" % (label, index)
)
for index, subfield_name in enumerate(val)
)
)
def _check_prepopulated_fields_value_item(self, obj, field_name, label):
"""For `prepopulated_fields` equal to {"slug": ("title",)},
`field_name` is "title"."""
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E030"
)
else:
return []
def _check_ordering(self, obj):
"""Check that ordering refers to existing fields or is random."""
# ordering = None
if obj.ordering is None: # The default value is None
return []
elif not isinstance(obj.ordering, (list, tuple)):
return must_be(
"a list or tuple", option="ordering", obj=obj, id="admin.E031"
)
else:
return list(
chain.from_iterable(
self._check_ordering_item(obj, field_name, "ordering[%d]" % index)
for index, field_name in enumerate(obj.ordering)
)
)
def _check_ordering_item(self, obj, field_name, label):
"""Check that `ordering` refers to existing fields."""
if isinstance(field_name, (Combinable, models.OrderBy)):
if not isinstance(field_name, models.OrderBy):
field_name = field_name.asc()
if isinstance(field_name.expression, models.F):
field_name = field_name.expression.name
else:
return []
if field_name == "?" and len(obj.ordering) != 1:
return [
checks.Error(
"The value of 'ordering' has the random ordering marker '?', "
"but contains other fields as well.",
hint='Either remove the "?", or remove the other fields.',
obj=obj.__class__,
id="admin.E032",
)
]
elif field_name == "?":
return []
elif LOOKUP_SEP in field_name:
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
return []
else:
field_name = field_name.removeprefix("-")
if field_name == "pk":
return []
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E033"
)
else:
return []
def _check_readonly_fields(self, obj):
"""Check that readonly_fields refers to proper attribute or field."""
if obj.readonly_fields == ():
return []
elif not isinstance(obj.readonly_fields, (list, tuple)):
return must_be(
"a list or tuple", option="readonly_fields", obj=obj, id="admin.E034"
)
else:
return list(
chain.from_iterable(
self._check_readonly_fields_item(
obj, field_name, "readonly_fields[%d]" % index
)
for index, field_name in enumerate(obj.readonly_fields)
)
)
def _check_readonly_fields_item(self, obj, field_name, label):
if callable(field_name):
return []
elif hasattr(obj, field_name):
return []
elif hasattr(obj.model, field_name):
return []
else:
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a callable, "
"an attribute of '%s', or an attribute of '%s'."
% (
label,
field_name,
obj.__class__.__name__,
obj.model._meta.label,
),
obj=obj.__class__,
id="admin.E035",
)
]
else:
return []
class ModelAdminChecks(BaseModelAdminChecks):
def check(self, admin_obj, **kwargs):
return [
*super().check(admin_obj),
*self._check_save_as(admin_obj),
*self._check_save_on_top(admin_obj),
*self._check_inlines(admin_obj),
*self._check_list_display(admin_obj),
*self._check_list_display_links(admin_obj),
*self._check_list_filter(admin_obj),
*self._check_list_select_related(admin_obj),
*self._check_list_per_page(admin_obj),
*self._check_list_max_show_all(admin_obj),
*self._check_list_editable(admin_obj),
*self._check_search_fields(admin_obj),
*self._check_date_hierarchy(admin_obj),
*self._check_actions(admin_obj),
]
def _check_save_as(self, obj):
"""Check save_as is a boolean."""
if not isinstance(obj.save_as, bool):
return must_be("a boolean", option="save_as", obj=obj, id="admin.E101")
else:
return []
def _check_save_on_top(self, obj):
"""Check save_on_top is a boolean."""
if not isinstance(obj.save_on_top, bool):
return must_be("a boolean", option="save_on_top", obj=obj, id="admin.E102")
else:
return []
def _check_inlines(self, obj):
"""Check all inline model admin classes."""
if not isinstance(obj.inlines, (list, tuple)):
return must_be(
"a list or tuple", option="inlines", obj=obj, id="admin.E103"
)
else:
return list(
chain.from_iterable(
self._check_inlines_item(obj, item, "inlines[%d]" % index)
for index, item in enumerate(obj.inlines)
)
)
def _check_inlines_item(self, obj, inline, label):
"""Check one inline model admin."""
try:
inline_label = inline.__module__ + "." + inline.__name__
except AttributeError:
return [
checks.Error(
"'%s' must inherit from 'InlineModelAdmin'." % obj,
obj=obj.__class__,
id="admin.E104",
)
]
from django.contrib.admin.options import InlineModelAdmin
if not _issubclass(inline, InlineModelAdmin):
return [
checks.Error(
"'%s' must inherit from 'InlineModelAdmin'." % inline_label,
obj=obj.__class__,
id="admin.E104",
)
]
elif not inline.model:
return [
checks.Error(
"'%s' must have a 'model' attribute." % inline_label,
obj=obj.__class__,
id="admin.E105",
)
]
elif not _issubclass(inline.model, models.Model):
return must_be(
"a Model", option="%s.model" % inline_label, obj=obj, id="admin.E106"
)
else:
return inline(obj.model, obj.admin_site).check()
def _check_list_display(self, obj):
"""Check list_display only contains fields or usable attributes."""
if not isinstance(obj.list_display, (list, tuple)):
return must_be(
"a list or tuple", option="list_display", obj=obj, id="admin.E107"
)
else:
return list(
chain.from_iterable(
self._check_list_display_item(obj, item, "list_display[%d]" % index)
for index, item in enumerate(obj.list_display)
)
)
def _check_list_display_item(self, obj, item, label):
if callable(item):
return []
elif hasattr(obj, item):
return []
try:
field = obj.model._meta.get_field(item)
except FieldDoesNotExist:
try:
field = getattr(obj.model, item)
except AttributeError:
try:
field = get_fields_from_path(obj.model, item)[-1]
except (FieldDoesNotExist, NotRelationField):
return [
checks.Error(
f"The value of '{label}' refers to '{item}', which is not "
f"a callable or attribute of '{obj.__class__.__name__}', "
"or an attribute, method, or field on "
f"'{obj.model._meta.label}'.",
obj=obj.__class__,
id="admin.E108",
)
]
if (
getattr(field, "is_relation", False)
and (field.many_to_many or field.one_to_many)
) or (getattr(field, "rel", None) and field.rel.field.many_to_one):
return [
checks.Error(
f"The value of '{label}' must not be a many-to-many field or a "
f"reverse foreign key.",
obj=obj.__class__,
id="admin.E109",
)
]
return []
def _check_list_display_links(self, obj):
"""Check that list_display_links is a unique subset of list_display."""
from django.contrib.admin.options import ModelAdmin
if obj.list_display_links is None:
return []
elif not isinstance(obj.list_display_links, (list, tuple)):
return must_be(
"a list, a tuple, or None",
option="list_display_links",
obj=obj,
id="admin.E110",
)
# Check only if ModelAdmin.get_list_display() isn't overridden.
elif obj.get_list_display.__func__ is ModelAdmin.get_list_display:
return list(
chain.from_iterable(
self._check_list_display_links_item(
obj, field_name, "list_display_links[%d]" % index
)
for index, field_name in enumerate(obj.list_display_links)
)
)
return []
def _check_list_display_links_item(self, obj, field_name, label):
if field_name not in obj.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not defined in "
"'list_display'." % (label, field_name),
obj=obj.__class__,
id="admin.E111",
)
]
else:
return []
def _check_list_filter(self, obj):
if not isinstance(obj.list_filter, (list, tuple)):
return must_be(
"a list or tuple", option="list_filter", obj=obj, id="admin.E112"
)
else:
return list(
chain.from_iterable(
self._check_list_filter_item(obj, item, "list_filter[%d]" % index)
for index, item in enumerate(obj.list_filter)
)
)
def _check_list_filter_item(self, obj, item, label):
"""
Check one item of `list_filter`, the three valid options are:
1. 'field' -- a basic field filter, possibly w/ relationships (e.g.
'field__rel')
2. ('field', SomeFieldListFilter) - a field-based list filter class
3. SomeListFilter - a non-field list filter class
"""
from django.contrib.admin import FieldListFilter, ListFilter
if callable(item) and not isinstance(item, models.Field):
# If item is option 3, it should be a ListFilter...
if not _issubclass(item, ListFilter):
return must_inherit_from(
parent="ListFilter", option=label, obj=obj, id="admin.E113"
)
# ... but not a FieldListFilter.
elif issubclass(item, FieldListFilter):
return [
checks.Error(
"The value of '%s' must not inherit from 'FieldListFilter'."
% label,
obj=obj.__class__,
id="admin.E114",
)
]
else:
return []
elif isinstance(item, (tuple, list)):
# item is option #2
field, list_filter_class = item
if not _issubclass(list_filter_class, FieldListFilter):
return must_inherit_from(
parent="FieldListFilter",
option="%s[1]" % label,
obj=obj,
id="admin.E115",
)
else:
return []
else:
# item is option #1
field = item
# Validate the field string
try:
get_fields_from_path(obj.model, field)
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of '%s' refers to '%s', which does not refer to a "
"Field." % (label, field),
obj=obj.__class__,
id="admin.E116",
)
]
else:
return []
def _check_list_select_related(self, obj):
"""Check that list_select_related is a boolean, a list or a tuple."""
if not isinstance(obj.list_select_related, (bool, list, tuple)):
return must_be(
"a boolean, tuple or list",
option="list_select_related",
obj=obj,
id="admin.E117",
)
else:
return []
def _check_list_per_page(self, obj):
"""Check that list_per_page is an integer."""
if not isinstance(obj.list_per_page, int):
return must_be(
"an integer", option="list_per_page", obj=obj, id="admin.E118"
)
else:
return []
def _check_list_max_show_all(self, obj):
"""Check that list_max_show_all is an integer."""
if not isinstance(obj.list_max_show_all, int):
return must_be(
"an integer", option="list_max_show_all", obj=obj, id="admin.E119"
)
else:
return []
def _check_list_editable(self, obj):
"""Check that list_editable is a sequence of editable fields from
list_display without first element."""
if not isinstance(obj.list_editable, (list, tuple)):
return must_be(
"a list or tuple", option="list_editable", obj=obj, id="admin.E120"
)
else:
return list(
chain.from_iterable(
self._check_list_editable_item(
obj, item, "list_editable[%d]" % index
)
for index, item in enumerate(obj.list_editable)
)
)
def _check_list_editable_item(self, obj, field_name, label):
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E121"
)
else:
if field_name not in obj.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not "
"contained in 'list_display'." % (label, field_name),
obj=obj.__class__,
id="admin.E122",
)
]
elif obj.list_display_links and field_name in obj.list_display_links:
return [
checks.Error(
"The value of '%s' cannot be in both 'list_editable' and "
"'list_display_links'." % field_name,
obj=obj.__class__,
id="admin.E123",
)
]
# If list_display[0] is in list_editable, check that
# list_display_links is set. See #22792 and #26229 for use cases.
elif (
obj.list_display[0] == field_name
and not obj.list_display_links
and obj.list_display_links is not None
):
return [
checks.Error(
"The value of '%s' refers to the first field in 'list_display' "
"('%s'), which cannot be used unless 'list_display_links' is "
"set." % (label, obj.list_display[0]),
obj=obj.__class__,
id="admin.E124",
)
]
elif not field.editable or field.primary_key:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not editable "
"through the admin." % (label, field_name),
obj=obj.__class__,
id="admin.E125",
)
]
else:
return []
def _check_search_fields(self, obj):
"""Check search_fields is a sequence."""
if not isinstance(obj.search_fields, (list, tuple)):
return must_be(
"a list or tuple", option="search_fields", obj=obj, id="admin.E126"
)
else:
return []
def _check_date_hierarchy(self, obj):
"""Check that date_hierarchy refers to DateField or DateTimeField."""
if obj.date_hierarchy is None:
return []
else:
try:
field = get_fields_from_path(obj.model, obj.date_hierarchy)[-1]
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of 'date_hierarchy' refers to '%s', which "
"does not refer to a Field." % obj.date_hierarchy,
obj=obj.__class__,
id="admin.E127",
)
]
else:
if field.get_internal_type() not in {"DateField", "DateTimeField"}:
return must_be(
"a DateField or DateTimeField",
option="date_hierarchy",
obj=obj,
id="admin.E128",
)
else:
return []
def _check_actions(self, obj):
errors = []
actions = obj._get_base_actions()
        # Actions with an allowed_permissions attribute require the ModelAdmin
        # to implement a has_<perm>_permission() method for each permission.
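        # For example, an action declaring allowed_permissions = ("publish",)
        # requires the ModelAdmin to define has_publish_permission(self,
        # request). ("publish" is an illustrative permission name.)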
for func, name, _ in actions:
if not hasattr(func, "allowed_permissions"):
continue
for permission in func.allowed_permissions:
method_name = "has_%s_permission" % permission
if not hasattr(obj, method_name):
errors.append(
checks.Error(
"%s must define a %s() method for the %s action."
% (
obj.__class__.__name__,
method_name,
func.__name__,
),
obj=obj.__class__,
id="admin.E129",
)
)
# Names need to be unique.
names = collections.Counter(name for _, name, _ in actions)
for name, count in names.items():
if count > 1:
errors.append(
checks.Error(
"__name__ attributes of actions defined in %s must be "
"unique. Name %r is not unique."
% (
obj.__class__.__name__,
name,
),
obj=obj.__class__,
id="admin.E130",
)
)
return errors
class InlineModelAdminChecks(BaseModelAdminChecks):
def check(self, inline_obj, **kwargs):
parent_model = inline_obj.parent_model
return [
*super().check(inline_obj),
*self._check_relation(inline_obj, parent_model),
*self._check_exclude_of_parent_model(inline_obj, parent_model),
*self._check_extra(inline_obj),
*self._check_max_num(inline_obj),
*self._check_min_num(inline_obj),
*self._check_formset(inline_obj),
]
def _check_exclude_of_parent_model(self, obj, parent_model):
# Do not perform more specific checks if the base checks result in an
# error.
errors = super()._check_exclude(obj)
if errors:
return []
# Skip if `fk_name` is invalid.
if self._check_relation(obj, parent_model):
return []
if obj.exclude is None:
return []
fk = _get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name)
if fk.name in obj.exclude:
return [
checks.Error(
"Cannot exclude the field '%s', because it is the foreign key "
"to the parent model '%s'."
% (
fk.name,
parent_model._meta.label,
),
obj=obj.__class__,
id="admin.E201",
)
]
else:
return []
def _check_relation(self, obj, parent_model):
try:
_get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name)
except ValueError as e:
return [checks.Error(e.args[0], obj=obj.__class__, id="admin.E202")]
else:
return []
def _check_extra(self, obj):
"""Check that extra is an integer."""
if not isinstance(obj.extra, int):
return must_be("an integer", option="extra", obj=obj, id="admin.E203")
else:
return []
def _check_max_num(self, obj):
"""Check that max_num is an integer."""
if obj.max_num is None:
return []
elif not isinstance(obj.max_num, int):
return must_be("an integer", option="max_num", obj=obj, id="admin.E204")
else:
return []
def _check_min_num(self, obj):
"""Check that min_num is an integer."""
if obj.min_num is None:
return []
elif not isinstance(obj.min_num, int):
return must_be("an integer", option="min_num", obj=obj, id="admin.E205")
else:
return []
def _check_formset(self, obj):
"""Check formset is a subclass of BaseModelFormSet."""
if not _issubclass(obj.formset, BaseModelFormSet):
return must_inherit_from(
parent="BaseModelFormSet", option="formset", obj=obj, id="admin.E206"
)
else:
return []
def must_be(type, option, obj, id):
return [
checks.Error(
"The value of '%s' must be %s." % (option, type),
obj=obj.__class__,
id=id,
),
]
def must_inherit_from(parent, option, obj, id):
return [
checks.Error(
"The value of '%s' must inherit from '%s'." % (option, parent),
obj=obj.__class__,
id=id,
),
]
def refer_to_missing_field(field, option, obj, id):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a field of '%s'."
% (option, field, obj.model._meta.label),
obj=obj.__class__,
id=id,
),
]
|
from pathlib import Path
from unittest import mock
from django.conf import DEFAULT_STORAGE_ALIAS, STATICFILES_STORAGE_ALIAS, settings
from django.contrib.staticfiles.checks import E005, check_finders, check_storages
from django.contrib.staticfiles.finders import BaseFinder, get_finder
from django.core.checks import Error, Warning
from django.test import SimpleTestCase, override_settings
from .cases import CollectionTestCase
from .settings import TEST_ROOT
class FindersCheckTests(CollectionTestCase):
run_collectstatic_in_setUp = False
def test_base_finder_check_not_implemented(self):
finder = BaseFinder()
msg = (
"subclasses may provide a check() method to verify the finder is "
"configured correctly."
)
with self.assertRaisesMessage(NotImplementedError, msg):
finder.check()
def test_check_finders(self):
"""check_finders() concatenates all errors."""
error1 = Error("1")
error2 = Error("2")
error3 = Error("3")
def get_finders():
class Finder1(BaseFinder):
def check(self, **kwargs):
return [error1]
class Finder2(BaseFinder):
def check(self, **kwargs):
return []
class Finder3(BaseFinder):
def check(self, **kwargs):
return [error2, error3]
class Finder4(BaseFinder):
pass
return [Finder1(), Finder2(), Finder3(), Finder4()]
with mock.patch("django.contrib.staticfiles.checks.get_finders", get_finders):
errors = check_finders(None)
self.assertEqual(errors, [error1, error2, error3])
def test_no_errors_with_test_settings(self):
self.assertEqual(check_finders(None), [])
@override_settings(STATICFILES_DIRS="a string")
def test_dirs_not_tuple_or_list(self):
self.assertEqual(
check_finders(None),
[
Error(
"The STATICFILES_DIRS setting is not a tuple or list.",
hint="Perhaps you forgot a trailing comma?",
id="staticfiles.E001",
)
],
)
def test_dirs_contains_static_root(self):
with self.settings(STATICFILES_DIRS=[settings.STATIC_ROOT]):
self.assertEqual(
check_finders(None),
[
Error(
"The STATICFILES_DIRS setting should not contain the "
"STATIC_ROOT setting.",
id="staticfiles.E002",
)
],
)
def test_dirs_contains_static_root_in_tuple(self):
with self.settings(STATICFILES_DIRS=[("prefix", settings.STATIC_ROOT)]):
self.assertEqual(
check_finders(None),
[
Error(
"The STATICFILES_DIRS setting should not contain the "
"STATIC_ROOT setting.",
id="staticfiles.E002",
)
],
)
def test_prefix_contains_trailing_slash(self):
static_dir = Path(TEST_ROOT) / "project" / "documents"
with self.settings(STATICFILES_DIRS=[("prefix/", static_dir)]):
self.assertEqual(
check_finders(None),
[
Error(
"The prefix 'prefix/' in the STATICFILES_DIRS setting must "
"not end with a slash.",
id="staticfiles.E003",
),
],
)
def test_nonexistent_directories(self):
with self.settings(
STATICFILES_DIRS=[
"/fake/path",
("prefix", "/fake/prefixed/path"),
]
):
self.assertEqual(
check_finders(None),
[
Warning(
"The directory '/fake/path' in the STATICFILES_DIRS "
"setting does not exist.",
id="staticfiles.W004",
),
Warning(
"The directory '/fake/prefixed/path' in the "
"STATICFILES_DIRS setting does not exist.",
id="staticfiles.W004",
),
],
)
# Nonexistent directories are skipped.
finder = get_finder("django.contrib.staticfiles.finders.FileSystemFinder")
self.assertEqual(list(finder.list(None)), [])
class StoragesCheckTests(SimpleTestCase):
@override_settings(STORAGES={})
def test_error_empty_storages(self):
errors = check_storages(None)
self.assertEqual(errors, [E005])
@override_settings(
STORAGES={
DEFAULT_STORAGE_ALIAS: {
"BACKEND": "django.core.files.storage.FileSystemStorage",
},
"example": {
"BACKEND": "ignore.me",
},
}
)
def test_error_missing_staticfiles(self):
errors = check_storages(None)
self.assertEqual(errors, [E005])
@override_settings(
STORAGES={
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
},
}
)
def test_staticfiles_no_errors(self):
errors = check_storages(None)
self.assertEqual(errors, [])
|
./temp_repos/django/django/contrib/admin/checks.py
|
./temp_repos/django/tests/staticfiles_tests/test_checks.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'BaseModelAdminChecks'.
Context:
- Class Name: BaseModelAdminChecks
- Dependencies to Mock: None detected
- Key Imports: collections, django.utils.module_loading, itertools, django.conf, django.db, django.contrib.admin.sites, django.contrib.admin, django.contrib.admin.exceptions, django.core, django.core.exceptions
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
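For reference, a minimal sketch of one possible answer follows. It exercises BaseModelAdminChecks._check_raw_id_fields() (defined in the source above) against a mocked admin object, so no database, app registry, or configured settings are required; the checker's method name comes straight from the source, while the mock setup is illustrative.
import unittest
from unittest import mock

from django.contrib.admin.checks import BaseModelAdminChecks


class BaseModelAdminChecksTests(unittest.TestCase):
    def setUp(self):
        self.checker = BaseModelAdminChecks()

    def test_raw_id_fields_rejects_non_sequence(self):
        # Arrange: an admin object whose raw_id_fields is not a list/tuple.
        admin_obj = mock.MagicMock()
        admin_obj.raw_id_fields = "not-a-sequence"
        # Act
        errors = self.checker._check_raw_id_fields(admin_obj)
        # Assert: exactly one admin.E001 error is reported.
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, "admin.E001")

    def test_raw_id_fields_accepts_empty_tuple(self):
        # Arrange
        admin_obj = mock.MagicMock()
        admin_obj.raw_id_fields = ()
        # Act
        errors = self.checker._check_raw_id_fields(admin_obj)
        # Assert: an empty sequence produces no errors.
        self.assertEqual(errors, [])


if __name__ == "__main__":
    unittest.main()
Testing through the check method keeps the test independent of any real model: the MagicMock stands in for the ModelAdmin, and only the returned Error objects are inspected.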
|
BaseModelAdminChecks
|
python
|
from django.core.files.storage.filesystem import FileSystemStorage
class NoReadFileSystemStorage(FileSystemStorage):
def open(self, *args, **kwargs):
raise AssertionError("This storage class does not support reading.")
|
import json
import os
import shutil
import sys
import tempfile
import unittest
from io import StringIO
from pathlib import Path
from unittest import mock
from django.conf import STATICFILES_STORAGE_ALIAS, settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.management.commands.collectstatic import (
Command as CollectstaticCommand,
)
from django.core.management import call_command
from django.test import SimpleTestCase, override_settings
from .cases import CollectionTestCase
from .settings import TEST_ROOT
def hashed_file_path(test, path):
fullpath = test.render_template(test.static_template_snippet(path))
return fullpath.removeprefix(settings.STATIC_URL)
class TestHashedFiles:
hashed_file_path = hashed_file_path
def tearDown(self):
# Clear hashed files to avoid side effects among tests.
storage.staticfiles_storage.hashed_files.clear()
def assertPostCondition(self):
"""
Assert post conditions for a test are met. Must be manually called at
the end of each test.
"""
pass
def test_template_tag_return(self):
self.assertStaticRaises(
ValueError, "does/not/exist.png", "/static/does/not/exist.png"
)
self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt")
self.assertStaticRenders(
"test/file.txt", "/static/test/file.dad0999e4f8f.txt", asvar=True
)
self.assertStaticRenders(
"cached/styles.css", "/static/cached/styles.5e0040571e1a.css"
)
self.assertStaticRenders("path/", "/static/path/")
self.assertStaticRenders("path/?query", "/static/path/?query")
self.assertPostCondition()
def test_template_tag_simple_content(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_ignored_completely(self):
relpath = self.hashed_file_path("cached/css/ignored.css")
self.assertEqual(relpath, "cached/css/ignored.55e7c226dda1.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"#foobar", content)
self.assertIn(b"http:foobar", content)
self.assertIn(b"https:foobar", content)
self.assertIn(b"data:foobar", content)
self.assertIn(b"chrome:foobar", content)
self.assertIn(b"//foobar", content)
self.assertIn(b"url()", content)
self.assertPostCondition()
def test_path_with_querystring(self):
relpath = self.hashed_file_path("cached/styles.css?spam=eggs")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css?spam=eggs")
with storage.staticfiles_storage.open(
"cached/styles.5e0040571e1a.css"
) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_with_fragment(self):
relpath = self.hashed_file_path("cached/styles.css#eggs")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css#eggs")
with storage.staticfiles_storage.open(
"cached/styles.5e0040571e1a.css"
) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_with_querystring_and_fragment(self):
relpath = self.hashed_file_path("cached/css/fragments.css")
self.assertEqual(relpath, "cached/css/fragments.7fe344dee895.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"fonts/font.b9b105392eb8.eot?#iefix", content)
self.assertIn(b"fonts/font.b8d603e42714.svg#webfontIyfZbseF", content)
self.assertIn(
b"fonts/font.b8d603e42714.svg#path/to/../../fonts/font.svg", content
)
self.assertIn(
b"data:font/woff;charset=utf-8;"
b"base64,d09GRgABAAAAADJoAA0AAAAAR2QAAQAAAAAAAAAAAAA",
content,
)
self.assertIn(b"#default#VML", content)
self.assertPostCondition()
def test_template_tag_absolute(self):
relpath = self.hashed_file_path("cached/absolute.css")
self.assertEqual(relpath, "cached/absolute.eb04def9f9a4.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/static/cached/styles.css", content)
self.assertIn(b"/static/cached/styles.5e0040571e1a.css", content)
self.assertNotIn(b"/static/styles_root.css", content)
self.assertIn(b"/static/styles_root.401f2509a628.css", content)
self.assertIn(b"/static/cached/img/relative.acae32e4532b.png", content)
self.assertPostCondition()
def test_template_tag_absolute_root(self):
"""
Like test_template_tag_absolute, but for a file in STATIC_ROOT
(#26249).
"""
relpath = self.hashed_file_path("absolute_root.css")
self.assertEqual(relpath, "absolute_root.f821df1b64f7.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/static/styles_root.css", content)
self.assertIn(b"/static/styles_root.401f2509a628.css", content)
self.assertPostCondition()
def test_template_tag_relative(self):
relpath = self.hashed_file_path("cached/relative.css")
self.assertEqual(relpath, "cached/relative.c3e9e1ea6f2e.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"../cached/styles.css", content)
self.assertNotIn(b'@import "styles.css"', content)
self.assertNotIn(b"url(img/relative.png)", content)
self.assertIn(b'url("img/relative.acae32e4532b.png")', content)
self.assertIn(b"../cached/styles.5e0040571e1a.css", content)
self.assertPostCondition()
def test_import_replacement(self):
"See #18050"
relpath = self.hashed_file_path("cached/import.css")
self.assertEqual(relpath, "cached/import.f53576679e5a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b"""import url("styles.5e0040571e1a.css")""", relfile.read())
self.assertPostCondition()
def test_template_tag_deep_relative(self):
relpath = self.hashed_file_path("cached/css/window.css")
self.assertEqual(relpath, "cached/css/window.5d5c10836967.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"url(img/window.png)", content)
self.assertIn(b'url("img/window.acae32e4532b.png")', content)
self.assertPostCondition()
def test_template_tag_url(self):
relpath = self.hashed_file_path("cached/url.css")
self.assertEqual(relpath, "cached/url.902310b73412.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b"https://", relfile.read())
self.assertPostCondition()
@override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "loop")],
STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"],
)
def test_import_loop(self):
finders.get_finder.cache_clear()
err = StringIO()
with self.assertRaisesMessage(RuntimeError, "Max post-process passes exceeded"):
call_command("collectstatic", interactive=False, verbosity=0, stderr=err)
self.assertEqual(
"Post-processing 'bar.css, foo.css' failed!\n\n", err.getvalue()
)
self.assertPostCondition()
def test_post_processing(self):
"""
post_processing behaves correctly.
Files that are alterable should always be post-processed; files that
aren't should be skipped.
collectstatic has already been called once in setUp() for this
testcase, therefore we check by verifying behavior on a second run.
"""
collectstatic_args = {
"interactive": False,
"verbosity": 0,
"link": False,
"clear": False,
"dry_run": False,
"post_process": True,
"use_default_ignore_patterns": True,
"ignore_patterns": ["*.ignoreme"],
}
collectstatic_cmd = CollectstaticCommand()
collectstatic_cmd.set_options(**collectstatic_args)
stats = collectstatic_cmd.collect()
self.assertIn(
os.path.join("cached", "css", "window.css"), stats["post_processed"]
)
self.assertIn(
os.path.join("cached", "css", "img", "window.png"), stats["unmodified"]
)
self.assertIn(os.path.join("test", "nonascii.css"), stats["post_processed"])
# No file should be yielded twice.
self.assertCountEqual(stats["post_processed"], set(stats["post_processed"]))
self.assertPostCondition()
def test_css_import_case_insensitive(self):
relpath = self.hashed_file_path("cached/styles_insensitive.css")
self.assertEqual(relpath, "cached/styles_insensitive.3fa427592a53.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_css_data_uri_with_nested_url(self):
relpath = self.hashed_file_path("cached/data_uri_with_nested_url.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'url("data:image/svg+xml,url(%23b) url(%23c)")', content)
self.assertPostCondition()
def test_css_source_map(self):
relpath = self.hashed_file_path("cached/source_map.css")
self.assertEqual(relpath, "cached/source_map.b2fceaf426aa.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/*# sourceMappingURL=source_map.css.map*/", content)
self.assertIn(
b"/*# sourceMappingURL=source_map.css.99914b932bd3.map */",
content,
)
self.assertPostCondition()
def test_css_source_map_tabs(self):
relpath = self.hashed_file_path("cached/source_map_tabs.css")
self.assertEqual(relpath, "cached/source_map_tabs.b2fceaf426aa.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/*#\tsourceMappingURL=source_map.css.map\t*/", content)
self.assertIn(
b"/*# sourceMappingURL=source_map.css.99914b932bd3.map */",
content,
)
self.assertPostCondition()
def test_css_source_map_sensitive(self):
relpath = self.hashed_file_path("cached/source_map_sensitive.css")
self.assertEqual(relpath, "cached/source_map_sensitive.456683f2106f.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"/*# sOuRcEMaPpInGURL=source_map.css.map */", content)
self.assertNotIn(
b"/*# sourceMappingURL=source_map.css.99914b932bd3.map */",
content,
)
self.assertPostCondition()
def test_css_source_map_data_uri(self):
relpath = self.hashed_file_path("cached/source_map_data_uri.css")
self.assertEqual(relpath, "cached/source_map_data_uri.3166be10260d.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
source_map_data_uri = (
b"/*# sourceMappingURL=data:application/json;charset=utf8;base64,"
b"eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIl9zcmMv*/"
)
self.assertIn(source_map_data_uri, content)
self.assertPostCondition()
def test_js_source_map(self):
relpath = self.hashed_file_path("cached/source_map.js")
self.assertEqual(relpath, "cached/source_map.cd45b8534a87.js")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"//# sourceMappingURL=source_map.js.map", content)
self.assertIn(
b"//# sourceMappingURL=source_map.js.99914b932bd3.map",
content,
)
self.assertPostCondition()
def test_js_source_map_trailing_whitespace(self):
relpath = self.hashed_file_path("cached/source_map_trailing_whitespace.js")
self.assertEqual(
relpath, "cached/source_map_trailing_whitespace.cd45b8534a87.js"
)
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"//# sourceMappingURL=source_map.js.map\t ", content)
self.assertIn(
b"//# sourceMappingURL=source_map.js.99914b932bd3.map",
content,
)
self.assertPostCondition()
def test_js_source_map_sensitive(self):
relpath = self.hashed_file_path("cached/source_map_sensitive.js")
self.assertEqual(relpath, "cached/source_map_sensitive.5da96fdd3cb3.js")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"//# sOuRcEMaPpInGURL=source_map.js.map", content)
self.assertNotIn(
b"//# sourceMappingURL=source_map.js.99914b932bd3.map",
content,
)
self.assertPostCondition()
def test_js_source_map_data_uri(self):
relpath = self.hashed_file_path("cached/source_map_data_uri.js")
self.assertEqual(relpath, "cached/source_map_data_uri.a68d23cbf6dd.js")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
source_map_data_uri = (
b"//# sourceMappingURL=data:application/json;charset=utf8;base64,"
b"eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIl9zcmMv"
)
self.assertIn(source_map_data_uri, content)
self.assertPostCondition()
@override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "faulty")],
STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"],
)
def test_post_processing_failure(self):
"""
post_processing indicates the origin of the error when it fails.
"""
finders.get_finder.cache_clear()
err = StringIO()
with self.assertRaises(Exception):
call_command("collectstatic", interactive=False, verbosity=0, stderr=err)
self.assertEqual("Post-processing 'faulty.css' failed!\n\n", err.getvalue())
self.assertPostCondition()
@override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "nonutf8")],
STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"],
)
def test_post_processing_nonutf8(self):
finders.get_finder.cache_clear()
err = StringIO()
with self.assertRaises(UnicodeDecodeError):
call_command("collectstatic", interactive=False, verbosity=0, stderr=err)
self.assertEqual("Post-processing 'nonutf8.css' failed!\n\n", err.getvalue())
self.assertPostCondition()
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "staticfiles_tests.storage.ExtraPatternsStorage",
},
}
)
class TestExtraPatternsStorage(CollectionTestCase):
def setUp(self):
storage.staticfiles_storage.hashed_files.clear() # avoid cache interference
super().setUp()
def cached_file_path(self, path):
fullpath = self.render_template(self.static_template_snippet(path))
return fullpath.replace(settings.STATIC_URL, "")
def test_multi_extension_patterns(self):
"""
With storage classes having several file extension patterns, only the
files matching a specific file pattern should be affected by the
substitution (#19670).
"""
# CSS files shouldn't be touched by JS patterns.
relpath = self.cached_file_path("cached/import.css")
self.assertEqual(relpath, "cached/import.f53576679e5a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b'import url("styles.5e0040571e1a.css")', relfile.read())
# Confirm JS patterns have been applied to JS files.
relpath = self.cached_file_path("cached/test.js")
self.assertEqual(relpath, "cached/test.388d7a790d46.js")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b'JS_URL("import.f53576679e5a.css")', relfile.read())
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
},
}
)
class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase):
"""
    Tests for the cache-busting storage.
"""
def setUp(self):
super().setUp()
temp_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(temp_dir, "test"))
self._clear_filename = os.path.join(temp_dir, "test", "cleared.txt")
with open(self._clear_filename, "w") as f:
f.write("to be deleted in one test")
patched_settings = self.settings(
STATICFILES_DIRS=settings.STATICFILES_DIRS + [temp_dir],
)
patched_settings.enable()
self.addCleanup(patched_settings.disable)
self.addCleanup(shutil.rmtree, temp_dir)
self._manifest_strict = storage.staticfiles_storage.manifest_strict
def tearDown(self):
if os.path.exists(self._clear_filename):
os.unlink(self._clear_filename)
storage.staticfiles_storage.manifest_strict = self._manifest_strict
super().tearDown()
def assertPostCondition(self):
hashed_files = storage.staticfiles_storage.hashed_files
# The in-memory version of the manifest matches the one on disk
# since a properly created manifest should cover all filenames.
if hashed_files:
manifest, _ = storage.staticfiles_storage.load_manifest()
self.assertEqual(hashed_files, manifest)
def test_manifest_exists(self):
filename = storage.staticfiles_storage.manifest_name
path = storage.staticfiles_storage.path(filename)
self.assertTrue(os.path.exists(path))
def test_manifest_does_not_exist(self):
storage.staticfiles_storage.manifest_name = "does.not.exist.json"
self.assertIsNone(storage.staticfiles_storage.read_manifest())
def test_manifest_does_not_ignore_permission_error(self):
with mock.patch("builtins.open", side_effect=PermissionError):
with self.assertRaises(PermissionError):
storage.staticfiles_storage.read_manifest()
def test_loaded_cache(self):
self.assertNotEqual(storage.staticfiles_storage.hashed_files, {})
manifest_content = storage.staticfiles_storage.read_manifest()
self.assertIn(
'"version": "%s"' % storage.staticfiles_storage.manifest_version,
manifest_content,
)
def test_parse_cache(self):
hashed_files = storage.staticfiles_storage.hashed_files
manifest, _ = storage.staticfiles_storage.load_manifest()
self.assertEqual(hashed_files, manifest)
def test_clear_empties_manifest(self):
cleared_file_name = storage.staticfiles_storage.clean_name(
os.path.join("test", "cleared.txt")
)
# collect the additional file
self.run_collectstatic()
hashed_files = storage.staticfiles_storage.hashed_files
self.assertIn(cleared_file_name, hashed_files)
manifest_content, _ = storage.staticfiles_storage.load_manifest()
self.assertIn(cleared_file_name, manifest_content)
original_path = storage.staticfiles_storage.path(cleared_file_name)
self.assertTrue(os.path.exists(original_path))
        # delete the original file from the app, collect with clear
os.unlink(self._clear_filename)
self.run_collectstatic(clear=True)
self.assertFileNotFound(original_path)
hashed_files = storage.staticfiles_storage.hashed_files
self.assertNotIn(cleared_file_name, hashed_files)
manifest_content, _ = storage.staticfiles_storage.load_manifest()
self.assertNotIn(cleared_file_name, manifest_content)
def test_missing_entry(self):
missing_file_name = "cached/missing.css"
configured_storage = storage.staticfiles_storage
self.assertNotIn(missing_file_name, configured_storage.hashed_files)
# File name not found in manifest
with self.assertRaisesMessage(
ValueError,
"Missing staticfiles manifest entry for '%s'" % missing_file_name,
):
self.hashed_file_path(missing_file_name)
configured_storage.manifest_strict = False
# File doesn't exist on disk
err_msg = "The file '%s' could not be found with %r." % (
missing_file_name,
configured_storage._wrapped,
)
with self.assertRaisesMessage(ValueError, err_msg):
self.hashed_file_path(missing_file_name)
content = StringIO()
content.write("Found")
configured_storage.save(missing_file_name, content)
# File exists on disk
self.hashed_file_path(missing_file_name)
def test_intermediate_files(self):
cached_files = os.listdir(os.path.join(settings.STATIC_ROOT, "cached"))
# Intermediate files shouldn't be created for reference.
self.assertEqual(
len(
[
cached_file
for cached_file in cached_files
if cached_file.startswith("relative.")
]
),
2,
)
def test_manifest_hash(self):
# Collect the additional file.
self.run_collectstatic()
_, manifest_hash_orig = storage.staticfiles_storage.load_manifest()
self.assertNotEqual(manifest_hash_orig, "")
self.assertEqual(storage.staticfiles_storage.manifest_hash, manifest_hash_orig)
# Saving doesn't change the hash.
storage.staticfiles_storage.save_manifest()
self.assertEqual(storage.staticfiles_storage.manifest_hash, manifest_hash_orig)
# Delete the original file from the app, collect with clear.
os.unlink(self._clear_filename)
self.run_collectstatic(clear=True)
# Hash is changed.
_, manifest_hash = storage.staticfiles_storage.load_manifest()
self.assertNotEqual(manifest_hash, manifest_hash_orig)
def test_manifest_hash_v1(self):
storage.staticfiles_storage.manifest_name = "staticfiles_v1.json"
manifest_content, manifest_hash = storage.staticfiles_storage.load_manifest()
self.assertEqual(manifest_hash, "")
self.assertEqual(manifest_content, {"dummy.txt": "dummy.txt"})
def test_manifest_file_consistent_content(self):
original_manifest_content = storage.staticfiles_storage.read_manifest()
hashed_files = storage.staticfiles_storage.hashed_files
# Force a change in the order of the hashed files.
with mock.patch.object(
storage.staticfiles_storage,
"hashed_files",
dict(reversed(hashed_files.items())),
):
storage.staticfiles_storage.save_manifest()
manifest_file_content = storage.staticfiles_storage.read_manifest()
# The manifest file content should not change.
self.assertEqual(original_manifest_content, manifest_file_content)
@override_settings(
STATIC_URL="/",
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
},
},
)
class TestCollectionManifestStorageStaticUrlSlash(CollectionTestCase):
run_collectstatic_in_setUp = False
hashed_file_path = hashed_file_path
def test_protocol_relative_url_ignored(self):
with override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, "project", "static_url_slash")],
STATICFILES_FINDERS=["django.contrib.staticfiles.finders.FileSystemFinder"],
):
self.run_collectstatic()
relpath = self.hashed_file_path("ignored.css")
self.assertEqual(relpath, "ignored.61707f5f4942.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"//foobar", content)
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "staticfiles_tests.storage.NoneHashStorage",
},
}
)
class TestCollectionNoneHashStorage(CollectionTestCase):
hashed_file_path = hashed_file_path
def test_hashed_name(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.css")
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "staticfiles_tests.storage.NoPostProcessReplacedPathStorage",
},
}
)
class TestCollectionNoPostProcessReplacedPaths(CollectionTestCase):
run_collectstatic_in_setUp = False
    def test_collectstatic_no_post_process_replaced_paths(self):
stdout = StringIO()
self.run_collectstatic(verbosity=1, stdout=stdout)
self.assertIn("post-processed", stdout.getvalue())
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "staticfiles_tests.storage.SimpleStorage",
},
}
)
class TestCollectionSimpleStorage(CollectionTestCase):
hashed_file_path = hashed_file_path
def setUp(self):
storage.staticfiles_storage.hashed_files.clear() # avoid cache interference
super().setUp()
def test_template_tag_return(self):
self.assertStaticRaises(
ValueError, "does/not/exist.png", "/static/does/not/exist.png"
)
self.assertStaticRenders("test/file.txt", "/static/test/file.deploy12345.txt")
self.assertStaticRenders(
"cached/styles.css", "/static/cached/styles.deploy12345.css"
)
self.assertStaticRenders("path/", "/static/path/")
self.assertStaticRenders("path/?query", "/static/path/?query")
def test_template_tag_simple_content(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.deploy12345.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.deploy12345.css", content)
class JSModuleImportAggregationManifestStorage(storage.ManifestStaticFilesStorage):
support_js_module_import_aggregation = True
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": (
"staticfiles_tests.test_storage."
"JSModuleImportAggregationManifestStorage"
),
},
}
)
class TestCollectionJSModuleImportAggregationManifestStorage(CollectionTestCase):
hashed_file_path = hashed_file_path
def test_module_import(self):
relpath = self.hashed_file_path("cached/module.js")
self.assertEqual(relpath, "cached/module.4326210cf0bd.js")
tests = [
# Relative imports.
b'import testConst from "./module_test.477bbebe77f0.js";',
b'import relativeModule from "../nested/js/nested.866475c46bb4.js";',
b'import { firstConst, secondConst } from "./module_test.477bbebe77f0.js";',
# Absolute import.
b'import rootConst from "/static/absolute_root.5586327fe78c.js";',
# Dynamic import.
b'const dynamicModule = import("./module_test.477bbebe77f0.js");',
# Creating a module object.
b'import * as NewModule from "./module_test.477bbebe77f0.js";',
# Creating a minified module object.
b'import*as m from "./module_test.477bbebe77f0.js";',
b'import* as m from "./module_test.477bbebe77f0.js";',
b'import *as m from "./module_test.477bbebe77f0.js";',
# Aliases.
b'import { testConst as alias } from "./module_test.477bbebe77f0.js";',
b"import {\n"
b" firstVar1 as firstVarAlias,\n"
b" $second_var_2 as secondVarAlias\n"
b'} from "./module_test.477bbebe77f0.js";',
]
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
for module_import in tests:
with self.subTest(module_import=module_import):
self.assertIn(module_import, content)
def test_aggregating_modules(self):
relpath = self.hashed_file_path("cached/module.js")
self.assertEqual(relpath, "cached/module.4326210cf0bd.js")
tests = [
b'export * from "./module_test.477bbebe77f0.js";',
b'export { testConst } from "./module_test.477bbebe77f0.js";',
b"export {\n"
b" firstVar as firstVarAlias,\n"
b" secondVar as secondVarAlias\n"
b'} from "./module_test.477bbebe77f0.js";',
]
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
for module_import in tests:
with self.subTest(module_import=module_import):
self.assertIn(module_import, content)
class CustomManifestStorage(storage.ManifestStaticFilesStorage):
def __init__(self, *args, manifest_storage=None, **kwargs):
manifest_storage = storage.StaticFilesStorage(
location=kwargs.pop("manifest_location"),
)
super().__init__(*args, manifest_storage=manifest_storage, **kwargs)
class TestCustomManifestStorage(SimpleTestCase):
def setUp(self):
manifest_path = Path(tempfile.mkdtemp())
self.addCleanup(shutil.rmtree, manifest_path)
self.staticfiles_storage = CustomManifestStorage(
manifest_location=manifest_path,
)
self.manifest_file = manifest_path / self.staticfiles_storage.manifest_name
# Manifest without paths.
self.manifest = {"version": self.staticfiles_storage.manifest_version}
with self.manifest_file.open("w") as manifest_file:
json.dump(self.manifest, manifest_file)
def test_read_manifest(self):
self.assertEqual(
self.staticfiles_storage.read_manifest(),
json.dumps(self.manifest),
)
def test_read_manifest_nonexistent(self):
os.remove(self.manifest_file)
self.assertIsNone(self.staticfiles_storage.read_manifest())
def test_save_manifest_override(self):
self.assertIs(self.manifest_file.exists(), True)
self.staticfiles_storage.save_manifest()
self.assertIs(self.manifest_file.exists(), True)
new_manifest = json.loads(self.staticfiles_storage.read_manifest())
self.assertIn("paths", new_manifest)
self.assertNotEqual(new_manifest, self.manifest)
def test_save_manifest_create(self):
os.remove(self.manifest_file)
self.staticfiles_storage.save_manifest()
self.assertIs(self.manifest_file.exists(), True)
new_manifest = json.loads(self.staticfiles_storage.read_manifest())
self.assertIn("paths", new_manifest)
self.assertNotEqual(new_manifest, self.manifest)
class CustomStaticFilesStorage(storage.StaticFilesStorage):
"""
Used in TestStaticFilePermissions
"""
def __init__(self, *args, **kwargs):
kwargs["file_permissions_mode"] = 0o640
kwargs["directory_permissions_mode"] = 0o740
super().__init__(*args, **kwargs)
@unittest.skipIf(sys.platform == "win32", "Windows only partially supports chmod.")
class TestStaticFilePermissions(CollectionTestCase):
command_params = {
"interactive": False,
"verbosity": 0,
"ignore_patterns": ["*.ignoreme"],
}
def setUp(self):
self.umask = 0o027
old_umask = os.umask(self.umask)
self.addCleanup(os.umask, old_umask)
super().setUp()
# Don't run collectstatic command in this test class.
def run_collectstatic(self, **kwargs):
pass
@override_settings(
FILE_UPLOAD_PERMISSIONS=0o655,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765,
)
def test_collect_static_files_permissions(self):
call_command("collectstatic", **self.command_params)
static_root = Path(settings.STATIC_ROOT)
test_file = static_root / "test.txt"
file_mode = test_file.stat().st_mode & 0o777
self.assertEqual(file_mode, 0o655)
tests = [
static_root / "subdir",
static_root / "nested",
static_root / "nested" / "css",
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o765)
@override_settings(
FILE_UPLOAD_PERMISSIONS=None,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=None,
)
def test_collect_static_files_default_permissions(self):
call_command("collectstatic", **self.command_params)
static_root = Path(settings.STATIC_ROOT)
test_file = static_root / "test.txt"
file_mode = test_file.stat().st_mode & 0o777
self.assertEqual(file_mode, 0o666 & ~self.umask)
tests = [
static_root / "subdir",
static_root / "nested",
static_root / "nested" / "css",
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o777 & ~self.umask)
@override_settings(
FILE_UPLOAD_PERMISSIONS=0o655,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765,
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "staticfiles_tests.test_storage.CustomStaticFilesStorage",
},
},
)
def test_collect_static_files_subclass_of_static_storage(self):
call_command("collectstatic", **self.command_params)
static_root = Path(settings.STATIC_ROOT)
test_file = static_root / "test.txt"
file_mode = test_file.stat().st_mode & 0o777
self.assertEqual(file_mode, 0o640)
tests = [
static_root / "subdir",
static_root / "nested",
static_root / "nested" / "css",
]
for directory in tests:
with self.subTest(directory=directory):
dir_mode = directory.stat().st_mode & 0o777
self.assertEqual(dir_mode, 0o740)
@override_settings(
STORAGES={
**settings.STORAGES,
STATICFILES_STORAGE_ALIAS: {
"BACKEND": "django.contrib.staticfiles.storage.ManifestStaticFilesStorage",
},
}
)
class TestCollectionHashedFilesCache(CollectionTestCase):
"""
Files referenced from CSS use the correct final hashed name regardless of
the order in which the files are post-processed.
"""
hashed_file_path = hashed_file_path
def setUp(self):
super().setUp()
self._temp_dir = temp_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(temp_dir, "test"))
self.addCleanup(shutil.rmtree, temp_dir)
def _get_filename_path(self, filename):
return os.path.join(self._temp_dir, "test", filename)
def test_file_change_after_collectstatic(self):
# Create initial static files.
file_contents = (
("foo.png", "foo"),
("bar.css", 'url("foo.png")\nurl("xyz.png")'),
("xyz.png", "xyz"),
)
for filename, content in file_contents:
with open(self._get_filename_path(filename), "w") as f:
f.write(content)
with self.modify_settings(STATICFILES_DIRS={"append": self._temp_dir}):
finders.get_finder.cache_clear()
err = StringIO()
# First collectstatic run.
call_command("collectstatic", interactive=False, verbosity=0, stderr=err)
relpath = self.hashed_file_path("test/bar.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"foo.acbd18db4cc2.png", content)
self.assertIn(b"xyz.d16fb36f0911.png", content)
# Change the contents of the png files.
for filename in ("foo.png", "xyz.png"):
with open(self._get_filename_path(filename), "w+b") as f:
f.write(b"new content of file to change its hash")
# The hashes of the png files in the CSS file are updated after
# a second collectstatic.
call_command("collectstatic", interactive=False, verbosity=0, stderr=err)
relpath = self.hashed_file_path("test/bar.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b"foo.57a5cb9ba68d.png", content)
self.assertIn(b"xyz.57a5cb9ba68d.png", content)
|
./temp_repos/django/tests/model_fields/storage.py
|
./temp_repos/django/tests/staticfiles_tests/test_storage.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'NoReadFileSystemStorage'.
Context:
- Class Name: NoReadFileSystemStorage
- Dependencies to Mock: None detected
- Key Imports: django.core.files.storage.filesystem
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
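For orientation, a minimal sketch of such a test follows. It is an assumption-laden sketch, not the canonical test: it presumes NoReadFileSystemStorage is a FileSystemStorage subclass whose open() refuses reads (see the source path above), and that it is run from Django's tests/ directory.
import unittest
from unittest import mock

from django.conf import settings

if not settings.configured:
    settings.configure()  # assumption: minimal settings so storage defaults resolve

from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage

# Assumption: importable when the working directory is Django's tests/.
from model_fields.storage import NoReadFileSystemStorage


class NoReadFileSystemStorageTests(unittest.TestCase):
    def test_save_delegates_to_parent(self):
        # Arrange: patch the parent's _save so no disk I/O happens.
        storage = NoReadFileSystemStorage(location="/tmp/no-read-test")
        with mock.patch.object(
            FileSystemStorage, "_save", return_value="x.txt"
        ) as mocked_save:
            # Act
            name = storage.save("x.txt", ContentFile(b"data"))
        # Assert: the write path still works through the parent class.
        mocked_save.assert_called_once()
        self.assertEqual(name, "x.txt")

    def test_open_is_rejected(self):
        # Arrange
        storage = NoReadFileSystemStorage(location="/tmp/no-read-test")
        # Act / Assert: reading is assumed to raise.
        with self.assertRaises(Exception):
            storage.open("x.txt")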
|
NoReadFileSystemStorage
|
python
|
from urllib.parse import urlparse
from urllib.request import url2pathname
from asgiref.sync import sync_to_async
from django.conf import settings
from django.contrib.staticfiles import utils
from django.contrib.staticfiles.views import serve
from django.core.handlers.asgi import ASGIHandler
from django.core.handlers.exception import response_for_exception
from django.core.handlers.wsgi import WSGIHandler, get_path_info
from django.http import Http404
class StaticFilesHandlerMixin:
"""
Common methods used by WSGI and ASGI handlers.
"""
# May be used to differentiate between handler types (e.g. in a
# request_finished signal)
handles_files = True
def load_middleware(self):
# Middleware are already loaded for self.application; no need to reload
# them for self.
pass
def get_base_url(self):
utils.check_settings()
return settings.STATIC_URL
def _should_handle(self, path):
"""
Check if the path should be handled. Ignore the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
"""
return path.startswith(self.base_url.path) and not self.base_url.netloc
def file_path(self, url):
"""
Return the relative path to the media file on disk for the given URL.
"""
relative_url = url.removeprefix(self.base_url.path)
return url2pathname(relative_url)
def serve(self, request):
"""Serve the request path."""
return serve(request, self.file_path(request.path), insecure=True)
def get_response(self, request):
try:
return self.serve(request)
except Http404 as e:
return response_for_exception(request, e)
async def get_response_async(self, request):
try:
return await sync_to_async(self.serve, thread_sensitive=False)(request)
except Http404 as e:
return await sync_to_async(response_for_exception, thread_sensitive=False)(
request, e
)
class StaticFilesHandler(StaticFilesHandlerMixin, WSGIHandler):
"""
WSGI middleware that intercepts calls to the static files directory, as
defined by the STATIC_URL setting, and serves those files.
"""
def __init__(self, application):
self.application = application
self.base_url = urlparse(self.get_base_url())
super().__init__()
def __call__(self, environ, start_response):
if not self._should_handle(get_path_info(environ)):
return self.application(environ, start_response)
return super().__call__(environ, start_response)
class ASGIStaticFilesHandler(StaticFilesHandlerMixin, ASGIHandler):
"""
ASGI application which wraps another and intercepts requests for static
files, passing them off to Django's static file serving.
"""
def __init__(self, application):
self.application = application
self.base_url = urlparse(self.get_base_url())
async def __call__(self, scope, receive, send):
        # Only handle HTTP requests.
if scope["type"] == "http" and self._should_handle(scope["path"]):
# Serve static content
# (the one thing super() doesn't do is __call__, apparently)
return await super().__call__(scope, receive, send)
# Hand off to the main app
return await self.application(scope, receive, send)
async def get_response_async(self, request):
response = await super().get_response_async(request)
response._resource_closers.append(request.close)
# FileResponse is not async compatible.
if response.streaming and not response.is_async:
_iterator = response.streaming_content
async def awrapper():
for part in await sync_to_async(list)(_iterator):
yield part
response.streaming_content = awrapper()
return response
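# Minimal usage sketch (assumption: a standard Django WSGI setup): wrap the
# project's application so requests under STATIC_URL are served directly,
# while everything else falls through to the wrapped app.
#
#     from django.core.wsgi import get_wsgi_application
#     from django.contrib.staticfiles.handlers import StaticFilesHandler
#
#     application = StaticFilesHandler(get_wsgi_application())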
|
from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler
from django.core.handlers.asgi import ASGIHandler
from django.test import AsyncRequestFactory
from .cases import StaticFilesTestCase
class MockApplication:
"""ASGI application that returns a string indicating that it was called."""
async def __call__(self, scope, receive, send):
return "Application called"
class TestASGIStaticFilesHandler(StaticFilesTestCase):
async_request_factory = AsyncRequestFactory()
async def test_get_async_response(self):
request = self.async_request_factory.get("/static/test/file.txt")
handler = ASGIStaticFilesHandler(ASGIHandler())
response = await handler.get_response_async(request)
response.close()
self.assertEqual(response.status_code, 200)
async def test_get_async_response_not_found(self):
request = self.async_request_factory.get("/static/test/not-found.txt")
handler = ASGIStaticFilesHandler(ASGIHandler())
response = await handler.get_response_async(request)
self.assertEqual(response.status_code, 404)
async def test_non_http_requests_passed_to_the_wrapped_application(self):
tests = [
"/static/path.txt",
"/non-static/path.txt",
]
for path in tests:
with self.subTest(path=path):
scope = {"type": "websocket", "path": path}
handler = ASGIStaticFilesHandler(MockApplication())
response = await handler(scope, None, None)
self.assertEqual(response, "Application called")
|
./temp_repos/django/django/contrib/staticfiles/handlers.py
|
./temp_repos/django/tests/staticfiles_tests/test_handlers.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'StaticFilesHandlerMixin'.
Context:
- Class Name: StaticFilesHandlerMixin
- Dependencies to Mock: application
- Key Imports: django.conf, django.http, django.core.handlers.asgi, urllib.parse, django.core.handlers.wsgi, django.contrib.staticfiles, django.core.handlers.exception, urllib.request, django.contrib.staticfiles.views, asgiref.sync
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
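A minimal sketch follows, grounded in the handler source above. The FakeHandler subclass is a test-only stand-in, since the mixin expects base_url to be supplied by the WSGI/ASGI subclasses.
import unittest
from unittest import mock
from urllib.parse import urlparse

from django.contrib.staticfiles.handlers import StaticFilesHandlerMixin
from django.http import Http404


class FakeHandler(StaticFilesHandlerMixin):
    """Test-only concrete handler; base_url normally comes from get_base_url()."""

    def __init__(self, base_url="/static/"):
        self.base_url = urlparse(base_url)


class StaticFilesHandlerMixinTests(unittest.TestCase):
    def test_should_handle_success_and_failure(self):
        # Arrange
        handler = FakeHandler("/static/")
        # Act / Assert: paths under the prefix match, others don't.
        self.assertTrue(handler._should_handle("/static/css/app.css"))
        self.assertFalse(handler._should_handle("/media/file.txt"))
        # A base_url that includes a host is never handled locally.
        cdn = FakeHandler("http://cdn.example.com/static/")
        self.assertFalse(cdn._should_handle("/static/css/app.css"))

    def test_file_path_strips_base_url_prefix(self):
        handler = FakeHandler("/static/")
        # url2pathname() is platform-dependent; this asserts the POSIX form.
        self.assertEqual(handler.file_path("/static/css/app.css"), "css/app.css")

    def test_get_response_turns_http404_into_a_response(self):
        # Arrange: serve() raises, and response_for_exception is mocked out.
        handler = FakeHandler("/static/")
        request = mock.Mock(path="/static/missing.txt")
        with mock.patch.object(
            FakeHandler, "serve", side_effect=Http404("missing")
        ), mock.patch(
            "django.contrib.staticfiles.handlers.response_for_exception",
            return_value=mock.Mock(status_code=404),
        ):
            # Act
            response = handler.get_response(request)
        # Assert
        self.assertEqual(response.status_code, 404)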
|
StaticFilesHandlerMixin
|
python
|
"""
This module contains helper functions for controlling caching. It does so by
managing the "Vary" header of responses. It includes functions to patch the
header of response objects directly and decorators that change functions to do
that header-patching themselves.
For information on the Vary header, see RFC 9110 Section 12.5.5.
Essentially, the "Vary" HTTP header defines which headers a cache should take
into account when building its cache key. Requests with the same path but
different header content for headers named in "Vary" need to get different
cache keys to prevent delivery of wrong content.
An example: i18n middleware would need to distinguish caches by the
"Accept-language" header.
"""
import time
from collections import defaultdict
from hashlib import md5
from django.conf import settings
from django.core.cache import caches
from django.http import HttpResponse, HttpResponseNotModified
from django.utils.http import http_date, parse_etags, parse_http_date_safe, quote_etag
from django.utils.log import log_response
from django.utils.regex_helper import _lazy_re_compile
from django.utils.timezone import get_current_timezone_name
from django.utils.translation import get_language
cc_delim_re = _lazy_re_compile(r"\s*,\s*")
def patch_cache_control(response, **kwargs):
"""
Patch the Cache-Control header by adding all keyword arguments to it.
The transformation is as follows:
* All keyword parameter names are turned to lowercase, and underscores
are converted to hyphens.
* If the value of a parameter is True (exactly True, not just a
true value), only the parameter name is added to the header.
* All other parameters are added with their value, after applying
str() to it.
"""
def dictitem(s):
t = s.split("=", 1)
if len(t) > 1:
return (t[0].lower(), t[1])
else:
return (t[0].lower(), True)
def dictvalue(*t):
if t[1] is True:
return t[0]
else:
return "%s=%s" % (t[0], t[1])
cc = defaultdict(set)
if response.get("Cache-Control"):
for field in cc_delim_re.split(response.headers["Cache-Control"]):
directive, value = dictitem(field)
if directive == "no-cache":
# no-cache supports multiple field names.
cc[directive].add(value)
else:
cc[directive] = value
# If there's already a max-age header but we're being asked to set a new
# max-age, use the minimum of the two ages. In practice this happens when
# a decorator and a piece of middleware both operate on a given view.
if "max-age" in cc and "max_age" in kwargs:
kwargs["max_age"] = min(int(cc["max-age"]), kwargs["max_age"])
# Allow overriding private caching and vice versa
if "private" in cc and "public" in kwargs:
del cc["private"]
elif "public" in cc and "private" in kwargs:
del cc["public"]
for k, v in kwargs.items():
directive = k.replace("_", "-")
if directive == "no-cache":
# no-cache supports multiple field names.
cc[directive].add(v)
else:
cc[directive] = v
directives = []
for directive, values in cc.items():
if isinstance(values, set):
if True in values:
# True takes precedence.
values = {True}
directives.extend([dictvalue(directive, value) for value in values])
else:
directives.append(dictvalue(directive, values))
cc = ", ".join(directives)
response.headers["Cache-Control"] = cc
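# Illustrative example (comment only, not part of the original module):
# starting from "Cache-Control: max-age=3600", the call
#     patch_cache_control(response, max_age=600, public=True)
# leaves "max-age=600, public": the smaller max-age wins and keyword
# underscores become hyphens.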
def get_max_age(response):
"""
Return the max-age from the response Cache-Control header as an integer,
or None if it wasn't found or wasn't an integer.
"""
if not response.has_header("Cache-Control"):
return
cc = dict(
_to_tuple(el) for el in cc_delim_re.split(response.headers["Cache-Control"])
)
try:
return int(cc["max-age"])
except (ValueError, TypeError, KeyError):
pass
def set_response_etag(response):
if not response.streaming and response.content:
response.headers["ETag"] = quote_etag(
md5(response.content, usedforsecurity=False).hexdigest(),
)
return response
def _precondition_failed(request):
response = HttpResponse(status=412)
log_response(
"Precondition Failed: %s",
request.path,
response=response,
request=request,
)
return response
def _not_modified(request, response=None):
new_response = HttpResponseNotModified()
if response:
# Preserve the headers required by RFC 9110 Section 15.4.5, as well as
# Last-Modified.
for header in (
"Cache-Control",
"Content-Location",
"Date",
"ETag",
"Expires",
"Last-Modified",
"Vary",
):
if header in response:
new_response.headers[header] = response.headers[header]
# Preserve cookies as per the cookie specification: "If a proxy server
# receives a response which contains a Set-cookie header, it should
# propagate the Set-cookie header to the client, regardless of whether
    # the response was 304 (Not Modified) or 200 (OK)."
# https://curl.haxx.se/rfc/cookie_spec.html
new_response.cookies = response.cookies
return new_response
def get_conditional_response(request, etag=None, last_modified=None, response=None):
# Only return conditional responses on successful requests.
if response and not (200 <= response.status_code < 300):
return response
# Get HTTP request headers.
if_match_etags = parse_etags(request.META.get("HTTP_IF_MATCH", ""))
if_unmodified_since = request.META.get("HTTP_IF_UNMODIFIED_SINCE")
if_unmodified_since = if_unmodified_since and parse_http_date_safe(
if_unmodified_since
)
if_none_match_etags = parse_etags(request.META.get("HTTP_IF_NONE_MATCH", ""))
if_modified_since = request.META.get("HTTP_IF_MODIFIED_SINCE")
if_modified_since = if_modified_since and parse_http_date_safe(if_modified_since)
# Evaluation of request preconditions below follows RFC 9110 Section
# 13.2.2.
# Step 1: Test the If-Match precondition.
if if_match_etags and not _if_match_passes(etag, if_match_etags):
return _precondition_failed(request)
# Step 2: Test the If-Unmodified-Since precondition.
if (
not if_match_etags
and if_unmodified_since
and not _if_unmodified_since_passes(last_modified, if_unmodified_since)
):
return _precondition_failed(request)
# Step 3: Test the If-None-Match precondition.
if if_none_match_etags and not _if_none_match_passes(etag, if_none_match_etags):
if request.method in ("GET", "HEAD"):
return _not_modified(request, response)
else:
return _precondition_failed(request)
# Step 4: Test the If-Modified-Since precondition.
if (
not if_none_match_etags
and if_modified_since
and not _if_modified_since_passes(last_modified, if_modified_since)
and request.method in ("GET", "HEAD")
):
return _not_modified(request, response)
# Step 5: Test the If-Range precondition (not supported).
# Step 6: Return original response since there isn't a conditional
# response.
return response
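# Illustrative walk-through (comment only): a GET carrying
# If-None-Match: "abc" against a resource whose current ETag is "abc"
# matches in step 3, so _not_modified() returns a 304 response with the
# RFC-mandated headers copied over from the original response.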
def _if_match_passes(target_etag, etags):
"""
Test the If-Match comparison as defined in RFC 9110 Section 13.1.1.
"""
if not target_etag:
# If there isn't an ETag, then there can't be a match.
return False
elif etags == ["*"]:
# The existence of an ETag means that there is "a current
# representation for the target resource", even if the ETag is weak,
# so there is a match to '*'.
return True
elif target_etag.startswith("W/"):
# A weak ETag can never strongly match another ETag.
return False
else:
# Since the ETag is strong, this will only return True if there's a
# strong match.
return target_etag in etags
def _if_unmodified_since_passes(last_modified, if_unmodified_since):
"""
Test the If-Unmodified-Since comparison as defined in RFC 9110 Section
13.1.4.
"""
return last_modified and last_modified <= if_unmodified_since
def _if_none_match_passes(target_etag, etags):
"""
Test the If-None-Match comparison as defined in RFC 9110 Section 13.1.2.
"""
if not target_etag:
# If there isn't an ETag, then there isn't a match.
return True
elif etags == ["*"]:
# The existence of an ETag means that there is "a current
# representation for the target resource", so there is a match to '*'.
return False
else:
# The comparison should be weak, so look for a match after stripping
# off any weak indicators.
target_etag = target_etag.strip("W/")
etags = (etag.strip("W/") for etag in etags)
return target_etag not in etags
def _if_modified_since_passes(last_modified, if_modified_since):
"""
Test the If-Modified-Since comparison as defined in RFC 9110 Section
13.1.3.
"""
return not last_modified or last_modified > if_modified_since
def patch_response_headers(response, cache_timeout=None):
"""
Add HTTP caching headers to the given HttpResponse: Expires and
Cache-Control.
Each header is only added if it isn't already set.
cache_timeout is in seconds. The CACHE_MIDDLEWARE_SECONDS setting is used
by default.
"""
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
if cache_timeout < 0:
cache_timeout = 0 # Can't have max-age negative
if not response.has_header("Expires"):
response.headers["Expires"] = http_date(time.time() + cache_timeout)
patch_cache_control(response, max_age=cache_timeout)
def add_never_cache_headers(response):
"""
Add headers to a response to indicate that a page should never be cached.
"""
patch_response_headers(response, cache_timeout=-1)
patch_cache_control(
response, no_cache=True, no_store=True, must_revalidate=True, private=True
)
def patch_vary_headers(response, newheaders):
"""
Add (or update) the "Vary" header in the given HttpResponse object.
    newheaders is a list of header names that should be in "Vary". If
    newheaders contains an asterisk, the "Vary" header will consist of a
    single asterisk '*'. Otherwise, existing headers in "Vary" aren't removed.
"""
# Note that we need to keep the original order intact, because cache
# implementations may rely on the order of the Vary contents in, say,
# computing an MD5 hash.
if response.has_header("Vary"):
vary_headers = cc_delim_re.split(response.headers["Vary"])
else:
vary_headers = []
# Use .lower() here so we treat headers as case-insensitive.
existing_headers = {header.lower() for header in vary_headers}
additional_headers = [
newheader
for newheader in newheaders
if newheader.lower() not in existing_headers
]
vary_headers += additional_headers
if "*" in vary_headers:
response.headers["Vary"] = "*"
else:
response.headers["Vary"] = ", ".join(vary_headers)
def has_vary_header(response, header_query):
"""
Check to see if the response has a given header name in its Vary header.
"""
if not response.has_header("Vary"):
return False
vary_headers = cc_delim_re.split(response.headers["Vary"])
existing_headers = {header.lower() for header in vary_headers}
return header_query.lower() in existing_headers
def _i18n_cache_key_suffix(request, cache_key):
"""If necessary, add the current locale or time zone to the cache key."""
if settings.USE_I18N:
# first check if LocaleMiddleware or another middleware added
# LANGUAGE_CODE to request, then fall back to the active language
# which in turn can also fall back to settings.LANGUAGE_CODE
cache_key += ".%s" % getattr(request, "LANGUAGE_CODE", get_language())
if settings.USE_TZ:
cache_key += ".%s" % get_current_timezone_name()
return cache_key
def _generate_cache_key(request, method, headerlist, key_prefix):
"""Return a cache key from the headers given in the header list."""
ctx = md5(usedforsecurity=False)
for header in headerlist:
value = request.META.get(header)
if value is not None:
ctx.update(value.encode())
url = md5(request.build_absolute_uri().encode("ascii"), usedforsecurity=False)
cache_key = "views.decorators.cache.cache_page.%s.%s.%s.%s" % (
key_prefix,
method,
url.hexdigest(),
ctx.hexdigest(),
)
return _i18n_cache_key_suffix(request, cache_key)
def _generate_cache_header_key(key_prefix, request):
"""Return a cache key for the header cache."""
url = md5(request.build_absolute_uri().encode("ascii"), usedforsecurity=False)
cache_key = "views.decorators.cache.cache_header.%s.%s" % (
key_prefix,
url.hexdigest(),
)
return _i18n_cache_key_suffix(request, cache_key)
def get_cache_key(request, key_prefix=None, method="GET", cache=None):
"""
Return a cache key based on the request URL and query. It can be used
in the request phase because it pulls the list of headers to take into
account from the global URL registry and uses those to build a cache key
to check against.
If there isn't a headerlist stored, return None, indicating that the page
needs to be rebuilt.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
headerlist = cache.get(cache_key)
if headerlist is not None:
return _generate_cache_key(request, method, headerlist, key_prefix)
else:
return None
def learn_cache_key(request, response, cache_timeout=None, key_prefix=None, cache=None):
"""
Learn what headers to take into account for some request URL from the
response object. Store those headers in a global URL registry so that
later access to that URL will know what headers to take into account
without building the response object itself. The headers are named in the
Vary header of the response, but we want to prevent response generation.
The list of headers to use for cache key generation is stored in the same
cache as the pages themselves. If the cache ages some data out of the
cache, this just means that we have to build the response once to get at
the Vary header and so at the list of headers to use for the cache key.
"""
if key_prefix is None:
key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
if cache_timeout is None:
cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
cache_key = _generate_cache_header_key(key_prefix, request)
if cache is None:
cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
if response.has_header("Vary"):
is_accept_language_redundant = settings.USE_I18N
# If i18n is used, the generated cache key will be suffixed with the
# current locale. Adding the raw value of Accept-Language is redundant
# in that case and would result in storing the same content under
# multiple keys in the cache. See #18191 for details.
headerlist = []
for header in cc_delim_re.split(response.headers["Vary"]):
header = header.upper().replace("-", "_")
if header != "ACCEPT_LANGUAGE" or not is_accept_language_redundant:
headerlist.append("HTTP_" + header)
headerlist.sort()
cache.set(cache_key, headerlist, cache_timeout)
return _generate_cache_key(request, request.method, headerlist, key_prefix)
else:
# if there is no Vary header, we still need a cache key
# for the request.build_absolute_uri()
cache.set(cache_key, [], cache_timeout)
return _generate_cache_key(request, request.method, [], key_prefix)
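# Illustrative flow (comment only): during the response phase,
# learn_cache_key() stores the Vary-derived header list under the
# _generate_cache_header_key() key; on the next request, get_cache_key()
# reads that list back and rebuilds the same page key, so a cached
# response can be looked up before the view ever runs.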
def _to_tuple(s):
t = s.split("=", 1)
if len(t) == 2:
return t[0].lower(), t[1]
return t[0].lower(), True
|
from unittest import mock
from asgiref.sync import iscoroutinefunction
from django.http import HttpRequest, HttpResponse
from django.test import SimpleTestCase
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control, cache_page, never_cache
class HttpRequestProxy:
def __init__(self, request):
self._request = request
def __getattr__(self, attr):
"""Proxy to the underlying HttpRequest object."""
return getattr(self._request, attr)
class CacheControlDecoratorTest(SimpleTestCase):
def test_wrapped_sync_function_is_not_coroutine_function(self):
def sync_view(request):
return HttpResponse()
wrapped_view = cache_control()(sync_view)
self.assertIs(iscoroutinefunction(wrapped_view), False)
def test_wrapped_async_function_is_coroutine_function(self):
async def async_view(request):
return HttpResponse()
wrapped_view = cache_control()(async_view)
self.assertIs(iscoroutinefunction(wrapped_view), True)
def test_cache_control_decorator_http_request(self):
class MyClass:
@cache_control(a="b")
def a_view(self, request):
return HttpResponse()
msg = (
"cache_control didn't receive an HttpRequest. If you are "
"decorating a classmethod, be sure to use @method_decorator."
)
request = HttpRequest()
with self.assertRaisesMessage(TypeError, msg):
MyClass().a_view(request)
with self.assertRaisesMessage(TypeError, msg):
MyClass().a_view(HttpRequestProxy(request))
async def test_cache_control_decorator_http_request_async_view(self):
class MyClass:
@cache_control(a="b")
async def async_view(self, request):
return HttpResponse()
msg = (
"cache_control didn't receive an HttpRequest. If you are decorating a "
"classmethod, be sure to use @method_decorator."
)
request = HttpRequest()
with self.assertRaisesMessage(TypeError, msg):
await MyClass().async_view(request)
with self.assertRaisesMessage(TypeError, msg):
await MyClass().async_view(HttpRequestProxy(request))
def test_cache_control_decorator_http_request_proxy(self):
class MyClass:
@method_decorator(cache_control(a="b"))
def a_view(self, request):
return HttpResponse()
request = HttpRequest()
response = MyClass().a_view(HttpRequestProxy(request))
self.assertEqual(response.headers["Cache-Control"], "a=b")
def test_cache_control_empty_decorator(self):
@cache_control()
def a_view(request):
return HttpResponse()
response = a_view(HttpRequest())
self.assertEqual(response.get("Cache-Control"), "")
async def test_cache_control_empty_decorator_async_view(self):
@cache_control()
async def async_view(request):
return HttpResponse()
response = await async_view(HttpRequest())
self.assertEqual(response.get("Cache-Control"), "")
def test_cache_control_full_decorator(self):
@cache_control(max_age=123, private=True, public=True, custom=456)
def a_view(request):
return HttpResponse()
response = a_view(HttpRequest())
cache_control_items = response.get("Cache-Control").split(", ")
self.assertEqual(
set(cache_control_items), {"max-age=123", "private", "public", "custom=456"}
)
async def test_cache_control_full_decorator_async_view(self):
@cache_control(max_age=123, private=True, public=True, custom=456)
async def async_view(request):
return HttpResponse()
response = await async_view(HttpRequest())
cache_control_items = response.get("Cache-Control").split(", ")
self.assertEqual(
set(cache_control_items), {"max-age=123", "private", "public", "custom=456"}
)
class CachePageDecoratorTest(SimpleTestCase):
def test_cache_page(self):
def my_view(request):
return "response"
my_view_cached = cache_page(123)(my_view)
self.assertEqual(my_view_cached(HttpRequest()), "response")
my_view_cached2 = cache_page(123, key_prefix="test")(my_view)
self.assertEqual(my_view_cached2(HttpRequest()), "response")
class NeverCacheDecoratorTest(SimpleTestCase):
def test_wrapped_sync_function_is_not_coroutine_function(self):
def sync_view(request):
return HttpResponse()
wrapped_view = never_cache(sync_view)
self.assertIs(iscoroutinefunction(wrapped_view), False)
def test_wrapped_async_function_is_coroutine_function(self):
async def async_view(request):
return HttpResponse()
wrapped_view = never_cache(async_view)
self.assertIs(iscoroutinefunction(wrapped_view), True)
@mock.patch("time.time")
def test_never_cache_decorator_headers(self, mocked_time):
@never_cache
def a_view(request):
return HttpResponse()
mocked_time.return_value = 1167616461.0
response = a_view(HttpRequest())
self.assertEqual(
response.headers["Expires"],
"Mon, 01 Jan 2007 01:54:21 GMT",
)
self.assertEqual(
response.headers["Cache-Control"],
"max-age=0, no-cache, no-store, must-revalidate, private",
)
@mock.patch("time.time")
async def test_never_cache_decorator_headers_async_view(self, mocked_time):
@never_cache
async def async_view(request):
return HttpResponse()
mocked_time.return_value = 1167616461.0
response = await async_view(HttpRequest())
self.assertEqual(response.headers["Expires"], "Mon, 01 Jan 2007 01:54:21 GMT")
self.assertEqual(
response.headers["Cache-Control"],
"max-age=0, no-cache, no-store, must-revalidate, private",
)
def test_never_cache_decorator_expires_not_overridden(self):
@never_cache
def a_view(request):
return HttpResponse(headers={"Expires": "tomorrow"})
response = a_view(HttpRequest())
self.assertEqual(response.headers["Expires"], "tomorrow")
async def test_never_cache_decorator_expires_not_overridden_async_view(self):
@never_cache
async def async_view(request):
return HttpResponse(headers={"Expires": "tomorrow"})
response = await async_view(HttpRequest())
self.assertEqual(response.headers["Expires"], "tomorrow")
def test_never_cache_decorator_http_request(self):
class MyClass:
@never_cache
def a_view(self, request):
return HttpResponse()
request = HttpRequest()
msg = (
"never_cache didn't receive an HttpRequest. If you are decorating "
"a classmethod, be sure to use @method_decorator."
)
with self.assertRaisesMessage(TypeError, msg):
MyClass().a_view(request)
with self.assertRaisesMessage(TypeError, msg):
MyClass().a_view(HttpRequestProxy(request))
async def test_never_cache_decorator_http_request_async_view(self):
class MyClass:
@never_cache
async def async_view(self, request):
return HttpResponse()
request = HttpRequest()
msg = (
"never_cache didn't receive an HttpRequest. If you are decorating "
"a classmethod, be sure to use @method_decorator."
)
with self.assertRaisesMessage(TypeError, msg):
await MyClass().async_view(request)
with self.assertRaisesMessage(TypeError, msg):
await MyClass().async_view(HttpRequestProxy(request))
def test_never_cache_decorator_http_request_proxy(self):
class MyClass:
@method_decorator(never_cache)
def a_view(self, request):
return HttpResponse()
request = HttpRequest()
response = MyClass().a_view(HttpRequestProxy(request))
self.assertIn("Cache-Control", response.headers)
self.assertIn("Expires", response.headers)
|
./temp_repos/django/django/utils/cache.py
|
./temp_repos/django/tests/decorators/test_cache.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: collections, time, django.conf, django.http, django.utils.timezone, django.core.cache, django.utils.http, django.utils.translation, django.utils.regex_helper, hashlib
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
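Since the module exposes functions rather than a single class, a minimal sketch can exercise two representative helpers from the source above. The settings.configure() guard is an assumption for running outside a project; the cache-backed helpers (get_cache_key/learn_cache_key) would additionally need the caches machinery mocked and are not covered here.
import unittest

from django.conf import settings

if not settings.configured:
    settings.configure()  # assumption: lets HttpResponse resolve defaults

from django.http import HttpResponse
from django.utils.cache import get_max_age, patch_cache_control, patch_vary_headers


class CacheUtilsTests(unittest.TestCase):
    def test_patch_cache_control_keeps_smaller_max_age(self):
        # Arrange
        response = HttpResponse()
        response.headers["Cache-Control"] = "max-age=100"
        # Act
        patch_cache_control(response, max_age=30)
        # Assert: the minimum of the existing and requested ages wins.
        self.assertEqual(get_max_age(response), 30)

    def test_get_max_age_returns_none_for_non_integer(self):
        # Arrange: a malformed directive is the failure scenario.
        response = HttpResponse()
        response.headers["Cache-Control"] = "max-age=never"
        # Act / Assert
        self.assertIsNone(get_max_age(response))

    def test_patch_vary_headers_appends_then_star_wins(self):
        # Arrange
        response = HttpResponse()
        response.headers["Vary"] = "Accept-Encoding"
        # Act / Assert: new headers append; an asterisk collapses the list.
        patch_vary_headers(response, ["Cookie"])
        self.assertEqual(response.headers["Vary"], "Accept-Encoding, Cookie")
        patch_vary_headers(response, ["*"])
        self.assertEqual(response.headers["Vary"], "*")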
|
Unknown
|
python
|
from django.http import HttpResponse
def empty_view(request, *args, **kwargs):
return HttpResponse()
|
import datetime
from unittest import mock
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.views import shortcut
from django.contrib.sites.models import Site
from django.contrib.sites.shortcuts import get_current_site
from django.http import Http404, HttpRequest
from django.test import TestCase, override_settings
from .models import ( # isort:skip
Article,
Author,
FooWithBrokenAbsoluteUrl,
FooWithoutUrl,
FooWithUrl,
ModelWithM2MToSite,
ModelWithNullFKToSite,
SchemeIncludedURL,
Site as MockSite,
UUIDModel,
)
@override_settings(ROOT_URLCONF="contenttypes_tests.urls")
class ContentTypesViewsTests(TestCase):
@classmethod
def setUpTestData(cls):
# Don't use the manager to ensure the site exists with pk=1, regardless
# of whether or not it already exists.
cls.site1 = Site(pk=1, domain="testserver", name="testserver")
cls.site1.save()
cls.author1 = Author.objects.create(name="Boris")
cls.article1 = Article.objects.create(
title="Old Article",
slug="old_article",
author=cls.author1,
date_created=datetime.datetime(2001, 1, 1, 21, 22, 23),
)
cls.article2 = Article.objects.create(
title="Current Article",
slug="current_article",
author=cls.author1,
date_created=datetime.datetime(2007, 9, 17, 21, 22, 23),
)
cls.article3 = Article.objects.create(
title="Future Article",
slug="future_article",
author=cls.author1,
date_created=datetime.datetime(3000, 1, 1, 21, 22, 23),
)
cls.scheme1 = SchemeIncludedURL.objects.create(
url="http://test_scheme_included_http/"
)
cls.scheme2 = SchemeIncludedURL.objects.create(
url="https://test_scheme_included_https/"
)
cls.scheme3 = SchemeIncludedURL.objects.create(
url="//test_default_scheme_kept/"
)
def setUp(self):
Site.objects.clear_cache()
def test_shortcut_with_absolute_url(self):
"""
Can view a shortcut for an Author object that has a get_absolute_url
method
"""
for obj in Author.objects.all():
with self.subTest(obj=obj):
short_url = "/shortcut/%s/%s/" % (
ContentType.objects.get_for_model(Author).id,
obj.pk,
)
response = self.client.get(short_url)
self.assertRedirects(
response,
"http://testserver%s" % obj.get_absolute_url(),
target_status_code=404,
)
def test_shortcut_with_absolute_url_including_scheme(self):
"""
        Can view a shortcut when an object's get_absolute_url returns a full
        URL; the tested URLs are "http://...", "https://...", and "//...".
"""
for obj in SchemeIncludedURL.objects.all():
with self.subTest(obj=obj):
short_url = "/shortcut/%s/%s/" % (
ContentType.objects.get_for_model(SchemeIncludedURL).id,
obj.pk,
)
response = self.client.get(short_url)
self.assertRedirects(
response, obj.get_absolute_url(), fetch_redirect_response=False
)
def test_shortcut_no_absolute_url(self):
"""
Shortcuts for an object that has no get_absolute_url() method raise
404.
"""
for obj in Article.objects.all():
with self.subTest(obj=obj):
short_url = "/shortcut/%s/%s/" % (
ContentType.objects.get_for_model(Article).id,
obj.pk,
)
response = self.client.get(short_url)
self.assertEqual(response.status_code, 404)
def test_wrong_type_pk(self):
short_url = "/shortcut/%s/%s/" % (
ContentType.objects.get_for_model(Author).id,
"nobody/expects",
)
response = self.client.get(short_url)
self.assertEqual(response.status_code, 404)
def test_shortcut_bad_pk(self):
short_url = "/shortcut/%s/%s/" % (
ContentType.objects.get_for_model(Author).id,
"42424242",
)
response = self.client.get(short_url)
self.assertEqual(response.status_code, 404)
def test_nonint_content_type(self):
an_author = Author.objects.all()[0]
short_url = "/shortcut/%s/%s/" % ("spam", an_author.pk)
response = self.client.get(short_url)
self.assertEqual(response.status_code, 404)
def test_bad_content_type(self):
an_author = Author.objects.all()[0]
short_url = "/shortcut/%s/%s/" % (42424242, an_author.pk)
response = self.client.get(short_url)
self.assertEqual(response.status_code, 404)
@override_settings(ROOT_URLCONF="contenttypes_tests.urls")
class ContentTypesViewsSiteRelTests(TestCase):
def setUp(self):
Site.objects.clear_cache()
@classmethod
def setUpTestData(cls):
cls.site_2 = Site.objects.create(domain="example2.com", name="example2.com")
cls.site_3 = Site.objects.create(domain="example3.com", name="example3.com")
@mock.patch("django.apps.apps.get_model")
def test_shortcut_view_with_null_site_fk(self, get_model):
"""
The shortcut view works if a model's ForeignKey to site is None.
"""
get_model.side_effect = lambda *args, **kwargs: (
MockSite if args[0] == "sites.Site" else ModelWithNullFKToSite
)
obj = ModelWithNullFKToSite.objects.create(title="title")
url = "/shortcut/%s/%s/" % (
ContentType.objects.get_for_model(ModelWithNullFKToSite).id,
obj.pk,
)
response = self.client.get(url)
expected_url = "http://example.com%s" % obj.get_absolute_url()
self.assertRedirects(response, expected_url, fetch_redirect_response=False)
@mock.patch("django.apps.apps.get_model")
def test_shortcut_view_with_site_m2m(self, get_model):
"""
When the object has a ManyToManyField to Site, redirect to the current
site if it's attached to the object or to the domain of the first site
found in the m2m relationship.
"""
get_model.side_effect = lambda *args, **kwargs: (
MockSite if args[0] == "sites.Site" else ModelWithM2MToSite
)
        # get_current_site() will look up a Site object, so these must match
        # the domains in the MockSite model.
MockSite.objects.bulk_create(
[
MockSite(pk=1, domain="example.com"),
MockSite(pk=self.site_2.pk, domain=self.site_2.domain),
MockSite(pk=self.site_3.pk, domain=self.site_3.domain),
]
)
ct = ContentType.objects.get_for_model(ModelWithM2MToSite)
site_3_obj = ModelWithM2MToSite.objects.create(
title="Not Linked to Current Site"
)
site_3_obj.sites.add(MockSite.objects.get(pk=self.site_3.pk))
expected_url = "http://%s%s" % (
self.site_3.domain,
site_3_obj.get_absolute_url(),
)
with self.settings(SITE_ID=self.site_2.pk):
# Redirects to the domain of the first Site found in the m2m
# relationship (ordering is arbitrary).
response = self.client.get("/shortcut/%s/%s/" % (ct.pk, site_3_obj.pk))
self.assertRedirects(response, expected_url, fetch_redirect_response=False)
obj_with_sites = ModelWithM2MToSite.objects.create(
title="Linked to Current Site"
)
obj_with_sites.sites.set(MockSite.objects.all())
shortcut_url = "/shortcut/%s/%s/" % (ct.pk, obj_with_sites.pk)
expected_url = "http://%s%s" % (
self.site_2.domain,
obj_with_sites.get_absolute_url(),
)
with self.settings(SITE_ID=self.site_2.pk):
# Redirects to the domain of the Site matching the current site's
# domain.
response = self.client.get(shortcut_url)
self.assertRedirects(response, expected_url, fetch_redirect_response=False)
with self.settings(SITE_ID=None, ALLOWED_HOSTS=[self.site_2.domain]):
# Redirects to the domain of the Site matching the request's host
# header.
response = self.client.get(shortcut_url, SERVER_NAME=self.site_2.domain)
self.assertRedirects(response, expected_url, fetch_redirect_response=False)
class ShortcutViewTests(TestCase):
def setUp(self):
self.request = HttpRequest()
self.request.META = {"SERVER_NAME": "Example.com", "SERVER_PORT": "80"}
@override_settings(ALLOWED_HOSTS=["example.com"])
def test_not_dependent_on_sites_app(self):
"""
The view returns a complete URL regardless of whether the sites
framework is installed.
"""
user_ct = ContentType.objects.get_for_model(FooWithUrl)
obj = FooWithUrl.objects.create(name="john")
with self.modify_settings(INSTALLED_APPS={"append": "django.contrib.sites"}):
response = shortcut(self.request, user_ct.id, obj.id)
self.assertEqual(
"http://%s/users/john/" % get_current_site(self.request).domain,
response.headers.get("location"),
)
with self.modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"}):
response = shortcut(self.request, user_ct.id, obj.id)
self.assertEqual(
"http://Example.com/users/john/", response.headers.get("location")
)
def test_model_without_get_absolute_url(self):
"""The view returns 404 when Model.get_absolute_url() isn't defined."""
user_ct = ContentType.objects.get_for_model(FooWithoutUrl)
obj = FooWithoutUrl.objects.create(name="john")
with self.assertRaises(Http404):
shortcut(self.request, user_ct.id, obj.id)
def test_model_with_broken_get_absolute_url(self):
"""
The view doesn't catch an AttributeError raised by
Model.get_absolute_url() (#8997).
"""
user_ct = ContentType.objects.get_for_model(FooWithBrokenAbsoluteUrl)
obj = FooWithBrokenAbsoluteUrl.objects.create(name="john")
with self.assertRaises(AttributeError):
shortcut(self.request, user_ct.id, obj.id)
def test_invalid_uuid_pk_raises_404(self):
content_type = ContentType.objects.get_for_model(UUIDModel)
invalid_uuid = "1234-zzzz-5678-0000-invaliduuid"
with self.assertRaisesMessage(
Http404,
f"Content type {content_type.id} object {invalid_uuid} doesn’t exist",
):
shortcut(self.request, content_type.id, invalid_uuid)
|
./temp_repos/django/tests/urlpatterns/views.py
|
./temp_repos/django/tests/contenttypes_tests/test_views.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.http
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above (if any).
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Unknown
|
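For illustration, a self-contained sketch of the prompt above. It assumes a configured Django settings module, since HttpResponse consults DEFAULT_CHARSET at construction time; empty_view is restated inline (in the dataset it lives in the urlpatterns test app), and the failure case patches this test module's own HttpResponse name.

from unittest import mock

from django.http import HttpRequest, HttpResponse
from django.test import SimpleTestCase


def empty_view(request, *args, **kwargs):
    return HttpResponse()


class EmptyViewTests(SimpleTestCase):
    def test_returns_empty_200_response(self):
        # Arrange: the view ignores its request, so a spec'd mock suffices.
        request = mock.MagicMock(spec=HttpRequest)
        # Act
        response = empty_view(request, "positional", keyword="ignored")
        # Assert
        self.assertIsInstance(response, HttpResponse)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b"")

    def test_propagates_response_construction_errors(self):
        # Arrange: make the module-level HttpResponse factory raise.
        with mock.patch(f"{__name__}.HttpResponse", side_effect=RuntimeError):
            # Act / Assert: the view does not swallow the error.
            with self.assertRaises(RuntimeError):
                empty_view(mock.MagicMock(spec=HttpRequest))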
python
|
"""
Creates the default Site object.
"""
from django.apps import apps as global_apps
from django.conf import settings
from django.core.management.color import no_style
from django.db import DEFAULT_DB_ALIAS, connections, router
def create_default_site(
app_config,
verbosity=2,
interactive=True,
using=DEFAULT_DB_ALIAS,
apps=global_apps,
**kwargs,
):
try:
Site = apps.get_model("sites", "Site")
except LookupError:
return
if not router.allow_migrate_model(using, Site):
return
if not Site.objects.using(using).exists():
# The default settings set SITE_ID = 1, and some tests in Django's test
# suite rely on this value. However, if database sequences are reused
# (e.g. in the test suite after flush/syncdb), it isn't guaranteed that
# the next id will be 1, so we coerce it. See #15573 and #16353. This
# can also crop up outside of tests - see #15346.
if verbosity >= 2:
print("Creating example.com Site object")
Site(
pk=getattr(settings, "SITE_ID", 1), domain="example.com", name="example.com"
).save(using=using)
# We set an explicit pk instead of relying on auto-incrementation,
# so we need to reset the database sequence. See #17415.
sequence_sql = connections[using].ops.sequence_reset_sql(no_style(), [Site])
if sequence_sql:
if verbosity >= 2:
print("Resetting sequence")
with connections[using].cursor() as cursor:
for command in sequence_sql:
cursor.execute(command)
|
from unittest import mock
from django.apps.registry import Apps, apps
from django.contrib.contenttypes import management as contenttypes_management
from django.contrib.contenttypes.models import ContentType
from django.core.management import call_command
from django.test import TestCase, modify_settings
from django.test.utils import captured_stdout
from .models import ModelWithNullFKToSite, Post
@modify_settings(INSTALLED_APPS={"append": ["empty_models", "no_models"]})
class RemoveStaleContentTypesTests(TestCase):
# Speed up tests by avoiding retrieving ContentTypes for all test apps.
available_apps = [
"contenttypes_tests",
"empty_models",
"no_models",
"django.contrib.contenttypes",
]
@classmethod
def setUpTestData(cls):
with captured_stdout():
call_command(
"remove_stale_contenttypes",
interactive=False,
include_stale_apps=True,
verbosity=2,
)
cls.before_count = ContentType.objects.count()
cls.content_type = ContentType.objects.create(
app_label="contenttypes_tests", model="Fake"
)
def setUp(self):
self.app_config = apps.get_app_config("contenttypes_tests")
def test_interactive_true_with_dependent_objects(self):
"""
interactive mode (the default) deletes stale content types and warns of
dependent objects.
"""
post = Post.objects.create(title="post", content_type=self.content_type)
        # A related object is needed to show that a custom collector with
        # can_fast_delete=False is required.
ModelWithNullFKToSite.objects.create(post=post)
with mock.patch("builtins.input", return_value="yes"):
with captured_stdout() as stdout:
call_command("remove_stale_contenttypes", verbosity=2, stdout=stdout)
self.assertEqual(Post.objects.count(), 0)
output = stdout.getvalue()
self.assertIn("- Content type for contenttypes_tests.Fake", output)
self.assertIn("- 1 contenttypes_tests.Post object(s)", output)
self.assertIn("- 1 contenttypes_tests.ModelWithNullFKToSite", output)
self.assertIn("Deleting stale content type", output)
self.assertEqual(ContentType.objects.count(), self.before_count)
def test_interactive_true_without_dependent_objects(self):
"""
interactive mode deletes stale content types even if there aren't any
dependent objects.
"""
with mock.patch("builtins.input", return_value="yes"):
with captured_stdout() as stdout:
call_command("remove_stale_contenttypes", verbosity=2)
self.assertIn("Deleting stale content type", stdout.getvalue())
self.assertEqual(ContentType.objects.count(), self.before_count)
def test_interactive_false(self):
"""non-interactive mode deletes stale content types."""
with captured_stdout() as stdout:
call_command("remove_stale_contenttypes", interactive=False, verbosity=2)
self.assertIn("Deleting stale content type", stdout.getvalue())
self.assertEqual(ContentType.objects.count(), self.before_count)
def test_unavailable_content_type_model(self):
"""A ContentType isn't created if the model isn't available."""
apps = Apps()
with self.assertNumQueries(0):
contenttypes_management.create_contenttypes(
self.app_config, interactive=False, verbosity=0, apps=apps
)
self.assertEqual(ContentType.objects.count(), self.before_count + 1)
@modify_settings(INSTALLED_APPS={"remove": ["empty_models"]})
def test_contenttypes_removed_in_installed_apps_without_models(self):
ContentType.objects.create(app_label="empty_models", model="Fake 1")
ContentType.objects.create(app_label="no_models", model="Fake 2")
with (
mock.patch("builtins.input", return_value="yes"),
captured_stdout() as stdout,
):
call_command("remove_stale_contenttypes", verbosity=2)
self.assertNotIn(
"Deleting stale content type 'empty_models | Fake 1'",
stdout.getvalue(),
)
self.assertIn(
"Deleting stale content type 'no_models | Fake 2'",
stdout.getvalue(),
)
self.assertEqual(ContentType.objects.count(), self.before_count + 1)
@modify_settings(INSTALLED_APPS={"remove": ["empty_models"]})
def test_contenttypes_removed_for_apps_not_in_installed_apps(self):
ContentType.objects.create(app_label="empty_models", model="Fake 1")
ContentType.objects.create(app_label="no_models", model="Fake 2")
with (
mock.patch("builtins.input", return_value="yes"),
captured_stdout() as stdout,
):
call_command(
"remove_stale_contenttypes", include_stale_apps=True, verbosity=2
)
self.assertIn(
"Deleting stale content type 'empty_models | Fake 1'",
stdout.getvalue(),
)
self.assertIn(
"Deleting stale content type 'no_models | Fake 2'",
stdout.getvalue(),
)
self.assertEqual(ContentType.objects.count(), self.before_count)
|
./temp_repos/django/django/contrib/sites/management.py
|
./temp_repos/django/tests/contenttypes_tests/test_management.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.db, django.conf, django.apps, django.core.management.color
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above (if any).
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Unknown
|
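For illustration, a sketch of the prompt above. Because create_default_site receives its app registry as a parameter, its early-return guards can be driven with plain mocks and no database; the test only assumes django.contrib.sites.management is importable.

from unittest import TestCase, mock

from django.contrib.sites import management


class CreateDefaultSiteTests(TestCase):
    def test_returns_early_when_sites_app_is_missing(self):
        # Arrange: an app registry that cannot resolve sites.Site.
        apps = mock.MagicMock()
        apps.get_model.side_effect = LookupError
        # Act
        result = management.create_default_site(mock.MagicMock(), apps=apps)
        # Assert
        self.assertIsNone(result)
        apps.get_model.assert_called_once_with("sites", "Site")

    def test_returns_early_when_migration_is_disallowed(self):
        # Arrange: the router vetoes migrating the Site model.
        apps = mock.MagicMock()
        site_model = mock.MagicMock()
        apps.get_model.return_value = site_model
        with mock.patch.object(
            management.router, "allow_migrate_model", return_value=False
        ):
            # Act
            result = management.create_default_site(mock.MagicMock(), apps=apps)
        # Assert: no query is issued against the Site manager.
        self.assertIsNone(result)
        site_model.objects.using.assert_not_called()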
python
|
import collections
from itertools import chain
from django.apps import apps
from django.conf import settings
from django.contrib.admin.exceptions import NotRegistered
from django.contrib.admin.utils import NotRelationField, flatten, get_fields_from_path
from django.core import checks
from django.core.exceptions import FieldDoesNotExist
from django.db import models
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import Combinable
from django.forms.models import BaseModelForm, BaseModelFormSet, _get_foreign_key
from django.template import engines
from django.template.backends.django import DjangoTemplates
from django.utils.module_loading import import_string
def _issubclass(cls, classinfo):
"""
issubclass() variant that doesn't raise an exception if cls isn't a
class.
"""
try:
return issubclass(cls, classinfo)
except TypeError:
return False
def _contains_subclass(class_path, candidate_paths):
"""
Return whether or not a dotted class path (or a subclass of that class) is
found in a list of candidate paths.
"""
cls = import_string(class_path)
for path in candidate_paths:
try:
candidate_cls = import_string(path)
except ImportError:
# ImportErrors are raised elsewhere.
continue
if _issubclass(candidate_cls, cls):
return True
return False
def check_admin_app(app_configs, **kwargs):
from django.contrib.admin.sites import all_sites
errors = []
for site in all_sites:
errors.extend(site.check(app_configs))
return errors
def check_dependencies(**kwargs):
"""
Check that the admin's dependencies are correctly installed.
"""
from django.contrib.admin.sites import all_sites
if not apps.is_installed("django.contrib.admin"):
return []
errors = []
app_dependencies = (
("django.contrib.contenttypes", 401),
("django.contrib.auth", 405),
("django.contrib.messages", 406),
)
for app_name, error_code in app_dependencies:
if not apps.is_installed(app_name):
errors.append(
checks.Error(
"'%s' must be in INSTALLED_APPS in order to use the admin "
"application." % app_name,
id="admin.E%d" % error_code,
)
)
for engine in engines.all():
if isinstance(engine, DjangoTemplates):
django_templates_instance = engine.engine
break
else:
django_templates_instance = None
if not django_templates_instance:
errors.append(
checks.Error(
"A 'django.template.backends.django.DjangoTemplates' instance "
"must be configured in TEMPLATES in order to use the admin "
"application.",
id="admin.E403",
)
)
else:
if (
"django.contrib.auth.context_processors.auth"
not in django_templates_instance.context_processors
and _contains_subclass(
"django.contrib.auth.backends.ModelBackend",
settings.AUTHENTICATION_BACKENDS,
)
):
errors.append(
checks.Error(
"'django.contrib.auth.context_processors.auth' must be "
"enabled in DjangoTemplates (TEMPLATES) if using the default "
"auth backend in order to use the admin application.",
id="admin.E402",
)
)
if (
"django.contrib.messages.context_processors.messages"
not in django_templates_instance.context_processors
):
errors.append(
checks.Error(
"'django.contrib.messages.context_processors.messages' must "
"be enabled in DjangoTemplates (TEMPLATES) in order to use "
"the admin application.",
id="admin.E404",
)
)
sidebar_enabled = any(site.enable_nav_sidebar for site in all_sites)
if (
sidebar_enabled
and "django.template.context_processors.request"
not in django_templates_instance.context_processors
):
errors.append(
checks.Warning(
"'django.template.context_processors.request' must be enabled "
"in DjangoTemplates (TEMPLATES) in order to use the admin "
"navigation sidebar.",
id="admin.W411",
)
)
if not _contains_subclass(
"django.contrib.auth.middleware.AuthenticationMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.auth.middleware.AuthenticationMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
id="admin.E408",
)
)
if not _contains_subclass(
"django.contrib.messages.middleware.MessageMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.messages.middleware.MessageMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
id="admin.E409",
)
)
if not _contains_subclass(
"django.contrib.sessions.middleware.SessionMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.sessions.middleware.SessionMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
hint=(
"Insert "
"'django.contrib.sessions.middleware.SessionMiddleware' "
"before "
"'django.contrib.auth.middleware.AuthenticationMiddleware'."
),
id="admin.E410",
)
)
return errors
class BaseModelAdminChecks:
def check(self, admin_obj, **kwargs):
return [
*self._check_autocomplete_fields(admin_obj),
*self._check_raw_id_fields(admin_obj),
*self._check_fields(admin_obj),
*self._check_fieldsets(admin_obj),
*self._check_exclude(admin_obj),
*self._check_form(admin_obj),
*self._check_filter_vertical(admin_obj),
*self._check_filter_horizontal(admin_obj),
*self._check_radio_fields(admin_obj),
*self._check_prepopulated_fields(admin_obj),
*self._check_view_on_site_url(admin_obj),
*self._check_ordering(admin_obj),
*self._check_readonly_fields(admin_obj),
]
def _check_autocomplete_fields(self, obj):
"""
Check that `autocomplete_fields` is a list or tuple of model fields.
"""
if not isinstance(obj.autocomplete_fields, (list, tuple)):
return must_be(
"a list or tuple",
option="autocomplete_fields",
obj=obj,
id="admin.E036",
)
else:
return list(
chain.from_iterable(
[
self._check_autocomplete_fields_item(
obj, field_name, "autocomplete_fields[%d]" % index
)
for index, field_name in enumerate(obj.autocomplete_fields)
]
)
)
def _check_autocomplete_fields_item(self, obj, field_name, label):
"""
Check that an item in `autocomplete_fields` is a ForeignKey or a
ManyToManyField and that the item has a related ModelAdmin with
search_fields defined.
"""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E037"
)
else:
if not field.many_to_many and not isinstance(field, models.ForeignKey):
return must_be(
"a foreign key or a many-to-many field",
option=label,
obj=obj,
id="admin.E038",
)
try:
related_admin = obj.admin_site.get_model_admin(field.remote_field.model)
except NotRegistered:
return [
checks.Error(
'An admin for model "%s" has to be registered '
"to be referenced by %s.autocomplete_fields."
% (
field.remote_field.model.__name__,
type(obj).__name__,
),
obj=obj.__class__,
id="admin.E039",
)
]
else:
if not related_admin.search_fields:
return [
checks.Error(
'%s must define "search_fields", because it\'s '
"referenced by %s.autocomplete_fields."
% (
related_admin.__class__.__name__,
type(obj).__name__,
),
obj=obj.__class__,
id="admin.E040",
)
]
return []
def _check_raw_id_fields(self, obj):
"""Check that `raw_id_fields` only contains field names that are listed
on the model."""
if not isinstance(obj.raw_id_fields, (list, tuple)):
return must_be(
"a list or tuple", option="raw_id_fields", obj=obj, id="admin.E001"
)
else:
return list(
chain.from_iterable(
self._check_raw_id_fields_item(
obj, field_name, "raw_id_fields[%d]" % index
)
for index, field_name in enumerate(obj.raw_id_fields)
)
)
def _check_raw_id_fields_item(self, obj, field_name, label):
"""Check an item of `raw_id_fields`, i.e. check that field named
`field_name` exists in model `model` and is a ForeignKey or a
ManyToManyField."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E002"
)
else:
# Using attname is not supported.
if field.name != field_name:
return refer_to_missing_field(
field=field_name,
option=label,
obj=obj,
id="admin.E002",
)
if not field.many_to_many and not isinstance(field, models.ForeignKey):
return must_be(
"a foreign key or a many-to-many field",
option=label,
obj=obj,
id="admin.E003",
)
else:
return []
def _check_fields(self, obj):
"""Check that `fields` only refer to existing fields, doesn't contain
duplicates. Check if at most one of `fields` and `fieldsets` is
defined.
"""
if obj.fields is None:
return []
elif not isinstance(obj.fields, (list, tuple)):
return must_be("a list or tuple", option="fields", obj=obj, id="admin.E004")
elif obj.fieldsets:
return [
checks.Error(
"Both 'fieldsets' and 'fields' are specified.",
obj=obj.__class__,
id="admin.E005",
)
]
field_counts = collections.Counter(flatten(obj.fields))
if duplicate_fields := [
field for field, count in field_counts.items() if count > 1
]:
return [
checks.Error(
"The value of 'fields' contains duplicate field(s).",
hint="Remove duplicates of %s."
% ", ".join(map(repr, duplicate_fields)),
obj=obj.__class__,
id="admin.E006",
)
]
return list(
chain.from_iterable(
self._check_field_spec(obj, field_name, "fields")
for field_name in obj.fields
)
)
def _check_fieldsets(self, obj):
"""Check that fieldsets is properly formatted and doesn't contain
duplicates."""
if obj.fieldsets is None:
return []
elif not isinstance(obj.fieldsets, (list, tuple)):
return must_be(
"a list or tuple", option="fieldsets", obj=obj, id="admin.E007"
)
else:
seen_fields = []
return list(
chain.from_iterable(
self._check_fieldsets_item(
obj, fieldset, "fieldsets[%d]" % index, seen_fields
)
for index, fieldset in enumerate(obj.fieldsets)
)
)
def _check_fieldsets_item(self, obj, fieldset, label, seen_fields):
"""Check an item of `fieldsets`, i.e. check that this is a pair of a
set name and a dictionary containing "fields" key."""
if not isinstance(fieldset, (list, tuple)):
return must_be("a list or tuple", option=label, obj=obj, id="admin.E008")
elif len(fieldset) != 2:
return must_be("of length 2", option=label, obj=obj, id="admin.E009")
elif not isinstance(fieldset[1], dict):
return must_be(
"a dictionary", option="%s[1]" % label, obj=obj, id="admin.E010"
)
elif "fields" not in fieldset[1]:
return [
checks.Error(
"The value of '%s[1]' must contain the key 'fields'." % label,
obj=obj.__class__,
id="admin.E011",
)
]
elif not isinstance(fieldset[1]["fields"], (list, tuple)):
return must_be(
"a list or tuple",
option="%s[1]['fields']" % label,
obj=obj,
id="admin.E008",
)
fieldset_fields = flatten(fieldset[1]["fields"])
seen_fields.extend(fieldset_fields)
field_counts = collections.Counter(seen_fields)
fieldset_fields_set = set(fieldset_fields)
if duplicate_fields := [
field
for field, count in field_counts.items()
if count > 1 and field in fieldset_fields_set
]:
return [
checks.Error(
"There are duplicate field(s) in '%s[1]'." % label,
hint="Remove duplicates of %s."
% ", ".join(map(repr, duplicate_fields)),
obj=obj.__class__,
id="admin.E012",
)
]
return list(
chain.from_iterable(
self._check_field_spec(obj, fieldset_fields, '%s[1]["fields"]' % label)
for fieldset_fields in fieldset[1]["fields"]
)
)
def _check_field_spec(self, obj, fields, label):
"""`fields` should be an item of `fields` or an item of
fieldset[1]['fields'] for any `fieldset` in `fieldsets`. It should be a
field name or a tuple of field names."""
if isinstance(fields, tuple):
return list(
chain.from_iterable(
self._check_field_spec_item(
obj, field_name, "%s[%d]" % (label, index)
)
for index, field_name in enumerate(fields)
)
)
else:
return self._check_field_spec_item(obj, fields, label)
def _check_field_spec_item(self, obj, field_name, label):
if field_name in obj.readonly_fields:
            # Stuff can be put in fields that isn't actually a model field if
            # it's in readonly_fields; readonly_fields will handle the
            # validation of such things.
return []
else:
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
# If we can't find a field on the model that matches, it could
# be an extra field on the form.
return []
else:
if (
isinstance(field, models.ManyToManyField)
and not field.remote_field.through._meta.auto_created
):
return [
checks.Error(
"The value of '%s' cannot include the ManyToManyField "
"'%s', because that field manually specifies a "
"relationship model." % (label, field_name),
obj=obj.__class__,
id="admin.E013",
)
]
else:
return []
def _check_exclude(self, obj):
"""Check that exclude is a sequence without duplicates."""
if obj.exclude is None: # default value is None
return []
elif not isinstance(obj.exclude, (list, tuple)):
return must_be(
"a list or tuple", option="exclude", obj=obj, id="admin.E014"
)
field_counts = collections.Counter(obj.exclude)
if duplicate_fields := [
field for field, count in field_counts.items() if count > 1
]:
return [
checks.Error(
"The value of 'exclude' contains duplicate field(s).",
hint="Remove duplicates of %s."
% ", ".join(map(repr, duplicate_fields)),
obj=obj.__class__,
id="admin.E015",
)
]
else:
return []
def _check_form(self, obj):
"""Check that form subclasses BaseModelForm."""
if not _issubclass(obj.form, BaseModelForm):
return must_inherit_from(
parent="BaseModelForm", option="form", obj=obj, id="admin.E016"
)
else:
return []
def _check_filter_vertical(self, obj):
"""Check that filter_vertical is a sequence of field names."""
if not isinstance(obj.filter_vertical, (list, tuple)):
return must_be(
"a list or tuple", option="filter_vertical", obj=obj, id="admin.E017"
)
else:
return list(
chain.from_iterable(
self._check_filter_item(
obj, field_name, "filter_vertical[%d]" % index
)
for index, field_name in enumerate(obj.filter_vertical)
)
)
def _check_filter_horizontal(self, obj):
"""Check that filter_horizontal is a sequence of field names."""
if not isinstance(obj.filter_horizontal, (list, tuple)):
return must_be(
"a list or tuple", option="filter_horizontal", obj=obj, id="admin.E018"
)
else:
return list(
chain.from_iterable(
self._check_filter_item(
obj, field_name, "filter_horizontal[%d]" % index
)
for index, field_name in enumerate(obj.filter_horizontal)
)
)
def _check_filter_item(self, obj, field_name, label):
"""Check one item of `filter_vertical` or `filter_horizontal`, i.e.
check that given field exists and is a ManyToManyField."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E019"
)
else:
if not field.many_to_many or isinstance(field, models.ManyToManyRel):
return must_be(
"a many-to-many field", option=label, obj=obj, id="admin.E020"
)
elif not field.remote_field.through._meta.auto_created:
return [
checks.Error(
f"The value of '{label}' cannot include the ManyToManyField "
f"'{field_name}', because that field manually specifies a "
f"relationship model.",
obj=obj.__class__,
id="admin.E013",
)
]
else:
return []
def _check_radio_fields(self, obj):
"""Check that `radio_fields` is a dictionary."""
if not isinstance(obj.radio_fields, dict):
return must_be(
"a dictionary", option="radio_fields", obj=obj, id="admin.E021"
)
else:
return list(
chain.from_iterable(
self._check_radio_fields_key(obj, field_name, "radio_fields")
+ self._check_radio_fields_value(
obj, val, 'radio_fields["%s"]' % field_name
)
for field_name, val in obj.radio_fields.items()
)
)
def _check_radio_fields_key(self, obj, field_name, label):
"""Check that a key of `radio_fields` dictionary is name of existing
field and that the field is a ForeignKey or has `choices` defined."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E022"
)
else:
if not (isinstance(field, models.ForeignKey) or field.choices):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not an "
"instance of ForeignKey, and does not have a 'choices' "
"definition." % (label, field_name),
obj=obj.__class__,
id="admin.E023",
)
]
else:
return []
def _check_radio_fields_value(self, obj, val, label):
"""Check type of a value of `radio_fields` dictionary."""
from django.contrib.admin.options import HORIZONTAL, VERTICAL
if val not in (HORIZONTAL, VERTICAL):
return [
checks.Error(
"The value of '%s' must be either admin.HORIZONTAL or "
"admin.VERTICAL." % label,
obj=obj.__class__,
id="admin.E024",
)
]
else:
return []
def _check_view_on_site_url(self, obj):
if not callable(obj.view_on_site) and not isinstance(obj.view_on_site, bool):
return [
checks.Error(
"The value of 'view_on_site' must be a callable or a boolean "
"value.",
obj=obj.__class__,
id="admin.E025",
)
]
else:
return []
def _check_prepopulated_fields(self, obj):
"""Check that `prepopulated_fields` is a dictionary containing allowed
field types."""
if not isinstance(obj.prepopulated_fields, dict):
return must_be(
"a dictionary", option="prepopulated_fields", obj=obj, id="admin.E026"
)
else:
return list(
chain.from_iterable(
self._check_prepopulated_fields_key(
obj, field_name, "prepopulated_fields"
)
+ self._check_prepopulated_fields_value(
obj, val, 'prepopulated_fields["%s"]' % field_name
)
for field_name, val in obj.prepopulated_fields.items()
)
)
def _check_prepopulated_fields_key(self, obj, field_name, label):
"""Check a key of `prepopulated_fields` dictionary, i.e. check that it
is a name of existing field and the field is one of the allowed types.
"""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E027"
)
else:
if isinstance(
field, (models.DateTimeField, models.ForeignKey, models.ManyToManyField)
):
return [
checks.Error(
"The value of '%s' refers to '%s', which must not be a "
"DateTimeField, a ForeignKey, a OneToOneField, or a "
"ManyToManyField." % (label, field_name),
obj=obj.__class__,
id="admin.E028",
)
]
else:
return []
def _check_prepopulated_fields_value(self, obj, val, label):
"""Check a value of `prepopulated_fields` dictionary, i.e. it's an
iterable of existing fields."""
if not isinstance(val, (list, tuple)):
return must_be("a list or tuple", option=label, obj=obj, id="admin.E029")
else:
return list(
chain.from_iterable(
self._check_prepopulated_fields_value_item(
obj, subfield_name, "%s[%r]" % (label, index)
)
for index, subfield_name in enumerate(val)
)
)
def _check_prepopulated_fields_value_item(self, obj, field_name, label):
"""For `prepopulated_fields` equal to {"slug": ("title",)},
`field_name` is "title"."""
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E030"
)
else:
return []
def _check_ordering(self, obj):
"""Check that ordering refers to existing fields or is random."""
# ordering = None
if obj.ordering is None: # The default value is None
return []
elif not isinstance(obj.ordering, (list, tuple)):
return must_be(
"a list or tuple", option="ordering", obj=obj, id="admin.E031"
)
else:
return list(
chain.from_iterable(
self._check_ordering_item(obj, field_name, "ordering[%d]" % index)
for index, field_name in enumerate(obj.ordering)
)
)
def _check_ordering_item(self, obj, field_name, label):
"""Check that `ordering` refers to existing fields."""
if isinstance(field_name, (Combinable, models.OrderBy)):
if not isinstance(field_name, models.OrderBy):
field_name = field_name.asc()
if isinstance(field_name.expression, models.F):
field_name = field_name.expression.name
else:
return []
if field_name == "?" and len(obj.ordering) != 1:
return [
checks.Error(
"The value of 'ordering' has the random ordering marker '?', "
"but contains other fields as well.",
hint='Either remove the "?", or remove the other fields.',
obj=obj.__class__,
id="admin.E032",
)
]
elif field_name == "?":
return []
elif LOOKUP_SEP in field_name:
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
return []
else:
field_name = field_name.removeprefix("-")
if field_name == "pk":
return []
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E033"
)
else:
return []
def _check_readonly_fields(self, obj):
"""Check that readonly_fields refers to proper attribute or field."""
if obj.readonly_fields == ():
return []
elif not isinstance(obj.readonly_fields, (list, tuple)):
return must_be(
"a list or tuple", option="readonly_fields", obj=obj, id="admin.E034"
)
else:
return list(
chain.from_iterable(
self._check_readonly_fields_item(
obj, field_name, "readonly_fields[%d]" % index
)
for index, field_name in enumerate(obj.readonly_fields)
)
)
def _check_readonly_fields_item(self, obj, field_name, label):
if callable(field_name):
return []
elif hasattr(obj, field_name):
return []
elif hasattr(obj.model, field_name):
return []
else:
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a callable, "
"an attribute of '%s', or an attribute of '%s'."
% (
label,
field_name,
obj.__class__.__name__,
obj.model._meta.label,
),
obj=obj.__class__,
id="admin.E035",
)
]
else:
return []
class ModelAdminChecks(BaseModelAdminChecks):
def check(self, admin_obj, **kwargs):
return [
*super().check(admin_obj),
*self._check_save_as(admin_obj),
*self._check_save_on_top(admin_obj),
*self._check_inlines(admin_obj),
*self._check_list_display(admin_obj),
*self._check_list_display_links(admin_obj),
*self._check_list_filter(admin_obj),
*self._check_list_select_related(admin_obj),
*self._check_list_per_page(admin_obj),
*self._check_list_max_show_all(admin_obj),
*self._check_list_editable(admin_obj),
*self._check_search_fields(admin_obj),
*self._check_date_hierarchy(admin_obj),
*self._check_actions(admin_obj),
]
def _check_save_as(self, obj):
"""Check save_as is a boolean."""
if not isinstance(obj.save_as, bool):
return must_be("a boolean", option="save_as", obj=obj, id="admin.E101")
else:
return []
def _check_save_on_top(self, obj):
"""Check save_on_top is a boolean."""
if not isinstance(obj.save_on_top, bool):
return must_be("a boolean", option="save_on_top", obj=obj, id="admin.E102")
else:
return []
def _check_inlines(self, obj):
"""Check all inline model admin classes."""
if not isinstance(obj.inlines, (list, tuple)):
return must_be(
"a list or tuple", option="inlines", obj=obj, id="admin.E103"
)
else:
return list(
chain.from_iterable(
self._check_inlines_item(obj, item, "inlines[%d]" % index)
for index, item in enumerate(obj.inlines)
)
)
def _check_inlines_item(self, obj, inline, label):
"""Check one inline model admin."""
try:
inline_label = inline.__module__ + "." + inline.__name__
except AttributeError:
return [
checks.Error(
"'%s' must inherit from 'InlineModelAdmin'." % obj,
obj=obj.__class__,
id="admin.E104",
)
]
from django.contrib.admin.options import InlineModelAdmin
if not _issubclass(inline, InlineModelAdmin):
return [
checks.Error(
"'%s' must inherit from 'InlineModelAdmin'." % inline_label,
obj=obj.__class__,
id="admin.E104",
)
]
elif not inline.model:
return [
checks.Error(
"'%s' must have a 'model' attribute." % inline_label,
obj=obj.__class__,
id="admin.E105",
)
]
elif not _issubclass(inline.model, models.Model):
return must_be(
"a Model", option="%s.model" % inline_label, obj=obj, id="admin.E106"
)
else:
return inline(obj.model, obj.admin_site).check()
def _check_list_display(self, obj):
"""Check list_display only contains fields or usable attributes."""
if not isinstance(obj.list_display, (list, tuple)):
return must_be(
"a list or tuple", option="list_display", obj=obj, id="admin.E107"
)
else:
return list(
chain.from_iterable(
self._check_list_display_item(obj, item, "list_display[%d]" % index)
for index, item in enumerate(obj.list_display)
)
)
def _check_list_display_item(self, obj, item, label):
if callable(item):
return []
elif hasattr(obj, item):
return []
try:
field = obj.model._meta.get_field(item)
except FieldDoesNotExist:
try:
field = getattr(obj.model, item)
except AttributeError:
try:
field = get_fields_from_path(obj.model, item)[-1]
except (FieldDoesNotExist, NotRelationField):
return [
checks.Error(
f"The value of '{label}' refers to '{item}', which is not "
f"a callable or attribute of '{obj.__class__.__name__}', "
"or an attribute, method, or field on "
f"'{obj.model._meta.label}'.",
obj=obj.__class__,
id="admin.E108",
)
]
if (
getattr(field, "is_relation", False)
and (field.many_to_many or field.one_to_many)
) or (getattr(field, "rel", None) and field.rel.field.many_to_one):
return [
checks.Error(
f"The value of '{label}' must not be a many-to-many field or a "
f"reverse foreign key.",
obj=obj.__class__,
id="admin.E109",
)
]
return []
def _check_list_display_links(self, obj):
"""Check that list_display_links is a unique subset of list_display."""
from django.contrib.admin.options import ModelAdmin
if obj.list_display_links is None:
return []
elif not isinstance(obj.list_display_links, (list, tuple)):
return must_be(
"a list, a tuple, or None",
option="list_display_links",
obj=obj,
id="admin.E110",
)
# Check only if ModelAdmin.get_list_display() isn't overridden.
elif obj.get_list_display.__func__ is ModelAdmin.get_list_display:
return list(
chain.from_iterable(
self._check_list_display_links_item(
obj, field_name, "list_display_links[%d]" % index
)
for index, field_name in enumerate(obj.list_display_links)
)
)
return []
def _check_list_display_links_item(self, obj, field_name, label):
if field_name not in obj.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not defined in "
"'list_display'." % (label, field_name),
obj=obj.__class__,
id="admin.E111",
)
]
else:
return []
def _check_list_filter(self, obj):
if not isinstance(obj.list_filter, (list, tuple)):
return must_be(
"a list or tuple", option="list_filter", obj=obj, id="admin.E112"
)
else:
return list(
chain.from_iterable(
self._check_list_filter_item(obj, item, "list_filter[%d]" % index)
for index, item in enumerate(obj.list_filter)
)
)
def _check_list_filter_item(self, obj, item, label):
"""
Check one item of `list_filter`, the three valid options are:
1. 'field' -- a basic field filter, possibly w/ relationships (e.g.
'field__rel')
2. ('field', SomeFieldListFilter) - a field-based list filter class
3. SomeListFilter - a non-field list filter class
"""
from django.contrib.admin import FieldListFilter, ListFilter
if callable(item) and not isinstance(item, models.Field):
# If item is option 3, it should be a ListFilter...
if not _issubclass(item, ListFilter):
return must_inherit_from(
parent="ListFilter", option=label, obj=obj, id="admin.E113"
)
# ... but not a FieldListFilter.
elif issubclass(item, FieldListFilter):
return [
checks.Error(
"The value of '%s' must not inherit from 'FieldListFilter'."
% label,
obj=obj.__class__,
id="admin.E114",
)
]
else:
return []
elif isinstance(item, (tuple, list)):
# item is option #2
field, list_filter_class = item
if not _issubclass(list_filter_class, FieldListFilter):
return must_inherit_from(
parent="FieldListFilter",
option="%s[1]" % label,
obj=obj,
id="admin.E115",
)
else:
return []
else:
# item is option #1
field = item
# Validate the field string
try:
get_fields_from_path(obj.model, field)
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of '%s' refers to '%s', which does not refer to a "
"Field." % (label, field),
obj=obj.__class__,
id="admin.E116",
)
]
else:
return []
def _check_list_select_related(self, obj):
"""Check that list_select_related is a boolean, a list or a tuple."""
if not isinstance(obj.list_select_related, (bool, list, tuple)):
return must_be(
"a boolean, tuple or list",
option="list_select_related",
obj=obj,
id="admin.E117",
)
else:
return []
def _check_list_per_page(self, obj):
"""Check that list_per_page is an integer."""
if not isinstance(obj.list_per_page, int):
return must_be(
"an integer", option="list_per_page", obj=obj, id="admin.E118"
)
else:
return []
def _check_list_max_show_all(self, obj):
"""Check that list_max_show_all is an integer."""
if not isinstance(obj.list_max_show_all, int):
return must_be(
"an integer", option="list_max_show_all", obj=obj, id="admin.E119"
)
else:
return []
def _check_list_editable(self, obj):
"""Check that list_editable is a sequence of editable fields from
list_display without first element."""
if not isinstance(obj.list_editable, (list, tuple)):
return must_be(
"a list or tuple", option="list_editable", obj=obj, id="admin.E120"
)
else:
return list(
chain.from_iterable(
self._check_list_editable_item(
obj, item, "list_editable[%d]" % index
)
for index, item in enumerate(obj.list_editable)
)
)
def _check_list_editable_item(self, obj, field_name, label):
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E121"
)
else:
if field_name not in obj.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not "
"contained in 'list_display'." % (label, field_name),
obj=obj.__class__,
id="admin.E122",
)
]
elif obj.list_display_links and field_name in obj.list_display_links:
return [
checks.Error(
"The value of '%s' cannot be in both 'list_editable' and "
"'list_display_links'." % field_name,
obj=obj.__class__,
id="admin.E123",
)
]
# If list_display[0] is in list_editable, check that
# list_display_links is set. See #22792 and #26229 for use cases.
elif (
obj.list_display[0] == field_name
and not obj.list_display_links
and obj.list_display_links is not None
):
return [
checks.Error(
"The value of '%s' refers to the first field in 'list_display' "
"('%s'), which cannot be used unless 'list_display_links' is "
"set." % (label, obj.list_display[0]),
obj=obj.__class__,
id="admin.E124",
)
]
elif not field.editable or field.primary_key:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not editable "
"through the admin." % (label, field_name),
obj=obj.__class__,
id="admin.E125",
)
]
else:
return []
def _check_search_fields(self, obj):
"""Check search_fields is a sequence."""
if not isinstance(obj.search_fields, (list, tuple)):
return must_be(
"a list or tuple", option="search_fields", obj=obj, id="admin.E126"
)
else:
return []
def _check_date_hierarchy(self, obj):
"""Check that date_hierarchy refers to DateField or DateTimeField."""
if obj.date_hierarchy is None:
return []
else:
try:
field = get_fields_from_path(obj.model, obj.date_hierarchy)[-1]
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of 'date_hierarchy' refers to '%s', which "
"does not refer to a Field." % obj.date_hierarchy,
obj=obj.__class__,
id="admin.E127",
)
]
else:
if field.get_internal_type() not in {"DateField", "DateTimeField"}:
return must_be(
"a DateField or DateTimeField",
option="date_hierarchy",
obj=obj,
id="admin.E128",
)
else:
return []
def _check_actions(self, obj):
errors = []
actions = obj._get_base_actions()
# Actions with an allowed_permission attribute require the ModelAdmin
# to implement a has_<perm>_permission() method for each permission.
for func, name, _ in actions:
if not hasattr(func, "allowed_permissions"):
continue
for permission in func.allowed_permissions:
method_name = "has_%s_permission" % permission
if not hasattr(obj, method_name):
errors.append(
checks.Error(
"%s must define a %s() method for the %s action."
% (
obj.__class__.__name__,
method_name,
func.__name__,
),
obj=obj.__class__,
id="admin.E129",
)
)
# Names need to be unique.
names = collections.Counter(name for _, name, _ in actions)
for name, count in names.items():
if count > 1:
errors.append(
checks.Error(
"__name__ attributes of actions defined in %s must be "
"unique. Name %r is not unique."
% (
obj.__class__.__name__,
name,
),
obj=obj.__class__,
id="admin.E130",
)
)
return errors
class InlineModelAdminChecks(BaseModelAdminChecks):
def check(self, inline_obj, **kwargs):
parent_model = inline_obj.parent_model
return [
*super().check(inline_obj),
*self._check_relation(inline_obj, parent_model),
*self._check_exclude_of_parent_model(inline_obj, parent_model),
*self._check_extra(inline_obj),
*self._check_max_num(inline_obj),
*self._check_min_num(inline_obj),
*self._check_formset(inline_obj),
]
def _check_exclude_of_parent_model(self, obj, parent_model):
# Do not perform more specific checks if the base checks result in an
# error.
errors = super()._check_exclude(obj)
if errors:
return []
# Skip if `fk_name` is invalid.
if self._check_relation(obj, parent_model):
return []
if obj.exclude is None:
return []
fk = _get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name)
if fk.name in obj.exclude:
return [
checks.Error(
"Cannot exclude the field '%s', because it is the foreign key "
"to the parent model '%s'."
% (
fk.name,
parent_model._meta.label,
),
obj=obj.__class__,
id="admin.E201",
)
]
else:
return []
def _check_relation(self, obj, parent_model):
try:
_get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name)
except ValueError as e:
return [checks.Error(e.args[0], obj=obj.__class__, id="admin.E202")]
else:
return []
def _check_extra(self, obj):
"""Check that extra is an integer."""
if not isinstance(obj.extra, int):
return must_be("an integer", option="extra", obj=obj, id="admin.E203")
else:
return []
def _check_max_num(self, obj):
"""Check that max_num is an integer."""
if obj.max_num is None:
return []
elif not isinstance(obj.max_num, int):
return must_be("an integer", option="max_num", obj=obj, id="admin.E204")
else:
return []
def _check_min_num(self, obj):
"""Check that min_num is an integer."""
if obj.min_num is None:
return []
elif not isinstance(obj.min_num, int):
return must_be("an integer", option="min_num", obj=obj, id="admin.E205")
else:
return []
def _check_formset(self, obj):
"""Check formset is a subclass of BaseModelFormSet."""
if not _issubclass(obj.formset, BaseModelFormSet):
return must_inherit_from(
parent="BaseModelFormSet", option="formset", obj=obj, id="admin.E206"
)
else:
return []
def must_be(type, option, obj, id):
return [
checks.Error(
"The value of '%s' must be %s." % (option, type),
obj=obj.__class__,
id=id,
),
]
def must_inherit_from(parent, option, obj, id):
return [
checks.Error(
"The value of '%s' must inherit from '%s'." % (option, parent),
obj=obj.__class__,
id=id,
),
]
def refer_to_missing_field(field, option, obj, id):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a field of '%s'."
% (option, field, obj.model._meta.label),
obj=obj.__class__,
id=id,
),
]
|
from unittest import mock
from django.contrib.contenttypes.checks import check_model_name_lengths
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.core import checks
from django.db import models
from django.test import SimpleTestCase, override_settings
from django.test.utils import isolate_apps
@isolate_apps("contenttypes_tests", attr_name="apps")
class GenericForeignKeyTests(SimpleTestCase):
databases = "__all__"
def test_missing_content_type_field(self):
class TaggedItem(models.Model):
# no content_type field
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
field = TaggedItem._meta.get_field("content_object")
expected = [
checks.Error(
"The GenericForeignKey content type references the nonexistent "
"field 'TaggedItem.content_type'.",
obj=field,
id="contenttypes.E002",
)
]
self.assertEqual(field.check(), expected)
def test_invalid_content_type_field(self):
class Model(models.Model):
content_type = models.IntegerField() # should be ForeignKey
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
field = Model._meta.get_field("content_object")
self.assertEqual(
field.check(),
[
checks.Error(
"'Model.content_type' is not a ForeignKey.",
hint=(
"GenericForeignKeys must use a ForeignKey to "
"'contenttypes.ContentType' as the 'content_type' field."
),
obj=field,
id="contenttypes.E003",
)
],
)
def test_content_type_field_pointing_to_wrong_model(self):
class Model(models.Model):
content_type = models.ForeignKey(
"self", models.CASCADE
) # should point to ContentType
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
field = Model._meta.get_field("content_object")
self.assertEqual(
field.check(),
[
checks.Error(
"'Model.content_type' is not a ForeignKey to "
"'contenttypes.ContentType'.",
hint=(
"GenericForeignKeys must use a ForeignKey to "
"'contenttypes.ContentType' as the 'content_type' field."
),
obj=field,
id="contenttypes.E004",
)
],
)
def test_content_type_db_on_delete(self):
class Model(models.Model):
content_type = models.ForeignKey(ContentType, models.DB_CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
field = Model._meta.get_field("content_object")
self.assertEqual(
field.check(),
[
checks.Error(
"'Model.content_type' cannot use the database-level on_delete "
"variant.",
hint="Change the on_delete rule to the non-database variant.",
obj=field,
id="contenttypes.E006",
)
],
)
def test_missing_object_id_field(self):
class TaggedItem(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
# missing object_id field
content_object = GenericForeignKey()
field = TaggedItem._meta.get_field("content_object")
self.assertEqual(
field.check(),
[
checks.Error(
"The GenericForeignKey object ID references the nonexistent "
"field 'object_id'.",
obj=field,
id="contenttypes.E001",
)
],
)
def test_field_name_ending_with_underscore(self):
class Model(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object_ = GenericForeignKey("content_type", "object_id")
field = Model._meta.get_field("content_object_")
self.assertEqual(
field.check(),
[
checks.Error(
"Field names must not end with an underscore.",
obj=field,
id="fields.E001",
)
],
)
@override_settings(
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"contenttypes_tests",
]
)
def test_generic_foreign_key_checks_are_performed(self):
class Model(models.Model):
content_object = GenericForeignKey()
with mock.patch.object(GenericForeignKey, "check") as check:
checks.run_checks(app_configs=self.apps.get_app_configs())
check.assert_called_once_with()
@isolate_apps("contenttypes_tests")
class GenericRelationTests(SimpleTestCase):
def test_valid_generic_relationship(self):
class TaggedItem(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
class Bookmark(models.Model):
tags = GenericRelation("TaggedItem")
self.assertEqual(Bookmark.tags.field.check(), [])
def test_valid_generic_relationship_with_explicit_fields(self):
class TaggedItem(models.Model):
custom_content_type = models.ForeignKey(ContentType, models.CASCADE)
custom_object_id = models.PositiveIntegerField()
content_object = GenericForeignKey(
"custom_content_type", "custom_object_id"
)
class Bookmark(models.Model):
tags = GenericRelation(
"TaggedItem",
content_type_field="custom_content_type",
object_id_field="custom_object_id",
)
self.assertEqual(Bookmark.tags.field.check(), [])
def test_pointing_to_missing_model(self):
class Model(models.Model):
rel = GenericRelation("MissingModel")
self.assertEqual(
Model.rel.field.check(),
[
checks.Error(
"Field defines a relation with model 'MissingModel', "
"which is either not installed, or is abstract.",
obj=Model.rel.field,
id="fields.E300",
)
],
)
def test_valid_self_referential_generic_relationship(self):
class Model(models.Model):
rel = GenericRelation("Model")
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
self.assertEqual(Model.rel.field.check(), [])
def test_missing_generic_foreign_key(self):
class TaggedItem(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
class Bookmark(models.Model):
tags = GenericRelation("TaggedItem")
self.assertEqual(
Bookmark.tags.field.check(),
[
checks.Error(
"The GenericRelation defines a relation with the model "
"'contenttypes_tests.TaggedItem', but that model does not have a "
"GenericForeignKey.",
obj=Bookmark.tags.field,
id="contenttypes.E004",
)
],
)
@override_settings(TEST_SWAPPED_MODEL="contenttypes_tests.Replacement")
def test_pointing_to_swapped_model(self):
class Replacement(models.Model):
pass
class SwappedModel(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
class Meta:
swappable = "TEST_SWAPPED_MODEL"
class Model(models.Model):
rel = GenericRelation("SwappedModel")
self.assertEqual(
Model.rel.field.check(),
[
checks.Error(
"Field defines a relation with the model "
"'contenttypes_tests.SwappedModel', "
"which has been swapped out.",
hint=(
"Update the relation to point at 'settings.TEST_SWAPPED_MODEL'."
),
obj=Model.rel.field,
id="fields.E301",
)
],
)
def test_field_name_ending_with_underscore(self):
class TaggedItem(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
class InvalidBookmark(models.Model):
tags_ = GenericRelation("TaggedItem")
self.assertEqual(
InvalidBookmark.tags_.field.check(),
[
checks.Error(
"Field names must not end with an underscore.",
obj=InvalidBookmark.tags_.field,
id="fields.E001",
)
],
)
@isolate_apps("contenttypes_tests", attr_name="apps")
class ModelCheckTests(SimpleTestCase):
def test_model_name_too_long(self):
model = type("A" * 101, (models.Model,), {"__module__": self.__module__})
self.assertEqual(
check_model_name_lengths(self.apps.get_app_configs()),
[
checks.Error(
"Model names must be at most 100 characters (got 101).",
obj=model,
id="contenttypes.E005",
)
],
)
def test_model_name_max_length(self):
type("A" * 100, (models.Model,), {"__module__": self.__module__})
self.assertEqual(check_model_name_lengths(self.apps.get_app_configs()), [])
|
./temp_repos/django/django/contrib/admin/checks.py
|
./temp_repos/django/tests/contenttypes_tests/test_checks.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'BaseModelAdminChecks'.
Context:
- Class Name: BaseModelAdminChecks
- Dependencies to Mock: None detected
- Key Imports: collections, django.utils.module_loading, itertools, django.conf, django.db, django.contrib.admin.sites, django.contrib.admin, django.contrib.admin.exceptions, django.core, django.core.exceptions
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above (if any).
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
BaseModelAdminChecks
|
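For illustration, a compact sketch of the prompt above, exercising one representative check in both directions. A MagicMock stands in for a ModelAdmin, which works because _check_raw_id_fields only reads the raw_id_fields attribute; a fuller test would cover the other _check_* helpers the same way. It assumes a configured Django environment so the admin contrib imports cleanly.

from unittest import TestCase, mock

from django.contrib.admin.checks import BaseModelAdminChecks


class BaseModelAdminChecksSketchTests(TestCase):
    def setUp(self):
        self.checker = BaseModelAdminChecks()

    def test_raw_id_fields_must_be_a_sequence(self):
        # Arrange: raw_id_fields is a string, not a list or tuple.
        admin_obj = mock.MagicMock()
        admin_obj.raw_id_fields = "not-a-sequence"
        # Act
        errors = self.checker._check_raw_id_fields(admin_obj)
        # Assert: exactly one admin.E001 error is reported.
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, "admin.E001")

    def test_empty_raw_id_fields_passes(self):
        # Arrange
        admin_obj = mock.MagicMock()
        admin_obj.raw_id_fields = ()
        # Act / Assert
        self.assertEqual(self.checker._check_raw_id_fields(admin_obj), [])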
python
|
from django.apps import apps
from django.contrib.sites.models import Site
from django.core.cache import cache
from django.test import TestCase, modify_settings, override_settings
from .models import I18nTestModel, TestModel
@modify_settings(INSTALLED_APPS={"append": "django.contrib.sitemaps"})
@override_settings(ROOT_URLCONF="sitemaps_tests.urls.http")
class SitemapTestsBase(TestCase):
protocol = "http"
sites_installed = apps.is_installed("django.contrib.sites")
domain = "example.com" if sites_installed else "testserver"
@classmethod
def setUpTestData(cls):
# Create an object for sitemap content.
TestModel.objects.create(name="Test Object")
cls.i18n_model = I18nTestModel.objects.create(name="Test Object")
def setUp(self):
self.base_url = "%s://%s" % (self.protocol, self.domain)
cache.clear()
@classmethod
def setUpClass(cls):
super().setUpClass()
# This cleanup is necessary because contrib.sites cache
# makes tests interfere with each other, see #11505
Site.objects.clear_cache()
|
import os
from unittest import mock
from django.core.exceptions import SuspiciousFileOperation
from django.core.files.storage import Storage
from django.test import SimpleTestCase
class CustomStorage(Storage):
"""Simple Storage subclass implementing the bare minimum for testing."""
def exists(self, name):
return False
def _save(self, name):
return name
class StorageValidateFileNameTests(SimpleTestCase):
invalid_file_names = [
os.path.join("path", "to", os.pardir, "test.file"),
os.path.join(os.path.sep, "path", "to", "test.file"),
]
error_msg = "Detected path traversal attempt in '%s'"
def test_validate_before_get_available_name(self):
s = CustomStorage()
        # The initial name passed to `save` is neither valid nor safe; fail early.
for name in self.invalid_file_names:
with (
self.subTest(name=name),
mock.patch.object(s, "get_available_name") as mock_get_available_name,
mock.patch.object(s, "_save") as mock_internal_save,
):
with self.assertRaisesMessage(
SuspiciousFileOperation, self.error_msg % name
):
s.save(name, content="irrelevant")
self.assertEqual(mock_get_available_name.mock_calls, [])
self.assertEqual(mock_internal_save.mock_calls, [])
def test_validate_after_get_available_name(self):
s = CustomStorage()
# The initial name passed to `save` is valid and safe, but the returned
# name from `get_available_name` is not.
for name in self.invalid_file_names:
with (
self.subTest(name=name),
mock.patch.object(s, "get_available_name", return_value=name),
mock.patch.object(s, "_save") as mock_internal_save,
):
with self.assertRaisesMessage(
SuspiciousFileOperation, self.error_msg % name
):
s.save("valid-file-name.txt", content="irrelevant")
self.assertEqual(mock_internal_save.mock_calls, [])
def test_validate_after_internal_save(self):
s = CustomStorage()
# The initial name passed to `save` is valid and safe, but the result
# from `_save` is not (this is achieved by monkeypatching _save).
for name in self.invalid_file_names:
with (
self.subTest(name=name),
mock.patch.object(s, "_save", return_value=name),
):
with self.assertRaisesMessage(
SuspiciousFileOperation, self.error_msg % name
):
s.save("valid-file-name.txt", content="irrelevant")
|
./temp_repos/django/tests/sitemaps_tests/base.py
|
./temp_repos/django/tests/file_storage/test_base.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'SitemapTestsBase'.
Context:
- Class Name: SitemapTestsBase
- Dependencies to Mock: None detected
- Key Imports: django.test, django.contrib.sites.models, django.core.cache, models, django.apps
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
SitemapTestsBase
|
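A minimal sketch of one possible answer, assuming Django's own test-suite layout so that sitemaps_tests.base is importable after django.setup(); the cache is mocked at the module level where SitemapTestsBase looks it up:
import unittest
from unittest import mock


class SitemapTestsBaseTests(unittest.TestCase):
    def _make_instance(self):
        from sitemaps_tests.base import SitemapTestsBase

        # TestCase instances are constructed with an existing method name.
        return SitemapTestsBase("run")

    def test_setup_builds_base_url_and_clears_cache(self):
        # Arrange
        instance = self._make_instance()
        with mock.patch("sitemaps_tests.base.cache") as mock_cache:
            # Act
            instance.setUp()
        # Assert: base_url combines the class protocol and domain, and the
        # cache was cleared exactly once.
        self.assertEqual(
            instance.base_url, "%s://%s" % (instance.protocol, instance.domain)
        )
        mock_cache.clear.assert_called_once_with()

    def test_setup_propagates_cache_failures(self):
        # Arrange: a cache backend whose clear() blows up.
        instance = self._make_instance()
        with mock.patch("sitemaps_tests.base.cache") as mock_cache:
            mock_cache.clear.side_effect = RuntimeError("cache down")
            # Act / Assert
            with self.assertRaises(RuntimeError):
                instance.setUp()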
python
|
from django.contrib.gis.db.models.fields import BaseSpatialField
from django.contrib.gis.measure import Distance
from django.db import NotSupportedError
from django.db.models import Expression, Lookup, Transform
from django.db.models.sql.query import Query
from django.utils.regex_helper import _lazy_re_compile
class RasterBandTransform(Transform):
def as_sql(self, compiler, connection):
return compiler.compile(self.lhs)
class GISLookup(Lookup):
sql_template = None
transform_func = None
distance = False
band_rhs = None
band_lhs = None
def __init__(self, lhs, rhs):
rhs, *self.rhs_params = rhs if isinstance(rhs, (list, tuple)) else (rhs,)
super().__init__(lhs, rhs)
self.template_params = {}
self.process_rhs_params()
def process_rhs_params(self):
if self.rhs_params:
# Check if a band index was passed in the query argument.
if len(self.rhs_params) == (2 if self.lookup_name == "relate" else 1):
self.process_band_indices()
elif len(self.rhs_params) > 1:
raise ValueError("Tuple too long for lookup %s." % self.lookup_name)
elif isinstance(self.lhs, RasterBandTransform):
self.process_band_indices(only_lhs=True)
def process_band_indices(self, only_lhs=False):
"""
Extract the lhs band index from the band transform class and the rhs
band index from the input tuple.
"""
# PostGIS band indices are 1-based, so the band index needs to be
# increased to be consistent with the GDALRaster band indices.
if only_lhs:
self.band_rhs = 1
self.band_lhs = self.lhs.band_index + 1
return
if isinstance(self.lhs, RasterBandTransform):
self.band_lhs = self.lhs.band_index + 1
else:
self.band_lhs = 1
self.band_rhs, *self.rhs_params = self.rhs_params
def get_db_prep_lookup(self, value, connection):
# get_db_prep_lookup is called by process_rhs from super class
return ("%s", (connection.ops.Adapter(value),))
def process_rhs(self, compiler, connection):
if isinstance(self.rhs, Query):
# If rhs is some Query, don't touch it.
return super().process_rhs(compiler, connection)
if isinstance(self.rhs, Expression):
self.rhs = self.rhs.resolve_expression(compiler.query)
rhs, rhs_params = super().process_rhs(compiler, connection)
placeholder = connection.ops.get_geom_placeholder(
self.lhs.output_field, self.rhs, compiler
)
return placeholder % rhs, rhs_params
def get_rhs_op(self, connection, rhs):
# Unlike BuiltinLookup, the GIS get_rhs_op() implementation should
# return an object (SpatialOperator) with an as_sql() method to allow
# for more complex computations (where the lhs part can be mixed in).
return connection.ops.gis_operators[self.lookup_name]
def as_sql(self, compiler, connection):
lhs_sql, lhs_params = self.process_lhs(compiler, connection)
rhs_sql, rhs_params = self.process_rhs(compiler, connection)
sql_params = (*lhs_params, *rhs_params)
template_params = {
"lhs": lhs_sql,
"rhs": rhs_sql,
"value": "%s",
**self.template_params,
}
rhs_op = self.get_rhs_op(connection, rhs_sql)
return rhs_op.as_sql(connection, self, template_params, sql_params)
# ------------------
# Geometry operators
# ------------------
@BaseSpatialField.register_lookup
class OverlapsLeftLookup(GISLookup):
"""
The overlaps_left operator returns true if A's bounding box overlaps or is
to the left of B's bounding box.
"""
lookup_name = "overlaps_left"
@BaseSpatialField.register_lookup
class OverlapsRightLookup(GISLookup):
"""
The 'overlaps_right' operator returns true if A's bounding box overlaps or
is to the right of B's bounding box.
"""
lookup_name = "overlaps_right"
@BaseSpatialField.register_lookup
class OverlapsBelowLookup(GISLookup):
"""
The 'overlaps_below' operator returns true if A's bounding box overlaps or
is below B's bounding box.
"""
lookup_name = "overlaps_below"
@BaseSpatialField.register_lookup
class OverlapsAboveLookup(GISLookup):
"""
The 'overlaps_above' operator returns true if A's bounding box overlaps or
is above B's bounding box.
"""
lookup_name = "overlaps_above"
@BaseSpatialField.register_lookup
class LeftLookup(GISLookup):
"""
The 'left' operator returns true if A's bounding box is strictly to the
left of B's bounding box.
"""
lookup_name = "left"
@BaseSpatialField.register_lookup
class RightLookup(GISLookup):
"""
The 'right' operator returns true if A's bounding box is strictly to the
right of B's bounding box.
"""
lookup_name = "right"
@BaseSpatialField.register_lookup
class StrictlyBelowLookup(GISLookup):
"""
The 'strictly_below' operator returns true if A's bounding box is strictly
below B's bounding box.
"""
lookup_name = "strictly_below"
@BaseSpatialField.register_lookup
class StrictlyAboveLookup(GISLookup):
"""
The 'strictly_above' operator returns true if A's bounding box is strictly
above B's bounding box.
"""
lookup_name = "strictly_above"
@BaseSpatialField.register_lookup
class SameAsLookup(GISLookup):
"""
The "~=" operator is the "same as" operator. It tests actual geometric
equality of two features. So if A and B are the same feature,
vertex-by-vertex, the operator returns true.
"""
lookup_name = "same_as"
BaseSpatialField.register_lookup(SameAsLookup, "exact")
@BaseSpatialField.register_lookup
class BBContainsLookup(GISLookup):
"""
    The 'bbcontains' operator returns true if A's bounding box completely
    contains B's bounding box.
"""
lookup_name = "bbcontains"
@BaseSpatialField.register_lookup
class BBOverlapsLookup(GISLookup):
"""
The 'bboverlaps' operator returns true if A's bounding box overlaps B's
bounding box.
"""
lookup_name = "bboverlaps"
@BaseSpatialField.register_lookup
class ContainedLookup(GISLookup):
"""
The 'contained' operator returns true if A's bounding box is completely
contained by B's bounding box.
"""
lookup_name = "contained"
# ------------------
# Geometry functions
# ------------------
@BaseSpatialField.register_lookup
class ContainsLookup(GISLookup):
lookup_name = "contains"
@BaseSpatialField.register_lookup
class ContainsProperlyLookup(GISLookup):
lookup_name = "contains_properly"
@BaseSpatialField.register_lookup
class CoveredByLookup(GISLookup):
lookup_name = "coveredby"
@BaseSpatialField.register_lookup
class CoversLookup(GISLookup):
lookup_name = "covers"
@BaseSpatialField.register_lookup
class CrossesLookup(GISLookup):
lookup_name = "crosses"
@BaseSpatialField.register_lookup
class DisjointLookup(GISLookup):
lookup_name = "disjoint"
@BaseSpatialField.register_lookup
class EqualsLookup(GISLookup):
lookup_name = "equals"
@BaseSpatialField.register_lookup
class IntersectsLookup(GISLookup):
lookup_name = "intersects"
@BaseSpatialField.register_lookup
class OverlapsLookup(GISLookup):
lookup_name = "overlaps"
@BaseSpatialField.register_lookup
class RelateLookup(GISLookup):
lookup_name = "relate"
sql_template = "%(func)s(%(lhs)s, %(rhs)s, %%s)"
pattern_regex = _lazy_re_compile(r"^[012TF*]{9}$")
def process_rhs(self, compiler, connection):
# Check the pattern argument
pattern = self.rhs_params[0]
backend_op = connection.ops.gis_operators[self.lookup_name]
if hasattr(backend_op, "check_relate_argument"):
backend_op.check_relate_argument(pattern)
elif not isinstance(pattern, str) or not self.pattern_regex.match(pattern):
raise ValueError('Invalid intersection matrix pattern "%s".' % pattern)
sql, params = super().process_rhs(compiler, connection)
return sql, (*params, pattern)
@BaseSpatialField.register_lookup
class TouchesLookup(GISLookup):
lookup_name = "touches"
@BaseSpatialField.register_lookup
class WithinLookup(GISLookup):
lookup_name = "within"
class DistanceLookupBase(GISLookup):
distance = True
sql_template = "%(func)s(%(lhs)s, %(rhs)s) %(op)s %(value)s"
def process_rhs_params(self):
if not 1 <= len(self.rhs_params) <= 3:
raise ValueError(
"2, 3, or 4-element tuple required for '%s' lookup." % self.lookup_name
)
elif len(self.rhs_params) == 3 and self.rhs_params[2] != "spheroid":
raise ValueError(
"For 4-element tuples the last argument must be the 'spheroid' "
"directive."
)
# Check if the second parameter is a band index.
if len(self.rhs_params) > 1 and self.rhs_params[1] != "spheroid":
self.process_band_indices()
def process_distance(self, compiler, connection):
dist_param = self.rhs_params[0]
return (
compiler.compile(dist_param.resolve_expression(compiler.query))
if hasattr(dist_param, "resolve_expression")
else (
"%s",
connection.ops.get_distance(
self.lhs.output_field, self.rhs_params, self.lookup_name
),
)
)
@BaseSpatialField.register_lookup
class DWithinLookup(DistanceLookupBase):
lookup_name = "dwithin"
sql_template = "%(func)s(%(lhs)s, %(rhs)s, %(value)s)"
def process_distance(self, compiler, connection):
dist_param = self.rhs_params[0]
if (
not connection.features.supports_dwithin_distance_expr
and hasattr(dist_param, "resolve_expression")
and not isinstance(dist_param, Distance)
):
raise NotSupportedError(
"This backend does not support expressions for specifying "
"distance in the dwithin lookup."
)
return super().process_distance(compiler, connection)
def process_rhs(self, compiler, connection):
dist_sql, dist_params = self.process_distance(compiler, connection)
self.template_params["value"] = dist_sql
rhs_sql, params = super().process_rhs(compiler, connection)
return rhs_sql, (*params, *dist_params)
class DistanceLookupFromFunction(DistanceLookupBase):
def as_sql(self, compiler, connection):
spheroid = (
len(self.rhs_params) == 2 and self.rhs_params[-1] == "spheroid"
) or None
distance_expr = connection.ops.distance_expr_for_lookup(
self.lhs, self.rhs, spheroid=spheroid
)
sql, params = compiler.compile(distance_expr.resolve_expression(compiler.query))
dist_sql, dist_params = self.process_distance(compiler, connection)
return (
"%(func)s %(op)s %(dist)s" % {"func": sql, "op": self.op, "dist": dist_sql},
(*params, *dist_params),
)
@BaseSpatialField.register_lookup
class DistanceGTLookup(DistanceLookupFromFunction):
lookup_name = "distance_gt"
op = ">"
@BaseSpatialField.register_lookup
class DistanceGTELookup(DistanceLookupFromFunction):
lookup_name = "distance_gte"
op = ">="
@BaseSpatialField.register_lookup
class DistanceLTLookup(DistanceLookupFromFunction):
lookup_name = "distance_lt"
op = "<"
@BaseSpatialField.register_lookup
class DistanceLTELookup(DistanceLookupFromFunction):
lookup_name = "distance_lte"
op = "<="
|
from datetime import datetime
from unittest import mock
from django.db.models import DateTimeField, Value
from django.db.models.lookups import Lookup, YearLookup
from django.test import SimpleTestCase
class CustomLookup(Lookup):
pass
class LookupTests(SimpleTestCase):
def test_equality(self):
lookup = Lookup(Value(1), Value(2))
self.assertEqual(lookup, lookup)
self.assertEqual(lookup, Lookup(lookup.lhs, lookup.rhs))
self.assertEqual(lookup, mock.ANY)
self.assertNotEqual(lookup, Lookup(lookup.lhs, Value(3)))
self.assertNotEqual(lookup, Lookup(Value(3), lookup.rhs))
self.assertNotEqual(lookup, CustomLookup(lookup.lhs, lookup.rhs))
def test_repr(self):
tests = [
(Lookup(Value(1), Value("a")), "Lookup(Value(1), Value('a'))"),
(
YearLookup(
Value(datetime(2010, 1, 1, 0, 0, 0)),
Value(datetime(2010, 1, 1, 23, 59, 59)),
),
"YearLookup("
"Value(datetime.datetime(2010, 1, 1, 0, 0)), "
"Value(datetime.datetime(2010, 1, 1, 23, 59, 59)))",
),
]
for lookup, expected in tests:
with self.subTest(lookup=lookup):
self.assertEqual(repr(lookup), expected)
def test_hash(self):
lookup = Lookup(Value(1), Value(2))
self.assertEqual(hash(lookup), hash(lookup))
self.assertEqual(hash(lookup), hash(Lookup(lookup.lhs, lookup.rhs)))
self.assertNotEqual(hash(lookup), hash(Lookup(lookup.lhs, Value(3))))
self.assertNotEqual(hash(lookup), hash(Lookup(Value(3), lookup.rhs)))
self.assertNotEqual(hash(lookup), hash(CustomLookup(lookup.lhs, lookup.rhs)))
class YearLookupTests(SimpleTestCase):
def test_get_bound_params(self):
look_up = YearLookup(
lhs=Value(datetime(2010, 1, 1, 0, 0, 0), output_field=DateTimeField()),
rhs=Value(datetime(2010, 1, 1, 23, 59, 59), output_field=DateTimeField()),
)
msg = "subclasses of YearLookup must provide a get_bound_params() method"
with self.assertRaisesMessage(NotImplementedError, msg):
look_up.get_bound_params(
datetime(2010, 1, 1, 0, 0, 0), datetime(2010, 1, 1, 23, 59, 59)
)
|
./temp_repos/django/django/contrib/gis/db/models/lookups.py
|
./temp_repos/django/tests/lookup/test_lookups.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'RasterBandTransform'.
Context:
- Class Name: RasterBandTransform
- Dependencies to Mock: lhs, rhs
- Key Imports: django.db.models.sql.query, django.db, django.contrib.gis.db.models.fields, django.contrib.gis.measure, django.utils.regex_helper, django.db.models
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
RasterBandTransform
|
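A minimal sketch of one possible answer, assuming GeoDjango's GDAL dependencies are importable; the lhs, compiler, and connection are all mocks, and the expected SQL tuple is made up for illustration:
import unittest
from unittest import mock

from django.contrib.gis.db.models.lookups import RasterBandTransform


class RasterBandTransformTests(unittest.TestCase):
    def test_as_sql_delegates_to_compiler(self):
        # Arrange: mock the lhs expression and the compiler/connection pair.
        lhs = mock.MagicMock(name="lhs")
        compiler = mock.MagicMock()
        compiler.compile.return_value = ("ST_BandMetaData(%s)", [1])
        connection = mock.MagicMock()
        transform = RasterBandTransform(lhs)
        # Act
        sql, params = transform.as_sql(compiler, connection)
        # Assert: as_sql() compiles only its lhs and returns that result.
        compiler.compile.assert_called_once_with(transform.lhs)
        self.assertEqual((sql, params), ("ST_BandMetaData(%s)", [1]))

    def test_as_sql_propagates_compiler_errors(self):
        # Arrange: a compiler whose compile() fails.
        compiler = mock.MagicMock()
        compiler.compile.side_effect = ValueError("cannot compile lhs")
        transform = RasterBandTransform(mock.MagicMock(name="lhs"))
        # Act / Assert
        with self.assertRaises(ValueError):
            transform.as_sql(compiler, mock.MagicMock())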
python
|
from contextlib import contextmanager
from copy import copy
# Hard-coded processor for easier use of CSRF protection.
_builtin_context_processors = ("django.template.context_processors.csrf",)
class ContextPopException(Exception):
"pop() has been called more times than push()"
pass
class ContextDict(dict):
def __init__(self, context, *args, **kwargs):
super().__init__(*args, **kwargs)
context.dicts.append(self)
self.context = context
def __enter__(self):
return self
def __exit__(self, *args, **kwargs):
self.context.pop()
class BaseContext:
def __init__(self, dict_=None):
self._reset_dicts(dict_)
def _reset_dicts(self, value=None):
builtins = {"True": True, "False": False, "None": None}
self.dicts = [builtins]
if isinstance(value, BaseContext):
self.dicts += value.dicts[1:]
elif value is not None:
self.dicts.append(value)
def __copy__(self):
duplicate = BaseContext()
duplicate.__class__ = self.__class__
duplicate.__dict__ = copy(self.__dict__)
duplicate.dicts = self.dicts[:]
return duplicate
def __repr__(self):
return repr(self.dicts)
def __iter__(self):
return reversed(self.dicts)
def push(self, *args, **kwargs):
dicts = []
for d in args:
if isinstance(d, BaseContext):
dicts += d.dicts[1:]
else:
dicts.append(d)
return ContextDict(self, *dicts, **kwargs)
def pop(self):
if len(self.dicts) == 1:
raise ContextPopException
return self.dicts.pop()
def __setitem__(self, key, value):
"Set a variable in the current context"
self.dicts[-1][key] = value
def set_upward(self, key, value):
"""
Set a variable in one of the higher contexts if it exists there,
otherwise in the current context.
"""
context = self.dicts[-1]
for d in reversed(self.dicts):
if key in d:
context = d
break
context[key] = value
def __getitem__(self, key):
"""
Get a variable's value, starting at the current context and going
upward
"""
for d in reversed(self.dicts):
if key in d:
return d[key]
raise KeyError(key)
def __delitem__(self, key):
"Delete a variable from the current context"
del self.dicts[-1][key]
def __contains__(self, key):
return any(key in d for d in self.dicts)
def get(self, key, otherwise=None):
for d in reversed(self.dicts):
if key in d:
return d[key]
return otherwise
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
def new(self, values=None):
"""
Return a new context with the same properties, but with only the
values given in 'values' stored.
"""
new_context = copy(self)
new_context._reset_dicts(values)
return new_context
def flatten(self):
"""
Return self.dicts as one dictionary.
"""
flat = {}
for d in self.dicts:
flat.update(d)
return flat
def __eq__(self, other):
"""
        Compare two contexts by comparing their 'dicts' attributes.
"""
if not isinstance(other, BaseContext):
return NotImplemented
# flatten dictionaries because they can be put in a different order.
return self.flatten() == other.flatten()
class Context(BaseContext):
"A stack container for variable context"
def __init__(self, dict_=None, autoescape=True, use_l10n=None, use_tz=None):
self.autoescape = autoescape
self.use_l10n = use_l10n
self.use_tz = use_tz
self.template_name = "unknown"
self.render_context = RenderContext()
# Set to the original template -- as opposed to extended or included
# templates -- during rendering, see bind_template.
self.template = None
super().__init__(dict_)
@contextmanager
def bind_template(self, template):
if self.template is not None:
raise RuntimeError("Context is already bound to a template")
self.template = template
try:
yield
finally:
self.template = None
def __copy__(self):
duplicate = super().__copy__()
duplicate.render_context = copy(self.render_context)
return duplicate
def update(self, other_dict):
"Push other_dict to the stack of dictionaries in the Context"
if not hasattr(other_dict, "__getitem__"):
raise TypeError("other_dict must be a mapping (dictionary-like) object.")
if isinstance(other_dict, BaseContext):
other_dict = other_dict.dicts[1:].pop()
return ContextDict(self, other_dict)
class RenderContext(BaseContext):
"""
A stack container for storing Template state.
RenderContext simplifies the implementation of template Nodes by providing
a safe place to store state between invocations of a node's `render`
method.
The RenderContext also provides scoping rules that are more sensible for
'template local' variables. The render context stack is pushed before each
template is rendered, creating a fresh scope with nothing in it. Name
resolution fails if a variable is not found at the top of the
RequestContext stack. Thus, variables are local to a specific template and
don't affect the rendering of other templates as they would if they were
stored in the normal template context.
"""
template = None
def __iter__(self):
yield from self.dicts[-1]
def __contains__(self, key):
return key in self.dicts[-1]
def get(self, key, otherwise=None):
return self.dicts[-1].get(key, otherwise)
def __getitem__(self, key):
return self.dicts[-1][key]
@contextmanager
def push_state(self, template, isolated_context=True):
initial = self.template
self.template = template
if isolated_context:
self.push()
try:
yield
finally:
self.template = initial
if isolated_context:
self.pop()
class RequestContext(Context):
"""
This subclass of template.Context automatically populates itself using
the processors defined in the engine's configuration.
Additional processors can be specified as a list of callables
using the "processors" keyword argument.
"""
def __init__(
self,
request,
dict_=None,
processors=None,
use_l10n=None,
use_tz=None,
autoescape=True,
):
super().__init__(dict_, use_l10n=use_l10n, use_tz=use_tz, autoescape=autoescape)
self.request = request
self._processors = () if processors is None else tuple(processors)
self._processors_index = len(self.dicts)
# placeholder for context processors output
self.update({})
# empty dict for any new modifications
# (so that context processors don't overwrite them)
self.update({})
@contextmanager
def bind_template(self, template):
if self.template is not None:
raise RuntimeError("Context is already bound to a template")
self.template = template
# Set context processors according to the template engine's settings.
processors = template.engine.template_context_processors + self._processors
updates = {}
for processor in processors:
context = processor(self.request)
try:
updates.update(context)
except TypeError as e:
raise TypeError(
f"Context processor {processor.__qualname__} didn't return a "
"dictionary."
) from e
self.dicts[self._processors_index] = updates
try:
yield
finally:
self.template = None
# Unset context processors.
self.dicts[self._processors_index] = {}
def new(self, values=None):
new_context = super().new(values)
# This is for backwards-compatibility: RequestContexts created via
# Context.new don't include values from context processors.
if hasattr(new_context, "_processors_index"):
del new_context._processors_index
return new_context
def make_context(context, request=None, **kwargs):
"""
Create a suitable Context from a plain dict and optionally an HttpRequest.
"""
if context is not None and not isinstance(context, dict):
raise TypeError(
"context must be a dict rather than %s." % context.__class__.__name__
)
if request is None:
context = Context(context, **kwargs)
else:
# The following pattern is required to ensure values from
# context override those from template context processors.
original_context = context
context = RequestContext(request, **kwargs)
if original_context:
context.push(original_context)
return context
|
from copy import copy
from unittest import mock
from django.http import HttpRequest
from django.template import (
Context,
Engine,
RequestContext,
Template,
Variable,
VariableDoesNotExist,
)
from django.template.context import RenderContext
from django.test import RequestFactory, SimpleTestCase, override_settings
class ContextTests(SimpleTestCase):
def test_context(self):
c = Context({"a": 1, "b": "xyzzy"})
self.assertEqual(c["a"], 1)
self.assertEqual(c.push(), {})
c["a"] = 2
self.assertEqual(c["a"], 2)
self.assertEqual(c.get("a"), 2)
self.assertEqual(c.pop(), {"a": 2})
self.assertEqual(c["a"], 1)
self.assertEqual(c.get("foo", 42), 42)
self.assertEqual(c, mock.ANY)
def test_push_context_manager(self):
c = Context({"a": 1})
with c.push():
c["a"] = 2
self.assertEqual(c["a"], 2)
self.assertEqual(c["a"], 1)
with c.push(a=3):
self.assertEqual(c["a"], 3)
self.assertEqual(c["a"], 1)
def test_update_context_manager(self):
c = Context({"a": 1})
with c.update({}):
c["a"] = 2
self.assertEqual(c["a"], 2)
self.assertEqual(c["a"], 1)
with c.update({"a": 3}):
self.assertEqual(c["a"], 3)
self.assertEqual(c["a"], 1)
def test_push_context_manager_with_context_object(self):
c = Context({"a": 1})
with c.push(Context({"a": 3})):
self.assertEqual(c["a"], 3)
self.assertEqual(c["a"], 1)
def test_update_context_manager_with_context_object(self):
c = Context({"a": 1})
with c.update(Context({"a": 3})):
self.assertEqual(c["a"], 3)
self.assertEqual(c["a"], 1)
def test_push_proper_layering(self):
c = Context({"a": 1})
c.push(Context({"b": 2}))
c.push(Context({"c": 3, "d": {"z": "26"}}))
self.assertEqual(
c.dicts,
[
{"False": False, "None": None, "True": True},
{"a": 1},
{"b": 2},
{"c": 3, "d": {"z": "26"}},
],
)
def test_update_proper_layering(self):
c = Context({"a": 1})
c.update(Context({"b": 2}))
c.update(Context({"c": 3, "d": {"z": "26"}}))
self.assertEqual(
c.dicts,
[
{"False": False, "None": None, "True": True},
{"a": 1},
{"b": 2},
{"c": 3, "d": {"z": "26"}},
],
)
def test_setdefault(self):
c = Context()
x = c.setdefault("x", 42)
self.assertEqual(x, 42)
self.assertEqual(c["x"], 42)
x = c.setdefault("x", 100)
self.assertEqual(x, 42)
self.assertEqual(c["x"], 42)
def test_resolve_on_context_method(self):
"""
#17778 -- Variable shouldn't resolve RequestContext methods
"""
empty_context = Context()
with self.assertRaises(VariableDoesNotExist):
Variable("no_such_variable").resolve(empty_context)
with self.assertRaises(VariableDoesNotExist):
Variable("new").resolve(empty_context)
self.assertEqual(
Variable("new").resolve(Context({"new": "foo"})),
"foo",
)
def test_render_context(self):
test_context = RenderContext({"fruit": "papaya"})
# push() limits access to the topmost dict
test_context.push()
test_context["vegetable"] = "artichoke"
self.assertEqual(list(test_context), ["vegetable"])
self.assertNotIn("fruit", test_context)
with self.assertRaises(KeyError):
test_context["fruit"]
self.assertIsNone(test_context.get("fruit"))
def test_flatten_context(self):
a = Context()
a.update({"a": 2})
a.update({"b": 4})
a.update({"c": 8})
self.assertEqual(
a.flatten(),
{"False": False, "None": None, "True": True, "a": 2, "b": 4, "c": 8},
)
def test_flatten_context_with_context(self):
"""
Context.push() with a Context argument should work.
"""
a = Context({"a": 2})
a.push(Context({"z": "8"}))
self.assertEqual(
a.flatten(),
{
"False": False,
"None": None,
"True": True,
"a": 2,
"z": "8",
},
)
def test_flatten_context_with_context_copy(self):
ctx1 = Context({"a": 2})
ctx2 = ctx1.new(Context({"b": 4}))
self.assertEqual(
ctx2.dicts, [{"True": True, "False": False, "None": None}, {"b": 4}]
)
self.assertEqual(
ctx2.flatten(),
{"False": False, "None": None, "True": True, "b": 4},
)
def test_context_comparable(self):
"""
#21765 -- equality comparison should work
"""
test_data = {"x": "y", "v": "z", "d": {"o": object, "a": "b"}}
self.assertEqual(Context(test_data), Context(test_data))
a = Context()
b = Context()
self.assertEqual(a, b)
# update only a
a.update({"a": 1})
self.assertNotEqual(a, b)
# update both to check regression
a.update({"c": 3})
b.update({"c": 3})
self.assertNotEqual(a, b)
# make contexts equals again
b.update({"a": 1})
self.assertEqual(a, b)
def test_copy_request_context_twice(self):
"""
#24273 -- Copy twice shouldn't raise an exception
"""
RequestContext(HttpRequest()).new().new()
def test_set_upward(self):
c = Context({"a": 1})
c.set_upward("a", 2)
self.assertEqual(c.get("a"), 2)
def test_set_upward_empty_context(self):
empty_context = Context()
empty_context.set_upward("a", 1)
self.assertEqual(empty_context.get("a"), 1)
def test_set_upward_with_push(self):
"""
The highest context which has the given key is used.
"""
c = Context({"a": 1})
c.push({"a": 2})
c.set_upward("a", 3)
self.assertEqual(c.get("a"), 3)
c.pop()
self.assertEqual(c.get("a"), 1)
def test_set_upward_with_push_no_match(self):
"""
The highest context is used if the given key isn't found.
"""
c = Context({"b": 1})
c.push({"b": 2})
c.set_upward("a", 2)
self.assertEqual(len(c.dicts), 3)
self.assertEqual(c.dicts[-1]["a"], 2)
def context_process_returning_none(request):
return None
class RequestContextTests(SimpleTestCase):
request_factory = RequestFactory()
def test_include_only(self):
"""
#15721 -- ``{% include %}`` and ``RequestContext`` should work
together.
"""
engine = Engine(
loaders=[
(
"django.template.loaders.locmem.Loader",
{
"child": '{{ var|default:"none" }}',
},
),
]
)
request = self.request_factory.get("/")
ctx = RequestContext(request, {"var": "parent"})
self.assertEqual(
engine.from_string('{% include "child" %}').render(ctx), "parent"
)
self.assertEqual(
engine.from_string('{% include "child" only %}').render(ctx), "none"
)
def test_stack_size(self):
"""Optimized RequestContext construction (#7116)."""
request = self.request_factory.get("/")
ctx = RequestContext(request, {})
# The stack contains 4 items:
# [builtins, supplied context, context processor, empty dict]
self.assertEqual(len(ctx.dicts), 4)
def test_context_comparable(self):
# Create an engine without any context processors.
test_data = {"x": "y", "v": "z", "d": {"o": object, "a": "b"}}
# test comparing RequestContext to prevent problems if somebody
# adds __eq__ in the future
request = self.request_factory.get("/")
self.assertEqual(
RequestContext(request, dict_=test_data),
RequestContext(request, dict_=test_data),
)
def test_modify_context_and_render(self):
template = Template("{{ foo }}")
request = self.request_factory.get("/")
context = RequestContext(request, {})
context["foo"] = "foo"
self.assertEqual(template.render(context), "foo")
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"template_tests.test_context.context_process_returning_none",
],
},
}
],
)
def test_template_context_processor_returning_none(self):
request_context = RequestContext(HttpRequest())
msg = (
"Context processor context_process_returning_none didn't return a "
"dictionary."
)
with self.assertRaisesMessage(TypeError, msg):
with request_context.bind_template(Template("")):
pass
def test_context_copyable(self):
request_context = RequestContext(HttpRequest())
request_context_copy = copy(request_context)
self.assertIsInstance(request_context_copy, RequestContext)
self.assertEqual(request_context_copy.dicts, request_context.dicts)
self.assertIsNot(request_context_copy.dicts, request_context.dicts)
|
./temp_repos/django/django/template/context.py
|
./temp_repos/django/tests/template_tests/test_context.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'ContextPopException'.
Context:
- Class Name: ContextPopException
- Dependencies to Mock: dict_, processors, use_l10n, autoescape, context, use_tz, request
- Key Imports: contextlib, copy
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
ContextPopException
|
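A minimal sketch of one possible answer; ContextPopException has no dependencies of its own, so mocks are only used to exercise ContextDict, which is what triggers the pop behaviour:
import unittest
from unittest import mock

from django.template.context import Context, ContextDict, ContextPopException


class ContextPopExceptionTests(unittest.TestCase):
    def test_pop_past_bottom_raises(self):
        # Arrange: a context holding the builtins plus one supplied dict.
        context = Context({"a": 1})
        # Act: the first pop removes the supplied dict...
        popped = context.pop()
        # Assert
        self.assertEqual(popped, {"a": 1})
        # ...and popping past the builtins dict must fail.
        with self.assertRaises(ContextPopException):
            context.pop()

    def test_contextdict_exit_pops_its_context(self):
        # Arrange: a mocked context records registration and pop calls.
        fake_context = mock.MagicMock(dicts=[])
        # Act: leave the `with` block so __exit__ runs.
        with ContextDict(fake_context, {"k": "v"}):
            pass
        # Assert: exiting popped the wrapped context exactly once.
        fake_context.pop.assert_called_once_with()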
python
|
"""
Cross Site Request Forgery Middleware.
This module provides a middleware that implements protection
against request forgeries from other sites.
"""
import logging
import string
from collections import defaultdict
from urllib.parse import urlsplit
from django.conf import settings
from django.core.exceptions import DisallowedHost, ImproperlyConfigured
from django.http import HttpHeaders, UnreadablePostError
from django.urls import get_callable
from django.utils.cache import patch_vary_headers
from django.utils.crypto import constant_time_compare, get_random_string
from django.utils.deprecation import MiddlewareMixin
from django.utils.functional import cached_property
from django.utils.http import is_same_domain
from django.utils.log import log_response
from django.utils.regex_helper import _lazy_re_compile
logger = logging.getLogger("django.security.csrf")
# This matches if any character is not in CSRF_ALLOWED_CHARS.
invalid_token_chars_re = _lazy_re_compile("[^a-zA-Z0-9]")
REASON_BAD_ORIGIN = "Origin checking failed - %s does not match any trusted origins."
REASON_NO_REFERER = "Referer checking failed - no Referer."
REASON_BAD_REFERER = "Referer checking failed - %s does not match any trusted origins."
REASON_NO_CSRF_COOKIE = "CSRF cookie not set."
REASON_CSRF_TOKEN_MISSING = "CSRF token missing."
REASON_MALFORMED_REFERER = "Referer checking failed - Referer is malformed."
REASON_INSECURE_REFERER = (
"Referer checking failed - Referer is insecure while host is secure."
)
# The reason strings below are for passing to InvalidTokenFormat. They are
# phrases without a subject because they can be in reference to either the CSRF
# cookie or non-cookie token.
REASON_INCORRECT_LENGTH = "has incorrect length"
REASON_INVALID_CHARACTERS = "has invalid characters"
CSRF_SECRET_LENGTH = 32
CSRF_TOKEN_LENGTH = 2 * CSRF_SECRET_LENGTH
CSRF_ALLOWED_CHARS = string.ascii_letters + string.digits
CSRF_SESSION_KEY = "_csrftoken"
def _get_failure_view():
"""Return the view to be used for CSRF rejections."""
return get_callable(settings.CSRF_FAILURE_VIEW)
def _get_new_csrf_string():
return get_random_string(CSRF_SECRET_LENGTH, allowed_chars=CSRF_ALLOWED_CHARS)
def _mask_cipher_secret(secret):
"""
Given a secret (assumed to be a string of CSRF_ALLOWED_CHARS), generate a
token by adding a mask and applying it to the secret.
"""
mask = _get_new_csrf_string()
chars = CSRF_ALLOWED_CHARS
pairs = zip((chars.index(x) for x in secret), (chars.index(x) for x in mask))
cipher = "".join(chars[(x + y) % len(chars)] for x, y in pairs)
return mask + cipher
def _unmask_cipher_token(token):
"""
Given a token (assumed to be a string of CSRF_ALLOWED_CHARS, of length
CSRF_TOKEN_LENGTH, and that its first half is a mask), use it to decrypt
the second half to produce the original secret.
"""
mask = token[:CSRF_SECRET_LENGTH]
token = token[CSRF_SECRET_LENGTH:]
chars = CSRF_ALLOWED_CHARS
pairs = zip((chars.index(x) for x in token), (chars.index(x) for x in mask))
return "".join(chars[x - y] for x, y in pairs) # Note negative values are ok
def _add_new_csrf_cookie(request):
"""Generate a new random CSRF_COOKIE value, and add it to request.META."""
csrf_secret = _get_new_csrf_string()
request.META.update(
{
"CSRF_COOKIE": csrf_secret,
"CSRF_COOKIE_NEEDS_UPDATE": True,
}
)
return csrf_secret
def get_token(request):
"""
Return the CSRF token required for a POST form. The token is an
alphanumeric value. A new token is created if one is not already set.
A side effect of calling this function is to make the csrf_protect
decorator and the CsrfViewMiddleware add a CSRF cookie and a 'Vary: Cookie'
header to the outgoing response. For this reason, you may need to use this
function lazily, as is done by the csrf context processor.
"""
if "CSRF_COOKIE" in request.META:
csrf_secret = request.META["CSRF_COOKIE"]
# Since the cookie is being used, flag to send the cookie in
# process_response() (even if the client already has it) in order to
# renew the expiry timer.
request.META["CSRF_COOKIE_NEEDS_UPDATE"] = True
else:
csrf_secret = _add_new_csrf_cookie(request)
return _mask_cipher_secret(csrf_secret)
def rotate_token(request):
"""
Change the CSRF token in use for a request - should be done on login
for security purposes.
"""
_add_new_csrf_cookie(request)
class InvalidTokenFormat(Exception):
def __init__(self, reason):
self.reason = reason
def _check_token_format(token):
"""
Raise an InvalidTokenFormat error if the token has an invalid length or
characters that aren't allowed. The token argument can be a CSRF cookie
secret or non-cookie CSRF token, and either masked or unmasked.
"""
if len(token) not in (CSRF_TOKEN_LENGTH, CSRF_SECRET_LENGTH):
raise InvalidTokenFormat(REASON_INCORRECT_LENGTH)
# Make sure all characters are in CSRF_ALLOWED_CHARS.
if invalid_token_chars_re.search(token):
raise InvalidTokenFormat(REASON_INVALID_CHARACTERS)
def _does_token_match(request_csrf_token, csrf_secret):
"""
Return whether the given CSRF token matches the given CSRF secret, after
unmasking the token if necessary.
This function assumes that the request_csrf_token argument has been
validated to have the correct length (CSRF_SECRET_LENGTH or
CSRF_TOKEN_LENGTH characters) and allowed characters, and that if it has
length CSRF_TOKEN_LENGTH, it is a masked secret.
"""
# Only unmask tokens that are exactly CSRF_TOKEN_LENGTH characters long.
if len(request_csrf_token) == CSRF_TOKEN_LENGTH:
request_csrf_token = _unmask_cipher_token(request_csrf_token)
assert len(request_csrf_token) == CSRF_SECRET_LENGTH
return constant_time_compare(request_csrf_token, csrf_secret)
class RejectRequest(Exception):
def __init__(self, reason):
self.reason = reason
class CsrfViewMiddleware(MiddlewareMixin):
"""
Require a present and correct csrfmiddlewaretoken for POST requests that
have a CSRF cookie, and set an outgoing CSRF cookie.
This middleware should be used in conjunction with the {% csrf_token %}
template tag.
"""
@cached_property
def csrf_trusted_origins_hosts(self):
return [
urlsplit(origin).netloc.lstrip("*")
for origin in settings.CSRF_TRUSTED_ORIGINS
]
@cached_property
def allowed_origins_exact(self):
return {origin for origin in settings.CSRF_TRUSTED_ORIGINS if "*" not in origin}
@cached_property
def allowed_origin_subdomains(self):
"""
A mapping of allowed schemes to list of allowed netlocs, where all
subdomains of the netloc are allowed.
"""
allowed_origin_subdomains = defaultdict(list)
for parsed in (
urlsplit(origin)
for origin in settings.CSRF_TRUSTED_ORIGINS
if "*" in origin
):
allowed_origin_subdomains[parsed.scheme].append(parsed.netloc.lstrip("*"))
return allowed_origin_subdomains
# The _accept and _reject methods currently only exist for the sake of the
# requires_csrf_token decorator.
def _accept(self, request):
# Avoid checking the request twice by adding a custom attribute to
# request. This will be relevant when both decorator and middleware
# are used.
request.csrf_processing_done = True
return None
def _reject(self, request, reason):
response = _get_failure_view()(request, reason=reason)
log_response(
"Forbidden (%s): %s",
reason,
request.path,
response=response,
request=request,
logger=logger,
)
return response
def _get_secret(self, request):
"""
Return the CSRF secret originally associated with the request, or None
if it didn't have one.
If the CSRF_USE_SESSIONS setting is false, raises InvalidTokenFormat if
the request's secret has invalid characters or an invalid length.
"""
if settings.CSRF_USE_SESSIONS:
try:
csrf_secret = request.session.get(CSRF_SESSION_KEY)
except AttributeError:
raise ImproperlyConfigured(
"CSRF_USE_SESSIONS is enabled, but request.session is not "
"set. SessionMiddleware must appear before CsrfViewMiddleware "
"in MIDDLEWARE."
)
else:
try:
csrf_secret = request.COOKIES[settings.CSRF_COOKIE_NAME]
except KeyError:
csrf_secret = None
else:
# This can raise InvalidTokenFormat.
_check_token_format(csrf_secret)
if csrf_secret is None:
return None
# Django versions before 4.0 masked the secret before storing.
if len(csrf_secret) == CSRF_TOKEN_LENGTH:
csrf_secret = _unmask_cipher_token(csrf_secret)
return csrf_secret
def _set_csrf_cookie(self, request, response):
if settings.CSRF_USE_SESSIONS:
if request.session.get(CSRF_SESSION_KEY) != request.META["CSRF_COOKIE"]:
request.session[CSRF_SESSION_KEY] = request.META["CSRF_COOKIE"]
else:
response.set_cookie(
settings.CSRF_COOKIE_NAME,
request.META["CSRF_COOKIE"],
max_age=settings.CSRF_COOKIE_AGE,
domain=settings.CSRF_COOKIE_DOMAIN,
path=settings.CSRF_COOKIE_PATH,
secure=settings.CSRF_COOKIE_SECURE,
httponly=settings.CSRF_COOKIE_HTTPONLY,
samesite=settings.CSRF_COOKIE_SAMESITE,
)
# Set the Vary header since content varies with the CSRF cookie.
patch_vary_headers(response, ("Cookie",))
def _origin_verified(self, request):
request_origin = request.META["HTTP_ORIGIN"]
try:
good_host = request.get_host()
except DisallowedHost:
pass
else:
good_origin = "%s://%s" % (
"https" if request.is_secure() else "http",
good_host,
)
if request_origin == good_origin:
return True
if request_origin in self.allowed_origins_exact:
return True
try:
parsed_origin = urlsplit(request_origin)
except ValueError:
return False
parsed_origin_scheme = parsed_origin.scheme
parsed_origin_netloc = parsed_origin.netloc
return any(
is_same_domain(parsed_origin_netloc, host)
for host in self.allowed_origin_subdomains.get(parsed_origin_scheme, ())
)
def _check_referer(self, request):
referer = request.META.get("HTTP_REFERER")
if referer is None:
raise RejectRequest(REASON_NO_REFERER)
try:
referer = urlsplit(referer)
except ValueError:
raise RejectRequest(REASON_MALFORMED_REFERER)
# Make sure we have a valid URL for Referer.
if "" in (referer.scheme, referer.netloc):
raise RejectRequest(REASON_MALFORMED_REFERER)
# Ensure that our Referer is also secure.
if referer.scheme != "https":
raise RejectRequest(REASON_INSECURE_REFERER)
if any(
is_same_domain(referer.netloc, host)
for host in self.csrf_trusted_origins_hosts
):
return
# Allow matching the configured cookie domain.
good_referer = (
settings.SESSION_COOKIE_DOMAIN
if settings.CSRF_USE_SESSIONS
else settings.CSRF_COOKIE_DOMAIN
)
if good_referer is None:
# If no cookie domain is configured, allow matching the current
# host:port exactly if it's permitted by ALLOWED_HOSTS.
try:
# request.get_host() includes the port.
good_referer = request.get_host()
except DisallowedHost:
raise RejectRequest(REASON_BAD_REFERER % referer.geturl())
else:
server_port = request.get_port()
if server_port not in ("443", "80"):
good_referer = "%s:%s" % (good_referer, server_port)
if not is_same_domain(referer.netloc, good_referer):
raise RejectRequest(REASON_BAD_REFERER % referer.geturl())
def _bad_token_message(self, reason, token_source):
if token_source != "POST":
# Assume it is a settings.CSRF_HEADER_NAME value.
header_name = HttpHeaders.parse_header_name(token_source)
token_source = f"the {header_name!r} HTTP header"
return f"CSRF token from {token_source} {reason}."
def _check_token(self, request):
# Access csrf_secret via self._get_secret() as rotate_token() may have
# been called by an authentication middleware during the
# process_request() phase.
try:
csrf_secret = self._get_secret(request)
except InvalidTokenFormat as exc:
raise RejectRequest(f"CSRF cookie {exc.reason}.")
if csrf_secret is None:
# No CSRF cookie. For POST requests, we insist on a CSRF cookie,
# and in this way we can avoid all CSRF attacks, including login
# CSRF.
raise RejectRequest(REASON_NO_CSRF_COOKIE)
# Check non-cookie token for match.
request_csrf_token = ""
if request.method == "POST":
try:
request_csrf_token = request.POST.get("csrfmiddlewaretoken", "")
except UnreadablePostError:
# Handle a broken connection before we've completed reading the
# POST data. process_view shouldn't raise any exceptions, so
# we'll ignore and serve the user a 403 (assuming they're still
# listening, which they probably aren't because of the error).
pass
if request_csrf_token == "":
# Fall back to X-CSRFToken, to make things easier for AJAX, and
# possible for PUT/DELETE.
try:
# This can have length CSRF_SECRET_LENGTH or CSRF_TOKEN_LENGTH,
# depending on whether the client obtained the token from
# the DOM or the cookie (and if the cookie, whether the cookie
# was masked or unmasked).
request_csrf_token = request.META[settings.CSRF_HEADER_NAME]
except KeyError:
raise RejectRequest(REASON_CSRF_TOKEN_MISSING)
token_source = settings.CSRF_HEADER_NAME
else:
token_source = "POST"
try:
_check_token_format(request_csrf_token)
except InvalidTokenFormat as exc:
reason = self._bad_token_message(exc.reason, token_source)
raise RejectRequest(reason)
if not _does_token_match(request_csrf_token, csrf_secret):
reason = self._bad_token_message("incorrect", token_source)
raise RejectRequest(reason)
def process_request(self, request):
try:
csrf_secret = self._get_secret(request)
except InvalidTokenFormat:
_add_new_csrf_cookie(request)
else:
if csrf_secret is not None:
# Use the same secret next time. If the secret was originally
# masked, this also causes it to be replaced with the unmasked
# form, but only in cases where the secret is already getting
# saved anyways.
request.META["CSRF_COOKIE"] = csrf_secret
def process_view(self, request, callback, callback_args, callback_kwargs):
if getattr(request, "csrf_processing_done", False):
return None
# Wait until request.META["CSRF_COOKIE"] has been manipulated before
# bailing out, so that get_token still works
if getattr(callback, "csrf_exempt", False):
return None
# Assume that anything not defined as 'safe' by RFC 9110 needs
# protection
if request.method in ("GET", "HEAD", "OPTIONS", "TRACE"):
return self._accept(request)
if getattr(request, "_dont_enforce_csrf_checks", False):
# Mechanism to turn off CSRF checks for test suite. It comes after
# the creation of CSRF cookies, so that everything else continues
# to work exactly the same (e.g. cookies are sent, etc.), but
# before any branches that call the _reject method.
return self._accept(request)
# Reject the request if the Origin header doesn't match an allowed
# value.
if "HTTP_ORIGIN" in request.META:
if not self._origin_verified(request):
return self._reject(
request, REASON_BAD_ORIGIN % request.META["HTTP_ORIGIN"]
)
elif request.is_secure():
# If the Origin header wasn't provided, reject HTTPS requests if
# the Referer header doesn't match an allowed value.
#
# Suppose user visits http://example.com/
# An active network attacker (man-in-the-middle, MITM) sends a
# POST form that targets https://example.com/detonate-bomb/ and
# submits it via JavaScript.
#
# The attacker will need to provide a CSRF cookie and token, but
# that's no problem for a MITM and the session-independent secret
# we're using. So the MITM can circumvent the CSRF protection. This
# is true for any HTTP connection, but anyone using HTTPS expects
# better! For this reason, for https://example.com/ we need
# additional protection that treats http://example.com/ as
# completely untrusted. Under HTTPS, Barth et al. found that the
# Referer header is missing for same-domain requests in only about
# 0.2% of cases or less, so we can use strict Referer checking.
try:
self._check_referer(request)
except RejectRequest as exc:
return self._reject(request, exc.reason)
try:
self._check_token(request)
except RejectRequest as exc:
return self._reject(request, exc.reason)
return self._accept(request)
def process_response(self, request, response):
if request.META.get("CSRF_COOKIE_NEEDS_UPDATE"):
self._set_csrf_cookie(request, response)
# Unset the flag to prevent _set_csrf_cookie() from being
# unnecessarily called again in process_response() by other
# instances of CsrfViewMiddleware. This can happen e.g. when both a
# decorator and middleware are used. However,
# CSRF_COOKIE_NEEDS_UPDATE is still respected in subsequent calls
# e.g. in case rotate_token() is called in process_response() later
# by custom middleware but before those subsequent calls.
request.META["CSRF_COOKIE_NEEDS_UPDATE"] = False
return response
|
from unittest import mock
from django.template import TemplateDoesNotExist
from django.test import Client, RequestFactory, SimpleTestCase, override_settings
from django.utils.translation import override
from django.views.csrf import CSRF_FAILURE_TEMPLATE_NAME, csrf_failure
@override_settings(ROOT_URLCONF="view_tests.urls")
class CsrfViewTests(SimpleTestCase):
def setUp(self):
super().setUp()
self.client = Client(enforce_csrf_checks=True)
@override_settings(
USE_I18N=True,
MIDDLEWARE=[
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
],
)
def test_translation(self):
"""An invalid request is rejected with a localized error message."""
response = self.client.post("/")
self.assertContains(response, "Forbidden", status_code=403)
self.assertContains(
response, "CSRF verification failed. Request aborted.", status_code=403
)
with self.settings(LANGUAGE_CODE="nl"), override("en-us"):
response = self.client.post("/")
self.assertContains(response, "Verboden", status_code=403)
self.assertContains(
response,
"CSRF-verificatie mislukt. Verzoek afgebroken.",
status_code=403,
)
@override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https"))
def test_no_referer(self):
"""
Referer header is strictly checked for POST over HTTPS. Trigger the
exception by sending an incorrect referer.
"""
response = self.client.post("/", headers={"x-forwarded-proto": "https"})
self.assertContains(
response,
"You are seeing this message because this HTTPS site requires a "
"“Referer header” to be sent by your web browser, but "
"none was sent.",
status_code=403,
)
self.assertContains(
response,
"If you have configured your browser to disable “Referer” "
"headers, please re-enable them, at least for this site, or for "
"HTTPS connections, or for “same-origin” requests.",
status_code=403,
)
self.assertContains(
response,
"If you are using the <meta name="referrer" "
"content="no-referrer"> tag or including the "
"“Referrer-Policy: no-referrer” header, please remove them.",
status_code=403,
)
def test_no_cookies(self):
"""
The CSRF cookie is checked for POST. Failure to send this cookie should
provide a nice error message.
"""
response = self.client.post("/")
self.assertContains(
response,
"You are seeing this message because this site requires a CSRF "
"cookie when submitting forms. This cookie is required for "
"security reasons, to ensure that your browser is not being "
"hijacked by third parties.",
status_code=403,
)
@override_settings(TEMPLATES=[])
def test_no_django_template_engine(self):
"""
The CSRF view doesn't depend on the TEMPLATES configuration (#24388).
"""
response = self.client.post("/")
self.assertContains(response, "Forbidden", status_code=403)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"OPTIONS": {
"loaders": [
(
"django.template.loaders.locmem.Loader",
{
CSRF_FAILURE_TEMPLATE_NAME: (
"Test template for CSRF failure"
)
},
),
],
},
}
]
)
def test_custom_template(self):
"""A custom CSRF_FAILURE_TEMPLATE_NAME is used."""
response = self.client.post("/")
self.assertContains(response, "Test template for CSRF failure", status_code=403)
self.assertIs(response.wsgi_request, response.context.request)
def test_custom_template_does_not_exist(self):
"""An exception is raised if a nonexistent template is supplied."""
factory = RequestFactory()
request = factory.post("/")
with self.assertRaises(TemplateDoesNotExist):
csrf_failure(request, template_name="nonexistent.html")
def test_template_encoding(self):
"""
The template is loaded directly, not via a template loader, and should
be opened as utf-8 charset as is the default specified on template
engines.
"""
from django.views.csrf import Path
with mock.patch.object(Path, "open") as m:
csrf_failure(mock.MagicMock(), mock.Mock())
m.assert_called_once_with(encoding="utf-8")
@override_settings(DEBUG=True)
@mock.patch("django.views.csrf.get_docs_version", return_value="4.2")
def test_doc_links(self, mocked_get_complete_version):
response = self.client.post("/")
self.assertContains(response, "Forbidden", status_code=403)
self.assertNotContains(
response, "https://docs.djangoproject.com/en/dev/", status_code=403
)
self.assertContains(
response, "https://docs.djangoproject.com/en/4.2/", status_code=403
)
|
./temp_repos/django/django/middleware/csrf.py
|
./temp_repos/django/tests/view_tests/tests/test_csrf.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'InvalidTokenFormat'.
Context:
- Class Name: InvalidTokenFormat
- Dependencies to Mock: reason
- Key Imports: collections, string, django.conf, django.http, django.utils.log, django.utils.crypto, django.urls, logging, django.core.exceptions, django.utils.http
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
InvalidTokenFormat
|
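A minimal sketch of one possible answer, assuming django.middleware.csrf imports without configured settings; no mocks are needed because the class is a plain Exception subclass, so the test drives it through _check_token_format and the reason constants from the module shown above:
import unittest

from django.middleware.csrf import (
    CSRF_SECRET_LENGTH,
    REASON_INCORRECT_LENGTH,
    REASON_INVALID_CHARACTERS,
    InvalidTokenFormat,
    _check_token_format,
)


class InvalidTokenFormatTests(unittest.TestCase):
    def test_stores_reason(self):
        # Arrange / Act
        exc = InvalidTokenFormat("has incorrect length")
        # Assert: the reason is kept verbatim on the exception.
        self.assertEqual(exc.reason, "has incorrect length")
        self.assertIsInstance(exc, Exception)

    def test_rejects_bad_length(self):
        # Act / Assert: a too-short token raises with the length reason.
        with self.assertRaises(InvalidTokenFormat) as cm:
            _check_token_format("abc")
        self.assertEqual(cm.exception.reason, REASON_INCORRECT_LENGTH)

    def test_rejects_disallowed_characters(self):
        # Act / Assert: right length, wrong alphabet.
        with self.assertRaises(InvalidTokenFormat) as cm:
            _check_token_format("!" * CSRF_SECRET_LENGTH)
        self.assertEqual(cm.exception.reason, REASON_INVALID_CHARACTERS)

    def test_accepts_valid_secret(self):
        # Arrange: a token of the right length and allowed characters.
        token = "a" * CSRF_SECRET_LENGTH
        # Act / Assert: no exception means the format is valid.
        self.assertIsNone(_check_token_format(token))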
python
|
import functools
import inspect
import itertools
import re
import sys
import types
import warnings
from pathlib import Path
from django.conf import settings
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.template import Context, Engine, TemplateDoesNotExist
from django.template.defaultfilters import pprint
from django.urls import URLResolver, resolve
from django.utils import timezone
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str
from django.utils.module_loading import import_string
from django.utils.regex_helper import _lazy_re_compile
from django.utils.version import get_docs_version
from django.views.decorators.csp import csp_override, csp_report_only_override
from django.views.decorators.debug import coroutine_functions_to_sensitive_variables
# Minimal Django templates engine to render the error templates
# regardless of the project's TEMPLATES setting. Templates are
# read directly from the filesystem so that the error handler
# works even if the template loader is broken.
DEBUG_ENGINE = Engine(
debug=True,
libraries={"i18n": "django.templatetags.i18n"},
)
def builtin_template_path(name):
"""
Return a path to a builtin template.
Avoid calling this function at the module level or in a class-definition
because __file__ may not exist, e.g. in frozen environments.
"""
return Path(__file__).parent / "templates" / name
class ExceptionCycleWarning(UserWarning):
pass
class CallableSettingWrapper:
"""
    Object to wrap a callable appearing in settings.
    * Not to be called in the debug page (#21345).
    * Not to break the debug page if the callable forbids setting attributes
      (#23070).
"""
def __init__(self, callable_setting):
self._wrapped = callable_setting
def __repr__(self):
return repr(self._wrapped)
@csp_override({})
@csp_report_only_override({})
def technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""
Create a technical server error response. The last three arguments are
the values returned from sys.exc_info() and friends.
"""
reporter = get_exception_reporter_class(request)(request, exc_type, exc_value, tb)
preferred_type = request.get_preferred_type(["text/html", "text/plain"])
if preferred_type == "text/html":
html = reporter.get_traceback_html()
return HttpResponse(html, status=status_code, content_type="text/html")
else:
text = reporter.get_traceback_text()
return HttpResponse(
text, status=status_code, content_type="text/plain; charset=utf-8"
)
@functools.lru_cache
def get_default_exception_reporter_filter():
# Instantiate the default filter for the first time and cache it.
return import_string(settings.DEFAULT_EXCEPTION_REPORTER_FILTER)()
def get_exception_reporter_filter(request):
default_filter = get_default_exception_reporter_filter()
return getattr(request, "exception_reporter_filter", default_filter)
def get_exception_reporter_class(request):
default_exception_reporter_class = import_string(
settings.DEFAULT_EXCEPTION_REPORTER
)
return getattr(
request, "exception_reporter_class", default_exception_reporter_class
)
def get_caller(request):
resolver_match = request.resolver_match
if resolver_match is None:
try:
resolver_match = resolve(request.path)
except Http404:
pass
return "" if resolver_match is None else resolver_match._func_path
class SafeExceptionReporterFilter:
"""
Use annotations made by the sensitive_post_parameters and
sensitive_variables decorators to filter out sensitive information.
"""
cleansed_substitute = "********************"
hidden_settings = _lazy_re_compile(
"API|AUTH|TOKEN|KEY|SECRET|PASS|SIGNATURE|HTTP_COOKIE", flags=re.I
)
def cleanse_setting(self, key, value):
"""
Cleanse an individual setting key/value of sensitive content. If the
value is a dictionary, recursively cleanse the keys in that dictionary.
"""
if key == settings.SESSION_COOKIE_NAME:
is_sensitive = True
else:
try:
is_sensitive = self.hidden_settings.search(key)
except TypeError:
is_sensitive = False
if is_sensitive:
cleansed = self.cleansed_substitute
elif isinstance(value, dict):
cleansed = {k: self.cleanse_setting(k, v) for k, v in value.items()}
elif isinstance(value, list):
cleansed = [self.cleanse_setting("", v) for v in value]
elif isinstance(value, tuple):
cleansed = tuple([self.cleanse_setting("", v) for v in value])
else:
cleansed = value
if callable(cleansed):
cleansed = CallableSettingWrapper(cleansed)
return cleansed
def get_safe_settings(self):
"""
Return a dictionary of the settings module with values of sensitive
settings replaced with stars (*********).
"""
settings_dict = {}
for k in dir(settings):
if k.isupper():
settings_dict[k] = self.cleanse_setting(k, getattr(settings, k))
return settings_dict
def get_safe_request_meta(self, request):
"""
Return a dictionary of request.META with sensitive values redacted.
"""
if not hasattr(request, "META"):
return {}
return {k: self.cleanse_setting(k, v) for k, v in request.META.items()}
def get_safe_cookies(self, request):
"""
Return a dictionary of request.COOKIES with sensitive values redacted.
"""
if not hasattr(request, "COOKIES"):
return {}
return {k: self.cleanse_setting(k, v) for k, v in request.COOKIES.items()}
def is_active(self, request):
"""
        This filter adds safety in production environments (i.e. DEBUG
        is False). If DEBUG is True, your site is not safe anyway.
        This hook is provided as a convenience to easily activate or
        deactivate the filter on a per-request basis.
"""
return settings.DEBUG is False
def get_cleansed_multivaluedict(self, request, multivaluedict):
"""
Replace the keys in a MultiValueDict marked as sensitive with stars.
This mitigates leaking sensitive POST parameters if something like
request.POST['nonexistent_key'] throws an exception (#21098).
"""
sensitive_post_parameters = getattr(request, "sensitive_post_parameters", [])
if self.is_active(request) and sensitive_post_parameters:
multivaluedict = multivaluedict.copy()
for param in sensitive_post_parameters:
if param in multivaluedict:
multivaluedict[param] = self.cleansed_substitute
return multivaluedict
def get_post_parameters(self, request):
"""
Replace the values of POST parameters marked as sensitive with
stars (*********).
"""
if request is None:
return {}
else:
sensitive_post_parameters = getattr(
request, "sensitive_post_parameters", []
)
if self.is_active(request) and sensitive_post_parameters:
cleansed = request.POST.copy()
if sensitive_post_parameters == "__ALL__":
# Cleanse all parameters.
for k in cleansed:
cleansed[k] = self.cleansed_substitute
return cleansed
else:
# Cleanse only the specified parameters.
for param in sensitive_post_parameters:
if param in cleansed:
cleansed[param] = self.cleansed_substitute
return cleansed
else:
return request.POST
def cleanse_special_types(self, request, value):
try:
            # If value is lazy or a complex object of another kind, this check
            # might raise an exception. The isinstance() call also forces
            # evaluation of lazy MultiValueDicts, so they are detected too.
is_multivalue_dict = isinstance(value, MultiValueDict)
except Exception as e:
return "{!r} while evaluating {!r}".format(e, value)
if is_multivalue_dict:
# Cleanse MultiValueDicts (request.POST is the one we usually care
# about)
value = self.get_cleansed_multivaluedict(request, value)
return value
def get_traceback_frame_variables(self, request, tb_frame):
"""
Replace the values of variables marked as sensitive with
stars (*********).
"""
sensitive_variables = None
# Coroutines don't have a proper `f_back` so they need to be inspected
# separately. Handle this by stashing the registered sensitive
# variables in a global dict indexed by `hash(file_path:line_number)`.
if (
tb_frame.f_code.co_flags & inspect.CO_COROUTINE != 0
and tb_frame.f_code.co_name != "sensitive_variables_wrapper"
):
key = hash(
f"{tb_frame.f_code.co_filename}:{tb_frame.f_code.co_firstlineno}"
)
sensitive_variables = coroutine_functions_to_sensitive_variables.get(
key, None
)
if sensitive_variables is None:
# Loop through the frame's callers to see if the
# sensitive_variables decorator was used.
current_frame = tb_frame
while current_frame is not None:
if (
current_frame.f_code.co_name == "sensitive_variables_wrapper"
and "sensitive_variables_wrapper" in current_frame.f_locals
):
# The sensitive_variables decorator was used, so take note
# of the sensitive variables' names.
wrapper = current_frame.f_locals["sensitive_variables_wrapper"]
sensitive_variables = getattr(wrapper, "sensitive_variables", None)
break
current_frame = current_frame.f_back
cleansed = {}
if self.is_active(request) and sensitive_variables:
if sensitive_variables == "__ALL__":
# Cleanse all variables
for name in tb_frame.f_locals:
cleansed[name] = self.cleansed_substitute
else:
# Cleanse specified variables
for name, value in tb_frame.f_locals.items():
if name in sensitive_variables:
value = self.cleansed_substitute
else:
value = self.cleanse_special_types(request, value)
cleansed[name] = value
else:
# Potentially cleanse the request and any MultiValueDicts if they
# are one of the frame variables.
for name, value in tb_frame.f_locals.items():
cleansed[name] = self.cleanse_special_types(request, value)
if (
tb_frame.f_code.co_name == "sensitive_variables_wrapper"
and "sensitive_variables_wrapper" in tb_frame.f_locals
):
# For good measure, obfuscate the decorated function's arguments in
# the sensitive_variables decorator's frame, in case the variables
# associated with those arguments were meant to be obfuscated from
# the decorated function's frame.
cleansed["func_args"] = self.cleansed_substitute
cleansed["func_kwargs"] = self.cleansed_substitute
return cleansed.items()
class ExceptionReporter:
"""Organize and coordinate reporting on exceptions."""
@property
def html_template_path(self):
return builtin_template_path("technical_500.html")
@property
def text_template_path(self):
return builtin_template_path("technical_500.txt")
def __init__(self, request, exc_type, exc_value, tb, is_email=False):
self.request = request
self.filter = get_exception_reporter_filter(self.request)
self.exc_type = exc_type
self.exc_value = exc_value
self.tb = tb
self.is_email = is_email
self.template_info = getattr(self.exc_value, "template_debug", None)
self.template_does_not_exist = False
self.postmortem = None
def _get_raw_insecure_uri(self):
"""
Return an absolute URI from variables available in this request. Skip
        the allowed hosts protection, so this may return an insecure URI.
"""
return "{scheme}://{host}{path}".format(
scheme=self.request.scheme,
host=self.request._get_raw_host(),
path=self.request.get_full_path(),
)
def get_traceback_data(self):
"""Return a dictionary containing traceback information."""
if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist):
self.template_does_not_exist = True
self.postmortem = self.exc_value.chain or [self.exc_value]
frames = self.get_traceback_frames()
for i, frame in enumerate(frames):
if "vars" in frame:
frame_vars = []
for k, v in frame["vars"]:
v = pprint(v)
# Trim large blobs of data
if len(v) > 4096:
v = "%s… <trimmed %d bytes string>" % (v[0:4096], len(v))
frame_vars.append((k, v))
frame["vars"] = frame_vars
frames[i] = frame
unicode_hint = ""
if self.exc_type and issubclass(self.exc_type, UnicodeError):
start = getattr(self.exc_value, "start", None)
end = getattr(self.exc_value, "end", None)
if start is not None and end is not None:
unicode_str = self.exc_value.args[1]
unicode_hint = force_str(
unicode_str[max(start - 5, 0) : min(end + 5, len(unicode_str))],
"ascii",
errors="replace",
)
from django import get_version
if self.request is None:
user_str = None
else:
try:
user_str = str(self.request.user)
except Exception:
# request.user may raise OperationalError if the database is
# unavailable, for example.
user_str = "[unable to retrieve the current user]"
c = {
"is_email": self.is_email,
"unicode_hint": unicode_hint,
"frames": frames,
"request": self.request,
"request_meta": self.filter.get_safe_request_meta(self.request),
"request_COOKIES_items": self.filter.get_safe_cookies(self.request).items(),
"user_str": user_str,
"filtered_POST_items": list(
self.filter.get_post_parameters(self.request).items()
),
"settings": self.filter.get_safe_settings(),
"sys_executable": sys.executable,
"sys_version_info": "%d.%d.%d" % sys.version_info[0:3],
"server_time": timezone.now(),
"django_version_info": get_version(),
"sys_path": sys.path,
"template_info": self.template_info,
"template_does_not_exist": self.template_does_not_exist,
"postmortem": self.postmortem,
}
if self.request is not None:
c["request_GET_items"] = self.request.GET.items()
c["request_FILES_items"] = self.request.FILES.items()
c["request_insecure_uri"] = self._get_raw_insecure_uri()
c["raising_view_name"] = get_caller(self.request)
# Check whether exception info is available
if self.exc_type:
c["exception_type"] = self.exc_type.__name__
if self.exc_value:
c["exception_value"] = getattr(
self.exc_value, "raw_error_message", self.exc_value
)
if exc_notes := getattr(self.exc_value, "__notes__", None):
c["exception_notes"] = "\n" + "\n".join(exc_notes)
if frames:
c["lastframe"] = frames[-1]
return c
def get_traceback_html(self):
"""Return HTML version of debug 500 HTTP error page."""
with self.html_template_path.open(encoding="utf-8") as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context(self.get_traceback_data(), use_l10n=False)
return t.render(c)
def get_traceback_text(self):
"""Return plain text version of debug 500 HTTP error page."""
with self.text_template_path.open(encoding="utf-8") as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context(self.get_traceback_data(), autoescape=False, use_l10n=False)
return t.render(c)
def _get_source(self, filename, loader, module_name):
source = None
if hasattr(loader, "get_source"):
try:
source = loader.get_source(module_name)
except ImportError:
pass
if source is not None:
source = source.splitlines()
if source is None:
try:
with open(filename, "rb") as fp:
source = fp.read().splitlines()
except OSError:
pass
return source
def _get_lines_from_file(
self, filename, lineno, context_lines, loader=None, module_name=None
):
"""
Return context_lines before and after lineno from file.
Return (pre_context_lineno, pre_context, context_line, post_context).
"""
source = self._get_source(filename, loader, module_name)
if source is None:
return None, [], None, []
# If we just read the source from a file, or if the loader did not
# apply tokenize.detect_encoding to decode the source into a
# string, then we should do that ourselves.
if isinstance(source[0], bytes):
encoding = "ascii"
for line in source[:2]:
# File coding may be specified. Match pattern from PEP-263
# (https://www.python.org/dev/peps/pep-0263/)
match = re.search(rb"coding[:=]\s*([-\w.]+)", line)
if match:
encoding = match[1].decode("ascii")
break
source = [str(sline, encoding, "replace") for sline in source]
lower_bound = max(0, lineno - context_lines)
upper_bound = lineno + context_lines
try:
pre_context = source[lower_bound:lineno]
context_line = source[lineno]
post_context = source[lineno + 1 : upper_bound]
except IndexError:
return None, [], None, []
return lower_bound, pre_context, context_line, post_context
def _get_explicit_or_implicit_cause(self, exc_value):
explicit = getattr(exc_value, "__cause__", None)
suppress_context = getattr(exc_value, "__suppress_context__", None)
implicit = getattr(exc_value, "__context__", None)
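        # Chaining rules applied here (PEP 3134): `raise B from A` sets
        # __cause__ to A, which wins; a bare `raise B` inside `except A:`
        # sets __context__ to A, which is used unless `raise ... from None`
        # set __suppress_context__.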
return explicit or (None if suppress_context else implicit)
def get_traceback_frames(self):
# Get the exception and all its causes
exceptions = []
exc_value = self.exc_value
while exc_value:
exceptions.append(exc_value)
exc_value = self._get_explicit_or_implicit_cause(exc_value)
if exc_value in exceptions:
warnings.warn(
"Cycle in the exception chain detected: exception '%s' "
"encountered again." % exc_value,
ExceptionCycleWarning,
)
# Avoid infinite loop if there's a cyclic reference (#29393).
break
frames = []
# No exceptions were supplied to ExceptionReporter
if not exceptions:
return frames
# In case there's just one exception, take the traceback from self.tb
exc_value = exceptions.pop()
tb = self.tb if not exceptions else exc_value.__traceback__
while True:
frames.extend(self.get_exception_traceback_frames(exc_value, tb))
try:
exc_value = exceptions.pop()
except IndexError:
break
tb = exc_value.__traceback__
return frames
def get_exception_traceback_frames(self, exc_value, tb):
exc_cause = self._get_explicit_or_implicit_cause(exc_value)
exc_cause_explicit = getattr(exc_value, "__cause__", True)
if tb is None:
yield {
"exc_cause": exc_cause,
"exc_cause_explicit": exc_cause_explicit,
"tb": None,
"type": "user",
}
while tb is not None:
# Support for __traceback_hide__ which is used by a few libraries
# to hide internal frames.
if tb.tb_frame.f_locals.get("__traceback_hide__"):
tb = tb.tb_next
continue
filename = tb.tb_frame.f_code.co_filename
function = tb.tb_frame.f_code.co_name
lineno = tb.tb_lineno - 1
loader = tb.tb_frame.f_globals.get("__loader__")
module_name = tb.tb_frame.f_globals.get("__name__") or ""
(
pre_context_lineno,
pre_context,
context_line,
post_context,
) = self._get_lines_from_file(
filename,
lineno,
7,
loader,
module_name,
)
if pre_context_lineno is None:
pre_context_lineno = lineno
pre_context = []
context_line = "<source code not available>"
post_context = []
colno = tb_area_colno = ""
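            # co_positions() yields one (lineno, end_lineno, col, end_col)
            # tuple per bytecode instruction; tb_lasti is a byte offset and
            # CPython instructions are two bytes, so tb_lasti // 2 indexes
            # the instruction that raised.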
_, _, start_column, end_column = next(
itertools.islice(
tb.tb_frame.f_code.co_positions(), tb.tb_lasti // 2, None
)
)
if start_column and end_column:
underline = "^" * (end_column - start_column)
spaces = " " * (start_column + len(str(lineno + 1)) + 2)
colno = f"\n{spaces}{underline}"
tb_area_spaces = " " * (
4 + start_column - (len(context_line) - len(context_line.lstrip()))
)
tb_area_colno = f"\n{tb_area_spaces}{underline}"
yield {
"exc_cause": exc_cause,
"exc_cause_explicit": exc_cause_explicit,
"tb": tb,
"type": "django" if module_name.startswith("django.") else "user",
"filename": filename,
"function": function,
"lineno": lineno + 1,
"vars": self.filter.get_traceback_frame_variables(
self.request, tb.tb_frame
),
"id": id(tb),
"pre_context": pre_context,
"context_line": context_line,
"post_context": post_context,
"pre_context_lineno": pre_context_lineno + 1,
"colno": colno,
"tb_area_colno": tb_area_colno,
}
tb = tb.tb_next
@csp_override({})
@csp_report_only_override({})
def technical_404_response(request, exception):
"""Create a technical 404 error response. `exception` is the Http404."""
try:
error_url = exception.args[0]["path"]
except (IndexError, TypeError, KeyError):
error_url = request.path_info[1:] # Trim leading slash
try:
tried = exception.args[0]["tried"]
except (IndexError, TypeError, KeyError):
resolved = True
tried = request.resolver_match.tried if request.resolver_match else None
else:
resolved = False
if not tried or ( # empty URLconf
request.path_info == "/"
and len(tried) == 1
and len(tried[0]) == 1 # default URLconf
and getattr(tried[0][0], "app_name", "")
== getattr(tried[0][0], "namespace", "")
== "admin"
):
return default_urlconf(request)
patterns_with_debug_info = []
for urlpattern in tried or ():
patterns = []
for inner_pattern in urlpattern:
wrapper = {"tried": inner_pattern}
if isinstance(inner_pattern, URLResolver):
wrapper["debug_key"] = "namespace"
wrapper["debug_val"] = inner_pattern.namespace
else:
wrapper["debug_key"] = "name"
wrapper["debug_val"] = inner_pattern.name
patterns.append(wrapper)
patterns_with_debug_info.append(patterns)
urlconf = getattr(request, "urlconf", settings.ROOT_URLCONF)
if isinstance(urlconf, types.ModuleType):
urlconf = urlconf.__name__
with builtin_template_path("technical_404.html").open(encoding="utf-8") as fh:
t = DEBUG_ENGINE.from_string(fh.read())
reporter_filter = get_default_exception_reporter_filter()
c = Context(
{
"urlconf": urlconf,
"root_urlconf": settings.ROOT_URLCONF,
"request_path": error_url,
"urlpatterns": tried, # Unused, left for compatibility.
"urlpatterns_debug": patterns_with_debug_info,
"resolved": resolved,
"reason": str(exception),
"request": request,
"settings": reporter_filter.get_safe_settings(),
"raising_view_name": get_caller(request),
}
)
return HttpResponseNotFound(t.render(c))
def default_urlconf(request):
"""Create an empty URLconf 404 error response."""
with builtin_template_path("default_urlconf.html").open(encoding="utf-8") as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context(
{
"version": get_docs_version(),
}
)
return HttpResponse(t.render(c))
|
import importlib
import inspect
import os
import re
import sys
import tempfile
import threading
from io import StringIO
from pathlib import Path
from unittest import mock, skipIf
from asgiref.sync import async_to_sync, iscoroutinefunction
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db import DatabaseError, connection
from django.http import Http404, HttpRequest, HttpResponse
from django.shortcuts import render
from django.template import TemplateDoesNotExist
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import LoggingCaptureMixin
from django.urls import path, reverse
from django.urls.converters import IntConverter
from django.utils.functional import SimpleLazyObject
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import mark_safe
from django.views.debug import (
CallableSettingWrapper,
ExceptionCycleWarning,
ExceptionReporter,
)
from django.views.debug import Path as DebugPath
from django.views.debug import (
SafeExceptionReporterFilter,
default_urlconf,
get_default_exception_reporter_filter,
technical_404_response,
technical_500_response,
)
from django.views.decorators.debug import sensitive_post_parameters, sensitive_variables
from ..views import (
async_sensitive_method_view,
async_sensitive_method_view_nested,
async_sensitive_view,
async_sensitive_view_nested,
custom_exception_reporter_filter_view,
index_page,
multivalue_dict_key_error,
non_sensitive_view,
paranoid_view,
sensitive_args_function_caller,
sensitive_kwargs_function_caller,
sensitive_method_view,
sensitive_view,
)
class User:
def __str__(self):
return "jacob"
class WithoutEmptyPathUrls:
urlpatterns = [path("url/", index_page, name="url")]
class CallableSettingWrapperTests(SimpleTestCase):
"""Unittests for CallableSettingWrapper"""
def test_repr(self):
class WrappedCallable:
def __repr__(self):
return "repr from the wrapped callable"
def __call__(self):
pass
actual = repr(CallableSettingWrapper(WrappedCallable()))
self.assertEqual(actual, "repr from the wrapped callable")
@override_settings(DEBUG=True, ROOT_URLCONF="view_tests.urls")
class DebugViewTests(SimpleTestCase):
def test_files(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises/")
self.assertEqual(response.status_code, 500)
data = {
"file_data.txt": SimpleUploadedFile("file_data.txt", b"haha"),
}
with self.assertLogs("django.request", "ERROR"):
response = self.client.post("/raises/", data)
self.assertContains(response, "file_data.txt", status_code=500)
self.assertNotContains(response, "haha", status_code=500)
def test_400(self):
        # When DEBUG=True, technical_500_response() is called.
with self.assertLogs("django.security", "WARNING"):
response = self.client.get("/raises400/")
self.assertContains(response, '<div class="context" id="', status_code=400)
def test_400_bad_request(self):
        # When DEBUG=True, technical_500_response() is called.
with self.assertLogs("django.request", "WARNING") as cm:
response = self.client.get("/raises400_bad_request/")
self.assertContains(response, '<div class="context" id="', status_code=400)
self.assertEqual(
cm.records[0].getMessage(),
"Malformed request syntax: /raises400_bad_request/",
)
# Ensure no 403.html template exists to test the default case.
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
}
]
)
def test_403(self):
response = self.client.get("/raises403/")
self.assertContains(response, "<h1>403 Forbidden</h1>", status_code=403)
# Set up a test 403.html template.
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"OPTIONS": {
"loaders": [
(
"django.template.loaders.locmem.Loader",
{
"403.html": (
"This is a test template for a 403 error "
"({{ exception }})."
),
},
),
],
},
}
]
)
def test_403_template(self):
response = self.client.get("/raises403/")
self.assertContains(response, "test template", status_code=403)
self.assertContains(response, "(Insufficient Permissions).", status_code=403)
def test_404(self):
response = self.client.get("/raises404/")
self.assertNotContains(
response,
'<pre class="exception_value">',
status_code=404,
)
self.assertContains(
response,
"<p>The current path, <code>not-in-urls</code>, didn’t match any "
"of these.</p>",
status_code=404,
html=True,
)
def test_404_not_in_urls(self):
response = self.client.get("/not-in-urls")
self.assertNotContains(response, "Raised by:", status_code=404)
self.assertNotContains(
response,
'<pre class="exception_value">',
status_code=404,
)
self.assertContains(
response, "Django tried these URL patterns", status_code=404
)
self.assertContains(
response,
"<code>technical404/ [name='my404']</code>",
status_code=404,
html=True,
)
self.assertContains(
response,
"<p>The current path, <code>not-in-urls</code>, didn’t match any "
"of these.</p>",
status_code=404,
html=True,
)
# Pattern and view name of a RegexURLPattern appear.
self.assertContains(
response, r"^regex-post/(?P<pk>[0-9]+)/$", status_code=404
)
self.assertContains(response, "[name='regex-post']", status_code=404)
# Pattern and view name of a RoutePattern appear.
self.assertContains(response, r"path-post/<int:pk>/", status_code=404)
self.assertContains(response, "[name='path-post']", status_code=404)
@override_settings(ROOT_URLCONF=WithoutEmptyPathUrls)
def test_404_empty_path_not_in_urls(self):
response = self.client.get("/")
self.assertContains(
response,
"<p>The empty path didn’t match any of these.</p>",
status_code=404,
html=True,
)
def test_technical_404(self):
response = self.client.get("/technical404/")
self.assertContains(response, '<header id="summary">', status_code=404)
self.assertContains(response, '<main id="info">', status_code=404)
self.assertContains(response, '<footer id="explanation">', status_code=404)
self.assertContains(
response,
'<pre class="exception_value">Testing technical 404.</pre>',
status_code=404,
html=True,
)
self.assertContains(response, "Raised by:", status_code=404)
self.assertContains(
response,
"<td>view_tests.views.technical404</td>",
status_code=404,
)
self.assertContains(
response,
"<p>The current path, <code>technical404/</code>, matched the "
"last one.</p>",
status_code=404,
html=True,
)
def test_classbased_technical_404(self):
response = self.client.get("/classbased404/")
self.assertContains(
response,
'<th scope="row">Raised by:</th><td>view_tests.views.Http404View</td>',
status_code=404,
html=True,
)
def test_technical_500(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/")
self.assertContains(response, '<header id="summary">', status_code=500)
self.assertContains(response, '<main id="info">', status_code=500)
self.assertContains(response, '<footer id="explanation">', status_code=500)
self.assertContains(
response,
'<th scope="row">Raised during:</th><td>view_tests.views.raises500</td>',
status_code=500,
html=True,
)
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/", headers={"accept": "text/plain"})
self.assertContains(
response,
"Raised during: view_tests.views.raises500",
status_code=500,
)
def test_technical_500_content_type_negotiation(self):
for accepts, content_type in [
("text/plain", "text/plain; charset=utf-8"),
("text/html", "text/html"),
("text/html,text/plain;q=0.9", "text/html"),
("text/plain,text/html;q=0.9", "text/plain; charset=utf-8"),
("text/*", "text/html"),
]:
with self.subTest(accepts=accepts):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get(
"/raises500/", headers={"accept": accepts}
)
self.assertEqual(response.status_code, 500)
self.assertEqual(response["Content-Type"], content_type)
def test_classbased_technical_500(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/classbased500/")
self.assertContains(
response,
'<th scope="row">Raised during:</th>'
"<td>view_tests.views.Raises500View</td>",
status_code=500,
html=True,
)
with self.assertLogs("django.request", "ERROR"):
response = self.client.get(
"/classbased500/", headers={"accept": "text/plain"}
)
self.assertContains(
response,
"Raised during: view_tests.views.Raises500View",
status_code=500,
)
def test_non_l10ned_numeric_ids(self):
"""
        Numeric IDs and the line numbers in fancy traceback context blocks
        shouldn't be localized.
"""
with self.settings(DEBUG=True):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/")
# We look for a HTML fragment of the form
# '<div class="context" id="c38123208">',
# not '<div class="context" id="c38,123,208"'.
self.assertContains(response, '<div class="context" id="', status_code=500)
match = re.search(
b'<div class="context" id="(?P<id>[^"]+)">', response.content
)
self.assertIsNotNone(match)
id_repr = match["id"]
self.assertFalse(
re.search(b"[^c0-9]", id_repr),
"Numeric IDs in debug response HTML page shouldn't be localized "
"(value: %s)." % id_repr.decode(),
)
def test_template_exceptions(self):
with self.assertLogs("django.request", "ERROR"):
try:
self.client.get(reverse("template_exception"))
except Exception:
raising_loc = inspect.trace()[-1][-2][0].strip()
self.assertNotEqual(
raising_loc.find('raise Exception("boom")'),
-1,
"Failed to find 'raise Exception' in last frame of "
"traceback, instead found: %s" % raising_loc,
)
@skipIf(
sys.platform == "win32",
"Raises OSError instead of TemplateDoesNotExist on Windows.",
)
def test_safestring_in_exception(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/safestring_exception/")
self.assertNotContains(
response,
"<script>alert(1);</script>",
status_code=500,
html=True,
)
self.assertContains(
response,
"<script>alert(1);</script>",
count=3,
status_code=500,
html=True,
)
def test_template_loader_postmortem(self):
"""Tests for not existing file"""
template_name = "notfound.html"
with tempfile.NamedTemporaryFile(prefix=template_name) as tmpfile:
tempdir = os.path.dirname(tmpfile.name)
template_path = os.path.join(tempdir, template_name)
with (
override_settings(
TEMPLATES=[
{
"BACKEND": (
"django.template.backends.django.DjangoTemplates"
),
"DIRS": [tempdir],
}
]
),
self.assertLogs("django.request", "ERROR"),
):
response = self.client.get(
reverse(
"raises_template_does_not_exist", kwargs={"path": template_name}
)
)
self.assertContains(
response,
"%s (Source does not exist)" % template_path,
status_code=500,
count=2,
)
# Assert as HTML.
self.assertContains(
response,
"<li><code>django.template.loaders.filesystem.Loader</code>: "
"%s (Source does not exist)</li>"
% os.path.join(tempdir, "notfound.html"),
status_code=500,
html=True,
)
def test_no_template_source_loaders(self):
"""
        Make sure that if you don't specify a template, the debug view
        doesn't blow up.
"""
with self.assertLogs("django.request", "ERROR"):
with self.assertRaises(TemplateDoesNotExist):
self.client.get("/render_no_template/")
@override_settings(ROOT_URLCONF="view_tests.default_urls")
def test_default_urlconf_template(self):
"""
Make sure that the default URLconf template is shown instead of the
technical 404 page, if the user has not altered their URLconf yet.
"""
response = self.client.get("/")
self.assertContains(
response, "<h1>The install worked successfully! Congratulations!</h1>"
)
@override_settings(
ROOT_URLCONF="view_tests.default_urls", FORCE_SCRIPT_NAME="/FORCED_PREFIX"
)
def test_default_urlconf_script_name(self):
response = self.client.request(**{"path": "/FORCED_PREFIX/"})
self.assertContains(
response, "<h1>The install worked successfully! Congratulations!</h1>"
)
@override_settings(ROOT_URLCONF="view_tests.default_urls")
def test_default_urlconf_technical_404(self):
response = self.client.get("/favicon.ico")
self.assertContains(
response,
"<code>\nadmin/\n[namespace='admin']\n</code>",
status_code=404,
html=True,
)
@override_settings(ROOT_URLCONF="view_tests.regression_21530_urls")
def test_regression_21530(self):
"""
Regression test for bug #21530.
        If the admin app include is replaced with exactly one URL
pattern, then the technical 404 template should be displayed.
The bug here was that an AttributeError caused a 500 response.
"""
response = self.client.get("/")
self.assertContains(
response, "Page not found <small>(404)</small>", status_code=404
)
def test_template_encoding(self):
"""
        The templates are loaded directly, not via a template loader, and
        should be opened with the utf-8 encoding, which is the default
        specified for template engines.
"""
with mock.patch.object(DebugPath, "open") as m:
default_urlconf(None)
m.assert_called_once_with(encoding="utf-8")
m.reset_mock()
technical_404_response(mock.MagicMock(), mock.Mock())
m.assert_called_once_with(encoding="utf-8")
def test_technical_404_converter_raise_404(self):
with mock.patch.object(IntConverter, "to_python", side_effect=Http404):
response = self.client.get("/path-post/1/")
self.assertContains(response, "Page not found", status_code=404)
def test_exception_reporter_from_request(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/custom_reporter_class_view/")
self.assertContains(response, "custom traceback text", status_code=500)
@override_settings(
DEFAULT_EXCEPTION_REPORTER="view_tests.views.CustomExceptionReporter"
)
def test_exception_reporter_from_settings(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/")
self.assertContains(response, "custom traceback text", status_code=500)
@override_settings(
DEFAULT_EXCEPTION_REPORTER="view_tests.views.TemplateOverrideExceptionReporter"
)
def test_template_override_exception_reporter(self):
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/")
self.assertContains(
response,
"<h1>Oh no, an error occurred!</h1>",
status_code=500,
html=True,
)
with self.assertLogs("django.request", "ERROR"):
response = self.client.get("/raises500/", headers={"accept": "text/plain"})
self.assertContains(response, "Oh dear, an error occurred!", status_code=500)
class DebugViewQueriesAllowedTests(SimpleTestCase):
# May need a query to initialize MySQL connection
databases = {"default"}
def test_handle_db_exception(self):
"""
Ensure the debug view works when a database exception is raised by
performing an invalid query and passing the exception to the debug
view.
"""
with connection.cursor() as cursor:
try:
cursor.execute("INVALID SQL")
except DatabaseError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get("/"), *exc_info)
self.assertContains(response, "OperationalError at /", status_code=500)
@override_settings(
DEBUG=True,
ROOT_URLCONF="view_tests.urls",
# No template directories are configured, so no templates will be found.
TEMPLATES=[
{
"BACKEND": "django.template.backends.dummy.TemplateStrings",
}
],
)
class NonDjangoTemplatesDebugViewTests(SimpleTestCase):
def test_400(self):
        # When DEBUG=True, technical_500_response() is called.
with self.assertLogs("django.security", "WARNING"):
response = self.client.get("/raises400/")
self.assertContains(response, '<div class="context" id="', status_code=400)
def test_400_bad_request(self):
        # When DEBUG=True, technical_500_response() is called.
with self.assertLogs("django.request", "WARNING") as cm:
response = self.client.get("/raises400_bad_request/")
self.assertContains(response, '<div class="context" id="', status_code=400)
self.assertEqual(
cm.records[0].getMessage(),
"Malformed request syntax: /raises400_bad_request/",
)
def test_403(self):
response = self.client.get("/raises403/")
self.assertContains(response, "<h1>403 Forbidden</h1>", status_code=403)
def test_404(self):
response = self.client.get("/raises404/")
self.assertEqual(response.status_code, 404)
def test_template_not_found_error(self):
# Raises a TemplateDoesNotExist exception and shows the debug view.
url = reverse(
"raises_template_does_not_exist", kwargs={"path": "notfound.html"}
)
with self.assertLogs("django.request", "ERROR"):
response = self.client.get(url)
self.assertContains(response, '<div class="context" id="', status_code=500)
class ExceptionReporterTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get("/test_view/")
request.user = User()
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>ValueError at /test_view/</h1>", html)
self.assertIn(
'<pre class="exception_value">Can't find my keys</pre>', html
)
self.assertIn('<th scope="row">Request Method:</th>', html)
self.assertIn('<th scope="row">Request URL:</th>', html)
self.assertIn('<h3 id="user-info">USER</h3>', html)
self.assertIn("<p>jacob</p>", html)
self.assertIn('<th scope="row">Exception Type:</th>', html)
self.assertIn('<th scope="row">Exception Value:</th>', html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertNotIn("<p>Request data not supplied</p>", html)
self.assertIn("<p>No POST data</p>", html)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>ValueError</h1>", html)
self.assertIn(
'<pre class="exception_value">Can't find my keys</pre>', html
)
self.assertNotIn('<th scope="row">Request Method:</th>', html)
self.assertNotIn('<th scope="row">Request URL:</th>', html)
self.assertNotIn('<h3 id="user-info">USER</h3>', html)
self.assertIn('<th scope="row">Exception Type:</th>', html)
self.assertIn('<th scope="row">Exception Value:</th>', html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertIn("<p>Request data not supplied</p>", html)
def test_sharing_traceback(self):
try:
raise ValueError("Oops")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn(
'<form action="https://dpaste.com/" name="pasteform" '
'id="pasteform" method="post">',
html,
)
def test_eol_support(self):
"""
        The ExceptionReporter supports Unix, Windows, and Macintosh EOL markers.
"""
LINES = ["print %d" % i for i in range(1, 6)]
reporter = ExceptionReporter(None, None, None, None)
for newline in ["\n", "\r\n", "\r"]:
fd, filename = tempfile.mkstemp(text=False)
os.write(fd, (newline.join(LINES) + newline).encode())
os.close(fd)
try:
self.assertEqual(
reporter._get_lines_from_file(filename, 3, 2),
(1, LINES[1:3], LINES[3], LINES[4:]),
)
finally:
os.unlink(filename)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>Report at /test_view/</h1>", html)
self.assertIn(
'<pre class="exception_value">No exception message supplied</pre>', html
)
self.assertIn('<th scope="row">Request Method:</th>', html)
self.assertIn('<th scope="row">Request URL:</th>', html)
self.assertNotIn('<th scope="row">Exception Type:</th>', html)
self.assertNotIn('<th scope="row">Exception Value:</th>', html)
self.assertNotIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertNotIn("<p>Request data not supplied</p>", html)
def test_suppressed_context(self):
try:
try:
raise RuntimeError("Can't find my keys")
except RuntimeError:
raise ValueError("Can't find my keys") from None
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>ValueError</h1>", html)
self.assertIn(
'<pre class="exception_value">Can't find my keys</pre>', html
)
self.assertIn('<th scope="row">Exception Type:</th>', html)
self.assertIn('<th scope="row">Exception Value:</th>', html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertIn("<p>Request data not supplied</p>", html)
self.assertNotIn("During handling of the above exception", html)
def test_innermost_exception_without_traceback(self):
try:
try:
raise RuntimeError("Oops")
except Exception as exc:
new_exc = RuntimeError("My context")
exc.__context__ = new_exc
raise
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
frames = reporter.get_traceback_frames()
self.assertEqual(len(frames), 2)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>RuntimeError</h1>", html)
self.assertIn('<pre class="exception_value">Oops</pre>', html)
self.assertIn('<th scope="row">Exception Type:</th>', html)
self.assertIn('<th scope="row">Exception Value:</th>', html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertIn("<p>Request data not supplied</p>", html)
self.assertIn(
"During handling of the above exception (My context), another "
"exception occurred",
html,
)
self.assertInHTML('<li class="frame user">None</li>', html)
self.assertIn("Traceback (most recent call last):\n None", html)
text = reporter.get_traceback_text()
self.assertIn("Exception Type: RuntimeError", text)
self.assertIn("Exception Value: Oops", text)
self.assertIn("Traceback (most recent call last):\n None", text)
self.assertIn(
"During handling of the above exception (My context), another "
"exception occurred",
text,
)
def test_exception_with_notes(self):
request = self.rf.get("/test_view/")
try:
try:
raise RuntimeError("Oops")
except Exception as err:
err.add_note("First Note")
err.add_note("Second Note")
err.add_note(mark_safe("<script>alert(1);</script>"))
raise err
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn(
'<pre class="exception_value">Oops\nFirst Note\nSecond Note\n'
"<script>alert(1);</script></pre>",
html,
)
self.assertIn(
"Exception Value: Oops\nFirst Note\nSecond Note\n"
"<script>alert(1);</script>",
html,
)
text = reporter.get_traceback_text()
self.assertIn(
"Exception Value: Oops\nFirst Note\nSecond Note\n"
"<script>alert(1);</script>",
text,
)
def test_mid_stack_exception_without_traceback(self):
try:
try:
raise RuntimeError("Inner Oops")
except Exception as exc:
new_exc = RuntimeError("My context")
new_exc.__context__ = exc
raise RuntimeError("Oops") from new_exc
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>RuntimeError</h1>", html)
self.assertIn('<pre class="exception_value">Oops</pre>', html)
self.assertIn('<th scope="row">Exception Type:</th>', html)
self.assertIn('<th scope="row">Exception Value:</th>', html)
self.assertIn("<h2>Traceback ", html)
self.assertInHTML('<li class="frame user">Traceback: None</li>', html)
self.assertIn(
"During handling of the above exception (Inner Oops), another "
"exception occurred:\n Traceback: None",
html,
)
text = reporter.get_traceback_text()
self.assertIn("Exception Type: RuntimeError", text)
self.assertIn("Exception Value: Oops", text)
self.assertIn("Traceback (most recent call last):", text)
self.assertIn(
"During handling of the above exception (Inner Oops), another "
"exception occurred:\n Traceback: None",
text,
)
def test_reporting_of_nested_exceptions(self):
request = self.rf.get("/test_view/")
try:
try:
raise AttributeError(mark_safe("<p>Top level</p>"))
except AttributeError as explicit:
try:
raise ValueError(mark_safe("<p>Second exception</p>")) from explicit
except ValueError:
raise IndexError(mark_safe("<p>Final exception</p>"))
except Exception:
# Custom exception handler, just pass it into ExceptionReporter
exc_type, exc_value, tb = sys.exc_info()
explicit_exc = (
"The above exception ({0}) was the direct cause of the following exception:"
)
implicit_exc = (
"During handling of the above exception ({0}), another exception occurred:"
)
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
        # Both messages appear twice on the page: once rendered as HTML,
        # once as plain text (for the pastebin form).
self.assertEqual(
2, html.count(explicit_exc.format("<p>Top level</p>"))
)
self.assertEqual(
2, html.count(implicit_exc.format("<p>Second exception</p>"))
)
self.assertEqual(10, html.count("<p>Final exception</p>"))
text = reporter.get_traceback_text()
self.assertIn(explicit_exc.format("<p>Top level</p>"), text)
self.assertIn(implicit_exc.format("<p>Second exception</p>"), text)
self.assertEqual(3, text.count("<p>Final exception</p>"))
@skipIf(
sys._xoptions.get("no_debug_ranges", False)
or os.environ.get("PYTHONNODEBUGRANGES", False),
"Fine-grained error locations are disabled.",
)
def test_highlight_error_position(self):
request = self.rf.get("/test_view/")
try:
try:
raise AttributeError("Top level")
except AttributeError as explicit:
try:
raise ValueError(mark_safe("<p>2nd exception</p>")) from explicit
except ValueError:
raise IndexError("Final exception")
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn(
"<pre> raise AttributeError("Top level")\n"
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^</pre>",
html,
)
self.assertIn(
"<pre> raise ValueError(mark_safe("
""<p>2nd exception</p>")) from explicit\n"
" "
"^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^</pre>",
html,
)
self.assertIn(
"<pre> raise IndexError("Final exception")\n"
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^</pre>",
html,
)
# Pastebin.
self.assertIn(
" raise AttributeError("Top level")\n"
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
html,
)
self.assertIn(
" raise ValueError(mark_safe("
""<p>2nd exception</p>")) from explicit\n"
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
html,
)
self.assertIn(
" raise IndexError("Final exception")\n"
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
html,
)
# Text traceback.
text = reporter.get_traceback_text()
self.assertIn(
' raise AttributeError("Top level")\n'
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
text,
)
self.assertIn(
' raise ValueError(mark_safe("<p>2nd exception</p>")) from explicit\n'
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
text,
)
self.assertIn(
' raise IndexError("Final exception")\n'
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
text,
)
def test_reporting_frames_without_source(self):
try:
source = "def funcName():\n raise Error('Whoops')\nfuncName()"
namespace = {}
code = compile(source, "generated", "exec")
exec(code, namespace)
except Exception:
exc_type, exc_value, tb = sys.exc_info()
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
frames = reporter.get_traceback_frames()
last_frame = frames[-1]
self.assertEqual(last_frame["context_line"], "<source code not available>")
self.assertEqual(last_frame["filename"], "generated")
self.assertEqual(last_frame["function"], "funcName")
self.assertEqual(last_frame["lineno"], 2)
html = reporter.get_traceback_html()
self.assertIn(
'<span class="fname">generated</span>, line 2, in funcName',
html,
)
self.assertIn(
'<code class="fname">generated</code>, line 2, in funcName',
html,
)
self.assertIn(
'"generated", line 2, in funcName\n <source code not available>',
html,
)
text = reporter.get_traceback_text()
self.assertIn(
'"generated", line 2, in funcName\n <source code not available>',
text,
)
def test_reporting_frames_source_not_match(self):
try:
source = "def funcName():\n raise Error('Whoops')\nfuncName()"
namespace = {}
code = compile(source, "generated", "exec")
exec(code, namespace)
except Exception:
exc_type, exc_value, tb = sys.exc_info()
with mock.patch(
"django.views.debug.ExceptionReporter._get_source",
return_value=["wrong source"],
):
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
frames = reporter.get_traceback_frames()
last_frame = frames[-1]
self.assertEqual(last_frame["context_line"], "<source code not available>")
self.assertEqual(last_frame["filename"], "generated")
self.assertEqual(last_frame["function"], "funcName")
self.assertEqual(last_frame["lineno"], 2)
html = reporter.get_traceback_html()
self.assertIn(
'<span class="fname">generated</span>, line 2, in funcName',
html,
)
self.assertIn(
'<code class="fname">generated</code>, line 2, in funcName',
html,
)
self.assertIn(
'"generated", line 2, in funcName\n'
" <source code not available>",
html,
)
text = reporter.get_traceback_text()
self.assertIn(
'"generated", line 2, in funcName\n <source code not available>',
text,
)
def test_reporting_frames_for_cyclic_reference(self):
try:
def test_func():
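                # Re-raising the inner cause while the outer exception is
                # being handled sets inner.__context__ = outer; since
                # outer.__cause__ is already inner, the chain is cyclic.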
try:
raise RuntimeError("outer") from RuntimeError("inner")
except RuntimeError as exc:
raise exc.__cause__
test_func()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
def generate_traceback_frames(*args, **kwargs):
nonlocal tb_frames
tb_frames = reporter.get_traceback_frames()
tb_frames = None
tb_generator = threading.Thread(target=generate_traceback_frames, daemon=True)
msg = (
"Cycle in the exception chain detected: exception 'inner' "
"encountered again."
)
with self.assertWarnsMessage(ExceptionCycleWarning, msg):
tb_generator.start()
tb_generator.join(timeout=5)
if tb_generator.is_alive():
# tb_generator is a daemon that runs until the main thread/process
# exits. This is resource heavy when running the full test suite.
# Setting the following values to None makes
# reporter.get_traceback_frames() exit early.
exc_value.__traceback__ = exc_value.__context__ = exc_value.__cause__ = None
tb_generator.join()
self.fail("Cyclic reference in Exception Reporter.get_traceback_frames()")
if tb_frames is None:
            # This can happen if the thread generating the traceback was
            # killed, or if an exception occurred while generating it.
self.fail("Traceback generation failed")
last_frame = tb_frames[-1]
self.assertIn("raise exc.__cause__", last_frame["context_line"])
self.assertEqual(last_frame["filename"], __file__)
self.assertEqual(last_frame["function"], "test_func")
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>Report at /test_view/</h1>", html)
self.assertIn(
'<pre class="exception_value">I'm a little teapot</pre>', html
)
self.assertIn('<th scope="row">Request Method:</th>', html)
self.assertIn('<th scope="row">Request URL:</th>', html)
self.assertNotIn('<th scope="row">Exception Type:</th>', html)
self.assertNotIn('<th scope="row">Exception Value:</th>', html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertNotIn("<p>Request data not supplied</p>", html)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>Report</h1>", html)
self.assertIn(
'<pre class="exception_value">I'm a little teapot</pre>', html
)
self.assertNotIn('<th scope="row">Request Method:</th>', html)
self.assertNotIn('<th scope="row">Request URL:</th>', html)
self.assertNotIn('<th scope="row">Exception Type:</th>', html)
self.assertNotIn('<th scope="row">Exception Value:</th>', html)
self.assertIn("<h2>Traceback ", html)
self.assertIn("<h2>Request information</h2>", html)
self.assertIn("<p>Request data not supplied</p>", html)
def test_non_utf8_values_handling(self):
"""
Non-UTF-8 exceptions/values should not make the output generation
choke.
"""
try:
class NonUtf8Output(Exception):
def __repr__(self):
return b"EXC\xe9EXC"
somevar = b"VAL\xe9VAL" # NOQA
raise NonUtf8Output()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn("VAL\\xe9VAL", html)
self.assertIn("EXC\\xe9EXC", html)
def test_local_variable_escaping(self):
"""Safe strings in local variables are escaped."""
try:
local = mark_safe("<p>Local variable</p>")
raise ValueError(local)
except Exception:
exc_type, exc_value, tb = sys.exc_info()
html = ExceptionReporter(None, exc_type, exc_value, tb).get_traceback_html()
self.assertIn(
'<td class="code"><pre>'<p>Local variable</p>'</pre>'
"</td>",
html,
)
def test_unprintable_values_handling(self):
"Unprintable values should not make the output generation choke."
try:
class OomOutput:
def __repr__(self):
raise MemoryError("OOM")
oomvalue = OomOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<td class="code"><pre>Error in formatting', html)
def test_too_large_values_handling(self):
"Large values should not create a large HTML."
large = 256 * 1024
repr_of_str_adds = len(repr(""))
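        # repr("") == "''", so repr_of_str_adds counts the two quote
        # characters that repr() adds around the large string.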
try:
class LargeOutput:
def __repr__(self):
return repr("A" * large)
largevalue = LargeOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
            self.assertEqual(len(html) // 1024 // 128, 0)  # Still fits in 128 KB.
self.assertIn(
"<trimmed %d bytes string>" % (large + repr_of_str_adds,), html
)
def test_encoding_error(self):
"""
A UnicodeError displays a portion of the problematic string. HTML in
safe strings is escaped.
"""
try:
mark_safe("abcdefghijkl<p>mnὀp</p>qrstuwxyz").encode("ascii")
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn("<h2>Unicode error hint</h2>", html)
self.assertIn("The string that could not be encoded/decoded was: ", html)
self.assertIn("<strong><p>mnὀp</p></strong>", html)
def test_unfrozen_importlib(self):
"""
importlib is not a frozen app, but its loader thinks it's frozen which
results in an ImportError. Refs #21443.
"""
try:
request = self.rf.get("/test_view/")
importlib.import_module("abc.def.invalid.name")
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>ModuleNotFoundError at /test_view/</h1>", html)
def test_ignore_traceback_evaluation_exceptions(self):
"""
Don't trip over exceptions generated by crafted objects when
evaluating them while cleansing (#24455).
"""
class BrokenEvaluation(Exception):
pass
def broken_setup():
raise BrokenEvaluation
request = self.rf.get("/test_view/")
broken_lazy = SimpleLazyObject(broken_setup)
try:
bool(broken_lazy)
except BrokenEvaluation:
exc_type, exc_value, tb = sys.exc_info()
self.assertIn(
"BrokenEvaluation",
ExceptionReporter(request, exc_type, exc_value, tb).get_traceback_html(),
"Evaluation exception reason not mentioned in traceback",
)
@override_settings(ALLOWED_HOSTS="example.com")
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get("/", headers={"host": "evil.com"})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertIn("http://evil.com/", html)
def test_request_with_items_key(self):
"""
An exception report can be generated for requests with 'items' in
request GET, POST, FILES, or COOKIES QueryDicts.
"""
value = '<td>items</td><td class="code"><pre>'Oops'</pre></td>'
# GET
request = self.rf.get("/test_view/?items=Oops")
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(value, html)
# POST
request = self.rf.post("/test_view/", data={"items": "Oops"})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(value, html)
# FILES
fp = StringIO("filecontent")
request = self.rf.post("/test_view/", data={"name": "filename", "items": fp})
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(
'<td>items</td><td class="code"><pre><InMemoryUploadedFile: '
"items (application/octet-stream)></pre></td>",
html,
)
# COOKIES
rf = RequestFactory()
rf.cookies["items"] = "Oops"
request = rf.get("/test_view/")
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertInHTML(
'<td>items</td><td class="code"><pre>'Oops'</pre></td>', html
)
def test_exception_fetching_user(self):
"""
The error page can be rendered if the current user can't be retrieved
(such as when the database is unavailable).
"""
class ExceptionUser:
def __str__(self):
raise Exception()
request = self.rf.get("/test_view/")
request.user = ExceptionUser()
try:
raise ValueError("Oops")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertInHTML("<h1>ValueError at /test_view/</h1>", html)
self.assertIn('<pre class="exception_value">Oops</pre>', html)
self.assertIn('<h3 id="user-info">USER</h3>', html)
self.assertIn("<p>[unable to retrieve the current user]</p>", html)
text = reporter.get_traceback_text()
self.assertIn("USER: [unable to retrieve the current user]", text)
def test_template_encoding(self):
"""
        The templates are loaded directly, not via a template loader, and
        should be opened with the utf-8 encoding, which is the default
        specified for template engines.
"""
reporter = ExceptionReporter(None, None, None, None)
with mock.patch.object(DebugPath, "open") as m:
reporter.get_traceback_html()
m.assert_called_once_with(encoding="utf-8")
m.reset_mock()
reporter.get_traceback_text()
m.assert_called_once_with(encoding="utf-8")
@override_settings(ALLOWED_HOSTS=["example.com"])
def test_get_raw_insecure_uri(self):
factory = RequestFactory(headers={"host": "evil.com"})
tests = [
("////absolute-uri", "http://evil.com//absolute-uri"),
("/?foo=bar", "http://evil.com/?foo=bar"),
("/path/with:colons", "http://evil.com/path/with:colons"),
]
for url, expected in tests:
with self.subTest(url=url):
request = factory.get(url)
reporter = ExceptionReporter(request, None, None, None)
self.assertEqual(reporter._get_raw_insecure_uri(), expected)
class PlainTextReportTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get("/test_view/")
request.user = User()
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn("ValueError at /test_view/", text)
self.assertIn("Can't find my keys", text)
self.assertIn("Request Method:", text)
self.assertIn("Request URL:", text)
self.assertIn("USER: jacob", text)
self.assertIn("Exception Type:", text)
self.assertIn("Exception Value:", text)
self.assertIn("Traceback (most recent call last):", text)
self.assertIn("Request information:", text)
self.assertNotIn("Request data not supplied", text)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn("ValueError", text)
self.assertIn("Can't find my keys", text)
self.assertNotIn("Request Method:", text)
self.assertNotIn("Request URL:", text)
self.assertNotIn("USER:", text)
self.assertIn("Exception Type:", text)
self.assertIn("Exception Value:", text)
self.assertIn("Traceback (most recent call last):", text)
self.assertIn("Request data not supplied", text)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, None, None, None)
reporter.get_traceback_text()
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get("/test_view/")
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(DEBUG=True)
def test_template_exception(self):
request = self.rf.get("/test_view/")
try:
render(request, "debug/template_error.html")
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
templ_path = Path(
Path(__file__).parents[1], "templates", "debug", "template_error.html"
)
self.assertIn(
"Template error:\n"
"In template %(path)s, error at line 2\n"
" 'cycle' tag requires at least two arguments\n"
" 1 : Template with error:\n"
" 2 : {%% cycle %%} \n"
" 3 : " % {"path": templ_path},
text,
)
def test_request_with_items_key(self):
"""
An exception report can be generated for requests with 'items' in
request GET, POST, FILES, or COOKIES QueryDicts.
"""
# GET
request = self.rf.get("/test_view/?items=Oops")
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
# POST
request = self.rf.post("/test_view/", data={"items": "Oops"})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
# FILES
fp = StringIO("filecontent")
request = self.rf.post("/test_view/", data={"name": "filename", "items": fp})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = <InMemoryUploadedFile:", text)
# COOKIES
rf = RequestFactory()
rf.cookies["items"] = "Oops"
request = rf.get("/test_view/")
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("items = 'Oops'", text)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(ALLOWED_HOSTS="example.com")
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get("/", headers={"host": "evil.com"})
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("http://evil.com/", text)
class ExceptionReportTestMixin:
# Mixin used in the ExceptionReporterFilterTests and
# NonHTMLResponseExceptionReporterFilter tests below
breakfast_data = {
"sausage-key": "sausage-value",
"baked-beans-key": "baked-beans-value",
"hash-brown-key": "hash-brown-value",
"bacon-key": "bacon-value",
}
def verify_unsafe_response(
self, view, check_for_vars=True, check_for_POST_params=True
):
"""
Asserts that potentially sensitive info is displayed in the response.
"""
request = self.rf.post("/some_url/", self.breakfast_data)
if iscoroutinefunction(view):
response = async_to_sync(view)(request)
else:
response = view(request)
if check_for_vars:
# All variables are shown.
self.assertContains(response, "cooked_eggs", status_code=500)
self.assertContains(response, "scrambled", status_code=500)
self.assertContains(response, "sauce", status_code=500)
self.assertContains(response, "worcestershire", status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertContains(response, k, status_code=500)
self.assertContains(response, v, status_code=500)
def verify_safe_response(
self, view, check_for_vars=True, check_for_POST_params=True
):
"""
Asserts that certain sensitive info is not displayed in the response.
"""
request = self.rf.post("/some_url/", self.breakfast_data)
if iscoroutinefunction(view):
response = async_to_sync(view)(request)
else:
response = view(request)
if check_for_vars:
# Non-sensitive variable's name and value are shown.
self.assertContains(response, "cooked_eggs", status_code=500)
self.assertContains(response, "scrambled", status_code=500)
# Sensitive variable's name is shown but not its value.
self.assertContains(response, "sauce", status_code=500)
self.assertNotContains(response, "worcestershire", status_code=500)
if check_for_POST_params:
for k in self.breakfast_data:
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# Non-sensitive POST parameters' values are shown.
self.assertContains(response, "baked-beans-value", status_code=500)
self.assertContains(response, "hash-brown-value", status_code=500)
# Sensitive POST parameters' values are not shown.
self.assertNotContains(response, "sausage-value", status_code=500)
self.assertNotContains(response, "bacon-value", status_code=500)
def verify_paranoid_response(
self, view, check_for_vars=True, check_for_POST_params=True
):
"""
Asserts that no variables or POST parameters are displayed in the
response.
"""
request = self.rf.post("/some_url/", self.breakfast_data)
response = view(request)
if check_for_vars:
# Show variable names but not their values.
self.assertContains(response, "cooked_eggs", status_code=500)
self.assertNotContains(response, "scrambled", status_code=500)
self.assertContains(response, "sauce", status_code=500)
self.assertNotContains(response, "worcestershire", status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# No POST parameters' values are shown.
self.assertNotContains(response, v, status_code=500)
def verify_unsafe_email(self, view, check_for_POST_params=True):
"""
Asserts that potentially sensitive info is displayed in the email
report.
"""
with self.settings(ADMINS=["[email protected]"]):
mail.outbox = [] # Empty outbox
request = self.rf.post("/some_url/", self.breakfast_data)
if iscoroutinefunction(view):
async_to_sync(view)(request)
else:
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frame vars are never shown in plain text email reports.
body_plain = str(email.body)
self.assertNotIn("cooked_eggs", body_plain)
self.assertNotIn("scrambled", body_plain)
self.assertNotIn("sauce", body_plain)
self.assertNotIn("worcestershire", body_plain)
# Frame vars are shown in HTML email reports.
body_html = str(email.alternatives[0].content)
self.assertIn("cooked_eggs", body_html)
self.assertIn("scrambled", body_html)
self.assertIn("sauce", body_html)
self.assertIn("worcestershire", body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertIn(k, body_plain)
self.assertIn(v, body_plain)
self.assertIn(k, body_html)
self.assertIn(v, body_html)
def verify_safe_email(self, view, check_for_POST_params=True):
"""
Asserts that certain sensitive info is not displayed in the email
report.
"""
with self.settings(ADMINS=["[email protected]"]):
mail.outbox = [] # Empty outbox
request = self.rf.post("/some_url/", self.breakfast_data)
if iscoroutinefunction(view):
async_to_sync(view)(request)
else:
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frame vars are never shown in plain text email reports.
body_plain = str(email.body)
self.assertNotIn("cooked_eggs", body_plain)
self.assertNotIn("scrambled", body_plain)
self.assertNotIn("sauce", body_plain)
self.assertNotIn("worcestershire", body_plain)
# Frame vars are shown in HTML email reports.
body_html = str(email.alternatives[0].content)
self.assertIn("cooked_eggs", body_html)
self.assertIn("scrambled", body_html)
self.assertIn("sauce", body_html)
self.assertNotIn("worcestershire", body_html)
if check_for_POST_params:
for k in self.breakfast_data:
# All POST parameters' names are shown.
self.assertIn(k, body_plain)
# Non-sensitive POST parameters' values are shown.
self.assertIn("baked-beans-value", body_plain)
self.assertIn("hash-brown-value", body_plain)
self.assertIn("baked-beans-value", body_html)
self.assertIn("hash-brown-value", body_html)
# Sensitive POST parameters' values are not shown.
self.assertNotIn("sausage-value", body_plain)
self.assertNotIn("bacon-value", body_plain)
self.assertNotIn("sausage-value", body_html)
self.assertNotIn("bacon-value", body_html)
def verify_paranoid_email(self, view):
"""
Asserts that no variables or POST parameters are displayed in the email
report.
"""
with self.settings(ADMINS=["[email protected]"]):
mail.outbox = [] # Empty outbox
request = self.rf.post("/some_url/", self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frame vars are never shown in plain text email reports.
body = str(email.body)
self.assertNotIn("cooked_eggs", body)
self.assertNotIn("scrambled", body)
self.assertNotIn("sauce", body)
self.assertNotIn("worcestershire", body)
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, body)
# No POST parameters' values are shown.
self.assertNotIn(v, body)
@override_settings(ROOT_URLCONF="view_tests.urls")
class ExceptionReporterFilterTests(
ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase
):
"""
Sensitive information can be filtered out of error reports (#14614).
"""
rf = RequestFactory()
sensitive_settings = [
"SECRET_KEY",
"SECRET_KEY_FALLBACKS",
"PASSWORD",
"API_KEY",
"SOME_TOKEN",
"MY_AUTH",
]
def test_non_sensitive_request(self):
"""
Everything (request info and frame variables) can be seen
in the default error reports for non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
def test_sensitive_request(self):
"""
Sensitive POST parameters and frame variables cannot be
seen in the default error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view)
self.verify_unsafe_email(sensitive_view)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view)
self.verify_safe_email(sensitive_view)
def test_async_sensitive_request(self):
with self.settings(DEBUG=True):
self.verify_unsafe_response(async_sensitive_view)
self.verify_unsafe_email(async_sensitive_view)
with self.settings(DEBUG=False):
self.verify_safe_response(async_sensitive_view)
self.verify_safe_email(async_sensitive_view)
def test_async_sensitive_nested_request(self):
with self.settings(DEBUG=True):
self.verify_unsafe_response(async_sensitive_view_nested)
self.verify_unsafe_email(async_sensitive_view_nested)
with self.settings(DEBUG=False):
self.verify_safe_response(async_sensitive_view_nested)
self.verify_safe_email(async_sensitive_view_nested)
def test_paranoid_request(self):
"""
Neither POST parameters nor frame variables can be seen in the
default error reports for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view)
self.verify_unsafe_email(paranoid_view)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view)
self.verify_paranoid_email(paranoid_view)
def test_multivalue_dict_key_error(self):
"""
#21098 -- Sensitive POST parameters cannot be seen in the
error reports if request.POST['nonexistent_key'] throws an error.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(multivalue_dict_key_error)
self.verify_unsafe_email(multivalue_dict_key_error)
with self.settings(DEBUG=False):
self.verify_safe_response(multivalue_dict_key_error)
self.verify_safe_email(multivalue_dict_key_error)
def test_custom_exception_reporter_filter(self):
"""
It's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
def test_sensitive_method(self):
"""
The sensitive_variables decorator works with object methods.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(
sensitive_method_view, check_for_POST_params=False
)
self.verify_unsafe_email(sensitive_method_view, check_for_POST_params=False)
with self.settings(DEBUG=False):
self.verify_safe_response(
sensitive_method_view, check_for_POST_params=False
)
self.verify_safe_email(sensitive_method_view, check_for_POST_params=False)
def test_async_sensitive_method(self):
"""
The sensitive_variables decorator works with async object methods.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(
async_sensitive_method_view, check_for_POST_params=False
)
self.verify_unsafe_email(
async_sensitive_method_view, check_for_POST_params=False
)
with self.settings(DEBUG=False):
self.verify_safe_response(
async_sensitive_method_view, check_for_POST_params=False
)
self.verify_safe_email(
async_sensitive_method_view, check_for_POST_params=False
)
def test_async_sensitive_method_nested(self):
"""
The sensitive_variables decorator works with async object methods.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(
async_sensitive_method_view_nested, check_for_POST_params=False
)
self.verify_unsafe_email(
async_sensitive_method_view_nested, check_for_POST_params=False
)
with self.settings(DEBUG=False):
self.verify_safe_response(
async_sensitive_method_view_nested, check_for_POST_params=False
)
self.verify_safe_email(
async_sensitive_method_view_nested, check_for_POST_params=False
)
def test_sensitive_function_arguments(self):
"""
Sensitive variables don't leak in the sensitive_variables decorator's
frame, when those variables are passed as arguments to the decorated
function.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_args_function_caller)
self.verify_unsafe_email(sensitive_args_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(
sensitive_args_function_caller, check_for_POST_params=False
)
self.verify_safe_email(
sensitive_args_function_caller, check_for_POST_params=False
)
def test_sensitive_function_keyword_arguments(self):
"""
Sensitive variables don't leak in the sensitive_variables decorator's
frame, when those variables are passed as keyword arguments to the
decorated function.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_kwargs_function_caller)
self.verify_unsafe_email(sensitive_kwargs_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(
sensitive_kwargs_function_caller, check_for_POST_params=False
)
self.verify_safe_email(
sensitive_kwargs_function_caller, check_for_POST_params=False
)
def test_callable_settings(self):
"""
Callable settings should not be evaluated in the debug page (#21345).
"""
def callable_setting():
return "This should not be displayed"
with self.settings(DEBUG=True, FOOBAR=callable_setting):
response = self.client.get("/raises500/")
self.assertNotContains(
response, "This should not be displayed", status_code=500
)
def test_callable_settings_forbidding_to_set_attributes(self):
"""
Callable settings that forbid setting attributes should not break
the debug page (#23070).
"""
class CallableSettingWithSlots:
__slots__ = []
def __call__(self):
return "This should not be displayed"
with self.settings(DEBUG=True, WITH_SLOTS=CallableSettingWithSlots()):
response = self.client.get("/raises500/")
self.assertNotContains(
response, "This should not be displayed", status_code=500
)
def test_dict_setting_with_non_str_key(self):
"""
A dict setting containing a non-string key should not break the
debug page (#12744).
"""
with self.settings(DEBUG=True, FOOBAR={42: None}):
response = self.client.get("/raises500/")
self.assertContains(response, "FOOBAR", status_code=500)
def test_sensitive_settings(self):
"""
The debug page should not show some sensitive settings
(password, secret key, ...).
"""
for setting in self.sensitive_settings:
with self.subTest(setting=setting):
with self.settings(DEBUG=True, **{setting: "should not be displayed"}):
response = self.client.get("/raises500/")
self.assertNotContains(
response, "should not be displayed", status_code=500
)
def test_settings_with_sensitive_keys(self):
"""
The debug page should filter out some sensitive information found in
dict settings.
"""
for setting in self.sensitive_settings:
FOOBAR = {
setting: "should not be displayed",
"recursive": {setting: "should not be displayed"},
}
with self.subTest(setting=setting):
with self.settings(DEBUG=True, FOOBAR=FOOBAR):
response = self.client.get("/raises500/")
self.assertNotContains(
response, "should not be displayed", status_code=500
)
def test_cleanse_setting_basic(self):
reporter_filter = SafeExceptionReporterFilter()
self.assertEqual(reporter_filter.cleanse_setting("TEST", "TEST"), "TEST")
self.assertEqual(
reporter_filter.cleanse_setting("PASSWORD", "super_secret"),
reporter_filter.cleansed_substitute,
)
def test_cleanse_setting_ignore_case(self):
reporter_filter = SafeExceptionReporterFilter()
self.assertEqual(
reporter_filter.cleanse_setting("password", "super_secret"),
reporter_filter.cleansed_substitute,
)
def test_cleanse_setting_recurses_in_dictionary(self):
reporter_filter = SafeExceptionReporterFilter()
initial = {"login": "cooper", "password": "secret"}
self.assertEqual(
reporter_filter.cleanse_setting("SETTING_NAME", initial),
{"login": "cooper", "password": reporter_filter.cleansed_substitute},
)
def test_cleanse_setting_recurses_in_dictionary_with_non_string_key(self):
reporter_filter = SafeExceptionReporterFilter()
initial = {("localhost", 8000): {"login": "cooper", "password": "secret"}}
self.assertEqual(
reporter_filter.cleanse_setting("SETTING_NAME", initial),
{
("localhost", 8000): {
"login": "cooper",
"password": reporter_filter.cleansed_substitute,
},
},
)
def test_cleanse_setting_recurses_in_list_tuples(self):
reporter_filter = SafeExceptionReporterFilter()
initial = [
{
"login": "cooper",
"password": "secret",
"apps": (
{"name": "app1", "api_key": "a06b-c462cffae87a"},
{"name": "app2", "api_key": "a9f4-f152e97ad808"},
),
"tokens": ["98b37c57-ec62-4e39", "8690ef7d-8004-4916"],
},
{"SECRET_KEY": "c4d77c62-6196-4f17-a06b-c462cffae87a"},
]
cleansed = [
{
"login": "cooper",
"password": reporter_filter.cleansed_substitute,
"apps": (
{"name": "app1", "api_key": reporter_filter.cleansed_substitute},
{"name": "app2", "api_key": reporter_filter.cleansed_substitute},
),
"tokens": reporter_filter.cleansed_substitute,
},
{"SECRET_KEY": reporter_filter.cleansed_substitute},
]
self.assertEqual(
reporter_filter.cleanse_setting("SETTING_NAME", initial),
cleansed,
)
self.assertEqual(
reporter_filter.cleanse_setting("SETTING_NAME", tuple(initial)),
tuple(cleansed),
)
def test_request_meta_filtering(self):
headers = {
"API_URL": "super secret",
"A_SIGNATURE_VALUE": "super secret",
"MY_KEY": "super secret",
"PASSWORD": "super secret",
"SECRET_VALUE": "super secret",
"SOME_TOKEN": "super secret",
"THE_AUTH": "super secret",
}
request = self.rf.get("/", headers=headers)
reporter_filter = SafeExceptionReporterFilter()
cleansed_headers = reporter_filter.get_safe_request_meta(request)
for header in headers:
with self.subTest(header=header):
self.assertEqual(
cleansed_headers[f"HTTP_{header}"],
reporter_filter.cleansed_substitute,
)
self.assertEqual(
cleansed_headers["HTTP_COOKIE"],
reporter_filter.cleansed_substitute,
)
def test_exception_report_uses_meta_filtering(self):
response = self.client.get(
"/raises500/", headers={"secret-header": "super_secret"}
)
self.assertNotIn(b"super_secret", response.content)
response = self.client.get(
"/raises500/",
headers={"secret-header": "super_secret", "accept": "application/json"},
)
self.assertNotIn(b"super_secret", response.content)
@override_settings(SESSION_COOKIE_NAME="djangosession")
def test_cleanse_session_cookie_value(self):
self.client.cookies.load({"djangosession": "should not be displayed"})
response = self.client.get("/raises500/")
self.assertNotContains(response, "should not be displayed", status_code=500)
class CustomExceptionReporterFilter(SafeExceptionReporterFilter):
cleansed_substitute = "XXXXXXXXXXXXXXXXXXXX"
hidden_settings = _lazy_re_compile("PASS|DATABASE", flags=re.I)
@override_settings(
ROOT_URLCONF="view_tests.urls",
DEFAULT_EXCEPTION_REPORTER_FILTER="%s.CustomExceptionReporterFilter" % __name__,
)
class CustomExceptionReporterFilterTests(SimpleTestCase):
def setUp(self):
get_default_exception_reporter_filter.cache_clear()
self.addCleanup(get_default_exception_reporter_filter.cache_clear)
def test_setting_allows_custom_subclass(self):
self.assertIsInstance(
get_default_exception_reporter_filter(),
CustomExceptionReporterFilter,
)
def test_cleansed_substitute_override(self):
reporter_filter = get_default_exception_reporter_filter()
self.assertEqual(
reporter_filter.cleanse_setting("password", "super_secret"),
reporter_filter.cleansed_substitute,
)
def test_hidden_settings_override(self):
reporter_filter = get_default_exception_reporter_filter()
self.assertEqual(
reporter_filter.cleanse_setting("database_url", "super_secret"),
reporter_filter.cleansed_substitute,
)
class NonHTMLResponseExceptionReporterFilter(
ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase
):
"""
Sensitive information can be filtered out of error reports.
The plain text 500 debug-only error page is served when it is
detected that the request doesn't accept HTML content. Don't check for
(non)existence of frame vars in the traceback information section of the
response content because they're not included in these error pages.
Refs #14614.
"""
rf = RequestFactory(headers={"accept": "application/json"})
def test_non_sensitive_request(self):
"""
Request info can be seen in the default error reports for
non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
def test_sensitive_request(self):
"""
Sensitive POST parameters cannot be seen in the default
error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view, check_for_vars=False)
def test_async_sensitive_request(self):
"""
Sensitive POST parameters cannot be seen in the default
error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(async_sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_safe_response(async_sensitive_view, check_for_vars=False)
def test_async_sensitive_request_nested(self):
"""
Sensitive POST parameters cannot be seen in the default
error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(
async_sensitive_view_nested, check_for_vars=False
)
with self.settings(DEBUG=False):
self.verify_safe_response(async_sensitive_view_nested, check_for_vars=False)
def test_paranoid_request(self):
"""
No POST parameters can be seen in the default error reports
for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view, check_for_vars=False)
def test_custom_exception_reporter_filter(self):
"""
It's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(
custom_exception_reporter_filter_view, check_for_vars=False
)
with self.settings(DEBUG=False):
self.verify_unsafe_response(
custom_exception_reporter_filter_view, check_for_vars=False
)
@override_settings(DEBUG=True, ROOT_URLCONF="view_tests.urls")
def test_non_html_response_encoding(self):
response = self.client.get(
"/raises500/", headers={"accept": "application/json"}
)
self.assertEqual(response.headers["Content-Type"], "text/plain; charset=utf-8")
class DecoratorsTests(SimpleTestCase):
def test_sensitive_variables_not_called(self):
msg = (
"sensitive_variables() must be called to use it as a decorator, "
"e.g., use @sensitive_variables(), not @sensitive_variables."
)
with self.assertRaisesMessage(TypeError, msg):
@sensitive_variables
def test_func(password):
pass
def test_sensitive_post_parameters_not_called(self):
msg = (
"sensitive_post_parameters() must be called to use it as a "
"decorator, e.g., use @sensitive_post_parameters(), not "
"@sensitive_post_parameters."
)
with self.assertRaisesMessage(TypeError, msg):
@sensitive_post_parameters
def test_func(request):
return index_page(request)
def test_sensitive_post_parameters_http_request(self):
class MyClass:
@sensitive_post_parameters()
def a_view(self, request):
return HttpResponse()
msg = (
"sensitive_post_parameters didn't receive an HttpRequest object. "
"If you are decorating a classmethod, make sure to use "
"@method_decorator."
)
with self.assertRaisesMessage(TypeError, msg):
MyClass().a_view(HttpRequest())
|
./temp_repos/django/django/views/debug.py
|
./temp_repos/django/tests/view_tests/tests/test_debug.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'ExceptionCycleWarning'.
Context:
- Class Name: ExceptionCycleWarning
- Dependencies to Mock: tb, callable_setting, exc_type, is_email, exc_value, request
- Key Imports: django.utils.module_loading, django.views.decorators.debug, django, django.utils, pathlib, django.utils.version, django.template, django.http, itertools, inspect
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
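A minimal sketch of such a test, assuming Django settings are already configured (e.g., when run under Django's own test runner); the self-referential __cause__ used to build the exception cycle and the mocked request are illustrative choices, not the only way to trigger the warning:
import unittest
import warnings
from unittest import mock
from django.views.debug import ExceptionCycleWarning, ExceptionReporter
class ExceptionCycleWarningTests(unittest.TestCase):
    def test_is_user_warning(self):
        # Arrange/Act/Assert: the class must work with warnings filters.
        self.assertTrue(issubclass(ExceptionCycleWarning, UserWarning))
    def test_cycle_in_exception_chain_emits_warning(self):
        # Arrange: an exception whose __cause__ points back to itself,
        # plus a mocked request dependency.
        request = mock.MagicMock()
        try:
            raise ValueError("boom")
        except ValueError as exc:
            exc.__cause__ = exc
            reporter = ExceptionReporter(request, type(exc), exc, exc.__traceback__)
        # Act: walking the traceback frames follows the cause chain.
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            reporter.get_traceback_frames()
        # Assert: the cycle was detected and reported as a warning.
        self.assertTrue(
            any(issubclass(w.category, ExceptionCycleWarning) for w in caught)
        )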
|
ExceptionCycleWarning
|
python
|
import json
import os
import re
from pathlib import Path
from django.apps import apps
from django.conf import settings
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
from django.template import Context, Engine
from django.urls import translate_url
from django.utils.formats import get_format
from django.utils.http import url_has_allowed_host_and_scheme
from django.utils.translation import check_for_language, get_language
from django.utils.translation.trans_real import DjangoTranslation
from django.views.generic import View
LANGUAGE_QUERY_PARAMETER = "language"
def builtin_template_path(name):
"""
Return a path to a builtin template.
Avoid calling this function at the module level or in a class-definition
because __file__ may not exist, e.g. in frozen environments.
"""
return Path(__file__).parent / "templates" / name
def set_language(request):
"""
Redirect to a given URL while setting the chosen language in the language
cookie. The URL and the language code need to be specified in the request
parameters.
Since this view changes how the user will see the rest of the site, it must
only be accessed as a POST request. If called as a GET request, it will
redirect to the page in the request (the 'next' parameter) without changing
any state.
"""
next_url = request.POST.get("next", request.GET.get("next"))
if (
next_url or request.accepts("text/html")
) and not url_has_allowed_host_and_scheme(
url=next_url,
allowed_hosts={request.get_host()},
require_https=request.is_secure(),
):
next_url = request.META.get("HTTP_REFERER")
if not url_has_allowed_host_and_scheme(
url=next_url,
allowed_hosts={request.get_host()},
require_https=request.is_secure(),
):
next_url = "/"
response = HttpResponseRedirect(next_url) if next_url else HttpResponse(status=204)
if request.method == "POST":
lang_code = request.POST.get(LANGUAGE_QUERY_PARAMETER)
if lang_code and check_for_language(lang_code):
if next_url:
next_trans = translate_url(next_url, lang_code)
if next_trans != next_url:
response = HttpResponseRedirect(next_trans)
response.set_cookie(
settings.LANGUAGE_COOKIE_NAME,
lang_code,
max_age=settings.LANGUAGE_COOKIE_AGE,
path=settings.LANGUAGE_COOKIE_PATH,
domain=settings.LANGUAGE_COOKIE_DOMAIN,
secure=settings.LANGUAGE_COOKIE_SECURE,
httponly=settings.LANGUAGE_COOKIE_HTTPONLY,
samesite=settings.LANGUAGE_COOKIE_SAMESITE,
)
return response
def get_formats():
"""Return all formats strings required for i18n to work."""
FORMAT_SETTINGS = (
"DATE_FORMAT",
"DATETIME_FORMAT",
"TIME_FORMAT",
"YEAR_MONTH_FORMAT",
"MONTH_DAY_FORMAT",
"SHORT_DATE_FORMAT",
"SHORT_DATETIME_FORMAT",
"FIRST_DAY_OF_WEEK",
"DECIMAL_SEPARATOR",
"THOUSAND_SEPARATOR",
"NUMBER_GROUPING",
"DATE_INPUT_FORMATS",
"TIME_INPUT_FORMATS",
"DATETIME_INPUT_FORMATS",
)
return {attr: get_format(attr) for attr in FORMAT_SETTINGS}
class JavaScriptCatalog(View):
"""
Return the selected language catalog as a JavaScript library.
Receive the list of packages to check for translations in the `packages`
kwarg either from the extra dictionary passed to the path() function or as
a plus-sign delimited string from the request. Default is 'django.conf'.
You can override the gettext domain for this view, but usually you don't
want to do that as JavaScript messages go to the djangojs domain. This
might be needed if you deliver your JavaScript source from Django
templates.
"""
domain = "djangojs"
packages = None
def get(self, request, *args, **kwargs):
locale = get_language()
domain = kwargs.get("domain", self.domain)
# If packages are not provided, default to all installed packages, as
# DjangoTranslation without localedirs harvests them all.
packages = kwargs.get("packages", "")
packages = packages.split("+") if packages else self.packages
paths = self.get_paths(packages) if packages else None
self.translation = DjangoTranslation(locale, domain=domain, localedirs=paths)
context = self.get_context_data(**kwargs)
return self.render_to_response(context)
def get_paths(self, packages):
allowable_packages = {
app_config.name: app_config for app_config in apps.get_app_configs()
}
app_configs = [
allowable_packages[p] for p in packages if p in allowable_packages
]
if len(app_configs) < len(packages):
excluded = [p for p in packages if p not in allowable_packages]
raise ValueError(
"Invalid package(s) provided to JavaScriptCatalog: %s"
% ",".join(excluded)
)
# paths of requested packages
return [os.path.join(app.path, "locale") for app in app_configs]
@property
def _num_plurals(self):
"""
Return the number of plurals for this catalog language, or 2 if no
plural string is available.
"""
match = re.search(r"nplurals=\s*(\d+)", self._plural_string or "")
if match:
return int(match[1])
return 2
@property
def _plural_string(self):
"""
Return the plural string (including nplurals) for this catalog
language, or None if no plural string is available.
"""
if "" in self.translation._catalog:
for line in self.translation._catalog[""].split("\n"):
if line.startswith("Plural-Forms:"):
return line.split(":", 1)[1].strip()
return None
def get_plural(self):
plural = self._plural_string
if plural is not None:
# This should be a compiled function of a typical plural-form:
# Plural-Forms: nplurals=3; plural=n%10==1 && n%100!=11 ? 0 :
# n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20)
# ? 1 : 2;
plural = [
el.strip()
for el in plural.split(";")
if el.strip().startswith("plural=")
][0].split("=", 1)[1]
return plural
def get_catalog(self):
pdict = {}
catalog = {}
translation = self.translation
seen_keys = set()
while True:
for key, value in translation._catalog.items():
if key == "" or key in seen_keys:
continue
if isinstance(key, str):
catalog[key] = value
elif isinstance(key, tuple):
msgid, cnt = key
pdict.setdefault(msgid, {})[cnt] = value
else:
raise TypeError(key)
seen_keys.add(key)
if translation._fallback:
translation = translation._fallback
else:
break
num_plurals = self._num_plurals
for k, v in pdict.items():
catalog[k] = [v.get(i, "") for i in range(num_plurals)]
return catalog
def get_context_data(self, **kwargs):
return {
"catalog": self.get_catalog(),
"formats": get_formats(),
"plural": self.get_plural(),
}
def render_to_response(self, context, **response_kwargs):
def indent(s):
return s.replace("\n", "\n ")
with builtin_template_path("i18n_catalog.js").open(encoding="utf-8") as fh:
template = Engine().from_string(fh.read())
context["catalog_str"] = (
indent(json.dumps(context["catalog"], sort_keys=True, indent=2))
if context["catalog"]
else None
)
context["formats_str"] = indent(
json.dumps(context["formats"], sort_keys=True, indent=2)
)
return HttpResponse(
template.render(Context(context)), 'text/javascript; charset="utf-8"'
)
class JSONCatalog(JavaScriptCatalog):
"""
Return the selected language catalog as a JSON object.
Receive the same parameters as JavaScriptCatalog and return a response
with a JSON object of the following format:
{
"catalog": {
# Translations catalog
},
"formats": {
# Language formats for date, time, etc.
},
"plural": '...' # Expression for plural forms, or null.
}
"""
def render_to_response(self, context, **response_kwargs):
return JsonResponse(context)
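# A typical URLconf wiring for these catalog views; a sketch only, with
# illustrative route names and an assumed "your.app" package:
#
#     from django.urls import path
#     from django.views.i18n import JavaScriptCatalog, JSONCatalog
#     urlpatterns = [
#         path(
#             "jsi18n/",
#             JavaScriptCatalog.as_view(packages=["your.app"]),
#             name="javascript-catalog",
#         ),
#         path("jsoni18n/", JSONCatalog.as_view(), name="json-catalog"),
#     ]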
|
import gettext
import json
from os import path
from unittest import mock
from django.conf import settings
from django.test import (
RequestFactory,
SimpleTestCase,
TestCase,
modify_settings,
override_settings,
)
from django.test.selenium import SeleniumTestCase
from django.urls import reverse
from django.utils.translation import get_language, override
from django.views.i18n import JavaScriptCatalog, get_formats
from ..urls import locale_dir
@override_settings(ROOT_URLCONF="view_tests.urls")
class SetLanguageTests(TestCase):
"""Test the django.views.i18n.set_language view."""
def _get_inactive_language_code(self):
"""Return language code for a language which is not activated."""
current_language = get_language()
return [code for code, name in settings.LANGUAGES if code != current_language][
0
]
def test_setlang(self):
"""
The set_language view can be used to change the session language.
The user is redirected to the 'next' argument if provided.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code, "next": "/"}
response = self.client.post(
"/i18n/setlang/", post_data, headers={"referer": "/i_should_not_be_used/"}
)
self.assertRedirects(response, "/")
# The language is set in a cookie.
language_cookie = self.client.cookies[settings.LANGUAGE_COOKIE_NAME]
self.assertEqual(language_cookie.value, lang_code)
self.assertEqual(language_cookie["domain"], "")
self.assertEqual(language_cookie["path"], "/")
self.assertEqual(language_cookie["max-age"], "")
self.assertEqual(language_cookie["httponly"], "")
self.assertEqual(language_cookie["samesite"], "")
self.assertEqual(language_cookie["secure"], "")
def test_setlang_unsafe_next(self):
"""
The set_language view only redirects to the 'next' argument if it is
"safe".
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code, "next": "//unsafe/redirection/"}
response = self.client.post("/i18n/setlang/", data=post_data)
self.assertEqual(response.url, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_http_next(self):
"""
The set_language view only redirects to the 'next' argument if it is
"safe" and its scheme is HTTPS if the request was sent over HTTPS.
"""
lang_code = self._get_inactive_language_code()
non_https_next_url = "http://testserver/redirection/"
post_data = {"language": lang_code, "next": non_https_next_url}
# Insecure URL in POST data.
response = self.client.post("/i18n/setlang/", data=post_data, secure=True)
self.assertEqual(response.url, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
# Insecure URL in HTTP referer.
response = self.client.post(
"/i18n/setlang/", secure=True, headers={"referer": non_https_next_url}
)
self.assertEqual(response.url, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_redirect_to_referer(self):
"""
The set_language view redirects to the URL in the referer header when
there isn't a "next" parameter.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code}
response = self.client.post(
"/i18n/setlang/", post_data, headers={"referer": "/i18n/"}
)
self.assertRedirects(response, "/i18n/", fetch_redirect_response=False)
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_default_redirect(self):
"""
The set_language view redirects to '/' when there isn't a referer or
"next" parameter.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code}
response = self.client.post("/i18n/setlang/", post_data)
self.assertRedirects(response, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_performs_redirect_for_ajax_if_explicitly_requested(self):
"""
The set_language view redirects to the "next" parameter for requests
not accepting HTML response content.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code, "next": "/"}
response = self.client.post(
"/i18n/setlang/", post_data, headers={"accept": "application/json"}
)
self.assertRedirects(response, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_doesnt_perform_a_redirect_to_referer_for_ajax(self):
"""
The set_language view doesn't redirect to the HTTP referer header if
the request doesn't accept HTML response content.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code}
headers = {"HTTP_REFERER": "/", "HTTP_ACCEPT": "application/json"}
response = self.client.post("/i18n/setlang/", post_data, **headers)
self.assertEqual(response.status_code, 204)
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_doesnt_perform_a_default_redirect_for_ajax(self):
"""
The set_language view returns 204 by default for requests not accepting
HTML response content.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code}
response = self.client.post(
"/i18n/setlang/", post_data, headers={"accept": "application/json"}
)
self.assertEqual(response.status_code, 204)
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_unsafe_next_for_ajax(self):
"""
The fallback to the root URL for the set_language view works for requests
not accepting HTML response content.
"""
lang_code = self._get_inactive_language_code()
post_data = {"language": lang_code, "next": "//unsafe/redirection/"}
response = self.client.post(
"/i18n/setlang/", post_data, headers={"accept": "application/json"}
)
self.assertEqual(response.url, "/")
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
def test_setlang_reversal(self):
self.assertEqual(reverse("set_language"), "/i18n/setlang/")
def test_setlang_cookie(self):
# Force saving the language in a cookie rather than a session by
# excluding session middleware and any middleware that requires it.
test_settings = {
"MIDDLEWARE": ["django.middleware.common.CommonMiddleware"],
"LANGUAGE_COOKIE_NAME": "mylanguage",
"LANGUAGE_COOKIE_AGE": 3600 * 7 * 2,
"LANGUAGE_COOKIE_DOMAIN": ".example.com",
"LANGUAGE_COOKIE_PATH": "/test/",
"LANGUAGE_COOKIE_HTTPONLY": True,
"LANGUAGE_COOKIE_SAMESITE": "Strict",
"LANGUAGE_COOKIE_SECURE": True,
}
with self.settings(**test_settings):
post_data = {"language": "pl", "next": "/views/"}
response = self.client.post("/i18n/setlang/", data=post_data)
language_cookie = response.cookies.get("mylanguage")
self.assertEqual(language_cookie.value, "pl")
self.assertEqual(language_cookie["domain"], ".example.com")
self.assertEqual(language_cookie["path"], "/test/")
self.assertEqual(language_cookie["max-age"], 3600 * 7 * 2)
self.assertIs(language_cookie["httponly"], True)
self.assertEqual(language_cookie["samesite"], "Strict")
self.assertIs(language_cookie["secure"], True)
def test_setlang_decodes_http_referer_url(self):
"""
The set_language view decodes the HTTP_REFERER URL and preserves an
encoded query string.
"""
# The URL & view must exist for this to work as a regression test.
self.assertEqual(
reverse("with_parameter", kwargs={"parameter": "x"}), "/test-setlang/x/"
)
lang_code = self._get_inactive_language_code()
# %C3%A4 decodes to ä, %26 to &.
encoded_url = "/test-setlang/%C3%A4/?foo=bar&baz=alpha%26omega"
response = self.client.post(
"/i18n/setlang/", {"language": lang_code}, headers={"referer": encoded_url}
)
self.assertRedirects(response, encoded_url, fetch_redirect_response=False)
self.assertEqual(
self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, lang_code
)
@modify_settings(
MIDDLEWARE={
"append": "django.middleware.locale.LocaleMiddleware",
}
)
def test_lang_from_translated_i18n_pattern(self):
response = self.client.post(
"/i18n/setlang/",
data={"language": "nl"},
follow=True,
headers={"referer": "/en/translated/"},
)
self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "nl")
self.assertRedirects(response, "/nl/vertaald/")
# And reverse
response = self.client.post(
"/i18n/setlang/",
data={"language": "en"},
follow=True,
headers={"referer": "/nl/vertaald/"},
)
self.assertRedirects(response, "/en/translated/")
@override_settings(ROOT_URLCONF="view_tests.urls")
class I18NViewTests(SimpleTestCase):
"""Test django.views.i18n views other than set_language."""
@override_settings(LANGUAGE_CODE="de")
def test_get_formats(self):
formats = get_formats()
# Test 3 possible types in get_formats: integer, string, and list.
self.assertEqual(formats["FIRST_DAY_OF_WEEK"], 1)
self.assertEqual(formats["DECIMAL_SEPARATOR"], ",")
self.assertEqual(
formats["TIME_INPUT_FORMATS"], ["%H:%M:%S", "%H:%M:%S.%f", "%H:%M"]
)
def test_jsi18n(self):
"""The javascript_catalog can be deployed with language settings"""
for lang_code in ["es", "fr", "ru"]:
with override(lang_code):
catalog = gettext.translation("djangojs", locale_dir, [lang_code])
trans_txt = catalog.gettext("this is to be translated")
response = self.client.get("/jsi18n/")
self.assertEqual(
response.headers["Content-Type"], 'text/javascript; charset="utf-8"'
)
# The response content must include a line like:
# "this is to be translated": <value of the trans_txt variable>.
# json.dumps() is used to be able to check Unicode strings.
self.assertContains(response, json.dumps(trans_txt), 1)
if lang_code == "fr":
# Message with context (msgctxt)
self.assertContains(response, '"month name\\u0004May": "mai"', 1)
@override_settings(USE_I18N=False)
def test_jsi18n_USE_I18N_False(self):
response = self.client.get("/jsi18n/")
# default plural function
self.assertContains(
response,
"django.pluralidx = function(count) { return (count == 1) ? 0 : 1; };",
)
self.assertNotContains(response, "var newcatalog =")
def test_jsoni18n(self):
"""
The json_catalog returns the language catalog and settings as JSON.
"""
with override("de"):
response = self.client.get("/jsoni18n/")
data = json.loads(response.text)
self.assertIn("catalog", data)
self.assertIn("formats", data)
self.assertEqual(
data["formats"]["TIME_INPUT_FORMATS"],
["%H:%M:%S", "%H:%M:%S.%f", "%H:%M"],
)
self.assertEqual(data["formats"]["FIRST_DAY_OF_WEEK"], 1)
self.assertIn("plural", data)
self.assertEqual(data["catalog"]["month name\x04May"], "Mai")
self.assertIn("DATETIME_FORMAT", data["formats"])
self.assertEqual(data["plural"], "(n != 1)")
def test_jsi18n_with_missing_en_files(self):
"""
The javascript_catalog shouldn't load the fallback language in the
case that the current selected language is actually the one translated
from, and hence missing translation files completely.
This happens easily when you're translating from English to other
languages and you've set settings.LANGUAGE_CODE to some other language
than English.
"""
with self.settings(LANGUAGE_CODE="es"), override("en-us"):
response = self.client.get("/jsi18n/")
self.assertNotContains(response, "esto tiene que ser traducido")
def test_jsoni18n_with_missing_en_files(self):
"""
Same as above for the json_catalog view. Here we also check for the
expected JSON format.
"""
with self.settings(LANGUAGE_CODE="es"), override("en-us"):
response = self.client.get("/jsoni18n/")
data = json.loads(response.text)
self.assertIn("catalog", data)
self.assertIn("formats", data)
self.assertIn("plural", data)
self.assertEqual(data["catalog"], {})
self.assertIn("DATETIME_FORMAT", data["formats"])
self.assertIsNone(data["plural"])
def test_jsi18n_fallback_language(self):
"""
The fallback language works properly when the selected language
cannot be found.
"""
with self.settings(LANGUAGE_CODE="fr"), override("fi"):
response = self.client.get("/jsi18n/")
self.assertContains(response, "il faut le traduire")
self.assertNotContains(response, "Untranslated string")
def test_jsi18n_fallback_language_with_custom_locale_dir(self):
"""
The fallback language works when there are several levels of fallback
translation catalogs.
"""
locale_paths = [
path.join(
path.dirname(path.dirname(path.abspath(__file__))),
"custom_locale_path",
),
]
with self.settings(LOCALE_PATHS=locale_paths), override("es_MX"):
response = self.client.get("/jsi18n/")
self.assertContains(
response, "custom_locale_path: esto tiene que ser traducido"
)
response = self.client.get("/jsi18n_no_packages/")
self.assertContains(
response, "custom_locale_path: esto tiene que ser traducido"
)
def test_i18n_fallback_language_plural(self):
"""
The fallback to a language with fewer plural forms maintains the real
language's number of plural forms and correct translations.
"""
with self.settings(LANGUAGE_CODE="pt"), override("ru"):
response = self.client.get("/jsi18n/")
self.assertEqual(
response.context["catalog"]["{count} plural3"],
["{count} plural3 p3", "{count} plural3 p3s", "{count} plural3 p3t"],
)
self.assertEqual(
response.context["catalog"]["{count} plural2"],
["{count} plural2", "{count} plural2s", ""],
)
with self.settings(LANGUAGE_CODE="ru"), override("pt"):
response = self.client.get("/jsi18n/")
self.assertEqual(
response.context["catalog"]["{count} plural3"],
["{count} plural3", "{count} plural3s"],
)
self.assertEqual(
response.context["catalog"]["{count} plural2"],
["{count} plural2", "{count} plural2s"],
)
def test_i18n_english_variant(self):
with override("en-gb"):
response = self.client.get("/jsi18n/")
self.assertIn(
'"this color is to be translated": "this colour is to be translated"',
response.context["catalog_str"],
)
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English, the selected language
is English, and there is no 'en' translation available. See #13388,
#3594 and #13726 for more details.
"""
with self.settings(LANGUAGE_CODE="fr"), override("en-us"):
response = self.client.get("/jsi18n/")
self.assertNotContains(response, "Choisir une heure")
@modify_settings(INSTALLED_APPS={"append": "view_tests.app0"})
def test_non_english_default_english_userpref(self):
"""
Same as above with the difference that there IS an 'en' translation
available. The JavaScript i18n view must return a non-empty language
catalog with the proper English translations. See #13726 for more
details.
"""
with self.settings(LANGUAGE_CODE="fr"), override("en-us"):
response = self.client.get("/jsi18n_english_translation/")
self.assertContains(response, "this app0 string is to be translated")
def test_i18n_language_non_english_fallback(self):
"""
The fallback language works properly when the selected language
cannot be found.
"""
with self.settings(LANGUAGE_CODE="fr"), override("none"):
response = self.client.get("/jsi18n/")
self.assertContains(response, "Choisir une heure")
def test_escaping(self):
# Force a language via GET otherwise the gettext functions are a noop!
response = self.client.get("/jsi18n_admin/?language=de")
self.assertContains(response, "\\x04")
@modify_settings(INSTALLED_APPS={"append": ["view_tests.app5"]})
def test_non_BMP_char(self):
"""
Non-BMP characters should not break the javascript_catalog (#21725).
"""
with self.settings(LANGUAGE_CODE="en-us"), override("fr"):
response = self.client.get("/jsi18n/app5/")
self.assertContains(response, "emoji")
self.assertContains(response, "\\ud83d\\udca9")
@modify_settings(INSTALLED_APPS={"append": ["view_tests.app1", "view_tests.app2"]})
def test_i18n_language_english_default(self):
"""
Check if the JavaScript i18n view returns a complete language catalog
if the default language is en-us, the selected language has a
translation available and a catalog composed by djangojs domain
translations of multiple Python packages is requested. See #13388,
#3594 and #13514 for more details.
"""
base_trans_string = (
"il faut traduire cette cha\\u00eene de caract\\u00e8res de "
)
app1_trans_string = base_trans_string + "app1"
app2_trans_string = base_trans_string + "app2"
with self.settings(LANGUAGE_CODE="en-us"), override("fr"):
response = self.client.get("/jsi18n_multi_packages1/")
self.assertContains(response, app1_trans_string)
self.assertContains(response, app2_trans_string)
response = self.client.get("/jsi18n/app1/")
self.assertContains(response, app1_trans_string)
self.assertNotContains(response, app2_trans_string)
response = self.client.get("/jsi18n/app2/")
self.assertNotContains(response, app1_trans_string)
self.assertContains(response, app2_trans_string)
@modify_settings(INSTALLED_APPS={"append": ["view_tests.app3", "view_tests.app4"]})
def test_i18n_different_non_english_languages(self):
"""
Similar to above, but with neither the default nor the requested
language being English.
"""
with self.settings(LANGUAGE_CODE="fr"), override("es-ar"):
response = self.client.get("/jsi18n_multi_packages2/")
self.assertContains(response, "este texto de app3 debe ser traducido")
def test_i18n_with_locale_paths(self):
extended_locale_paths = settings.LOCALE_PATHS + [
path.join(
path.dirname(path.dirname(path.abspath(__file__))),
"app3",
"locale",
),
]
with self.settings(LANGUAGE_CODE="es-ar", LOCALE_PATHS=extended_locale_paths):
with override("es-ar"):
response = self.client.get("/jsi18n/")
self.assertContains(response, "este texto de app3 debe ser traducido")
def test_i18n_unknown_package_error(self):
view = JavaScriptCatalog.as_view()
request = RequestFactory().get("/")
msg = "Invalid package(s) provided to JavaScriptCatalog: unknown_package"
with self.assertRaisesMessage(ValueError, msg):
view(request, packages="unknown_package")
msg += ",unknown_package2"
with self.assertRaisesMessage(ValueError, msg):
view(request, packages="unknown_package+unknown_package2")
def test_template_encoding(self):
"""
The template is loaded directly, not via a template loader, and should
be opened with the utf-8 charset, as is the default specified on template
engines.
"""
from django.views.i18n import Path
view = JavaScriptCatalog.as_view()
request = RequestFactory().get("/")
with mock.patch.object(Path, "open") as m:
view(request)
m.assert_called_once_with(encoding="utf-8")
@override_settings(ROOT_URLCONF="view_tests.urls")
class I18nSeleniumTests(SeleniumTestCase):
# The test cases use fixtures & translations from these apps.
available_apps = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"view_tests",
]
@override_settings(LANGUAGE_CODE="de")
def test_javascript_gettext(self):
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + "/jsi18n_template/")
elem = self.selenium.find_element(By.ID, "gettext")
self.assertEqual(elem.text, "Entfernen")
elem = self.selenium.find_element(By.ID, "ngettext_sing")
self.assertEqual(elem.text, "1 Element")
elem = self.selenium.find_element(By.ID, "ngettext_plur")
self.assertEqual(elem.text, "455 Elemente")
elem = self.selenium.find_element(By.ID, "ngettext_onnonplural")
self.assertEqual(elem.text, "Bild")
elem = self.selenium.find_element(By.ID, "pgettext")
self.assertEqual(elem.text, "Kann")
elem = self.selenium.find_element(By.ID, "npgettext_sing")
self.assertEqual(elem.text, "1 Resultat")
elem = self.selenium.find_element(By.ID, "npgettext_plur")
self.assertEqual(elem.text, "455 Resultate")
elem = self.selenium.find_element(By.ID, "formats")
self.assertEqual(
elem.text,
"DATE_INPUT_FORMATS is an object; DECIMAL_SEPARATOR is a string; "
"FIRST_DAY_OF_WEEK is a number;",
)
@modify_settings(INSTALLED_APPS={"append": ["view_tests.app1", "view_tests.app2"]})
@override_settings(LANGUAGE_CODE="fr")
def test_multiple_catalogs(self):
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + "/jsi18n_multi_catalogs/")
elem = self.selenium.find_element(By.ID, "app1string")
self.assertEqual(
elem.text, "il faut traduire cette chaîne de caractères de app1"
)
elem = self.selenium.find_element(By.ID, "app2string")
self.assertEqual(
elem.text, "il faut traduire cette chaîne de caractères de app2"
)
|
./temp_repos/django/django/views/i18n.py
|
./temp_repos/django/tests/view_tests/tests/test_i18n.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'JavaScriptCatalog'.
Context:
- Class Name: JavaScriptCatalog
- Dependencies to Mock: None detected
- Key Imports: django.conf, django.http, django.urls, django.utils.translation.trans_real, pathlib, django.utils.http, django.utils.translation, django.apps, django.views.generic, json
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
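A minimal sketch of such a test, assuming Django settings are configured (e.g., under Django's own test runner); the mocked DjangoTranslation with an empty catalog and the patched app registry are illustrative:
import unittest
from unittest import mock
from django.test import RequestFactory
from django.views.i18n import JavaScriptCatalog
class JavaScriptCatalogTests(unittest.TestCase):
    def test_get_renders_catalog_with_mocked_translation(self):
        # Arrange: a fake translation with an empty catalog and no fallback.
        request = RequestFactory().get("/")
        fake_translation = mock.MagicMock(_catalog={}, _fallback=None)
        view = JavaScriptCatalog()
        # Act: patch DjangoTranslation so no real locale files are read.
        with mock.patch(
            "django.views.i18n.DjangoTranslation", return_value=fake_translation
        ) as translation_cls:
            response = view.get(request)
        # Assert: the translation machinery was used and a response built.
        translation_cls.assert_called_once()
        self.assertEqual(response.status_code, 200)
    def test_get_paths_rejects_unknown_packages(self):
        # Arrange: make the app registry appear empty.
        view = JavaScriptCatalog()
        # Act / Assert: unknown package names raise ValueError.
        with mock.patch(
            "django.views.i18n.apps.get_app_configs", return_value=[]
        ):
            with self.assertRaises(ValueError):
                view.get_paths(["unknown_package"])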
|
JavaScriptCatalog
|
python
|
"""
Views and functions for serving static files. These are only to be used
during development, and SHOULD NOT be used in a production setting.
"""
import mimetypes
import posixpath
from pathlib import Path
from django.http import FileResponse, Http404, HttpResponse, HttpResponseNotModified
from django.template import Context, Engine, TemplateDoesNotExist, loader
from django.utils._os import safe_join
from django.utils.http import http_date, parse_http_date
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
def builtin_template_path(name):
"""
Return a path to a builtin template.
Avoid calling this function at the module level or in a class-definition
because __file__ may not exist, e.g. in frozen environments.
"""
return Path(__file__).parent / "templates" / name
def serve(request, path, document_root=None, show_indexes=False):
"""
Serve static files below a given point in the directory structure.
To use, put a URL pattern such as::
from django.views.static import serve
path('<path:path>', serve, {'document_root': '/path/to/my/files/'})
in your URLconf. You must provide the ``document_root`` param. You may
also set ``show_indexes`` to ``True`` if you'd like to serve a basic index
of the directory. This index view will use the template hardcoded below,
but if you'd like to override it, you can create a template called
``static/directory_index.html``.
"""
path = posixpath.normpath(path).lstrip("/")
fullpath = Path(safe_join(document_root, path))
if fullpath.is_dir():
if show_indexes:
return directory_index(path, fullpath)
raise Http404(_("Directory indexes are not allowed here."))
if not fullpath.exists():
raise Http404(_("“%(path)s” does not exist") % {"path": fullpath})
# Respect the If-Modified-Since header.
statobj = fullpath.stat()
if not was_modified_since(
request.META.get("HTTP_IF_MODIFIED_SINCE"), statobj.st_mtime
):
return HttpResponseNotModified()
content_type, encoding = mimetypes.guess_type(str(fullpath))
content_type = content_type or "application/octet-stream"
response = FileResponse(fullpath.open("rb"), content_type=content_type)
response.headers["Last-Modified"] = http_date(statobj.st_mtime)
if encoding:
response.headers["Content-Encoding"] = encoding
return response
# Translatable string for static directory index template title.
template_translatable = gettext_lazy("Index of %(directory)s")
def directory_index(path, fullpath):
try:
t = loader.select_template(
[
"static/directory_index.html",
"static/directory_index",
]
)
except TemplateDoesNotExist:
with builtin_template_path("directory_index.html").open(encoding="utf-8") as fh:
t = Engine(libraries={"i18n": "django.templatetags.i18n"}).from_string(
fh.read()
)
c = Context()
else:
c = {}
files = []
for f in fullpath.iterdir():
if not f.name.startswith("."):
url = str(f.relative_to(fullpath))
if f.is_dir():
url += "/"
files.append(url)
c.update(
{
"directory": path + "/",
"file_list": files,
}
)
return HttpResponse(t.render(c))
def was_modified_since(header=None, mtime=0):
"""
Was something modified since the user last downloaded it?
header
This is the value of the If-Modified-Since header. If this is None,
True is returned.
mtime
This is the modification time of the item we're talking about.
"""
try:
if header is None:
raise ValueError
header_mtime = parse_http_date(header)
if int(mtime) > header_mtime:
raise ValueError
except (ValueError, OverflowError):
return True
return False
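# Illustrative sketch (not part of the module above): was_modified_since()
# returns False only when the header both parses and is at least as recent
# as the file's mtime; any other case, including a missing or malformed
# header, yields True so the file is re-sent. Assuming the imports above:
#
#   mtime = 1343416141.107817
#   was_modified_since(http_date(mtime), mtime)       # False -> 304 reply
#   was_modified_since(http_date(mtime - 60), mtime)  # True  -> serve file
#   was_modified_since(None, mtime)                   # True  -> no header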
|
import mimetypes
import unittest
from os import path
from unittest import mock
from urllib.parse import quote
from django.conf.urls.static import static
from django.core.exceptions import ImproperlyConfigured
from django.http import FileResponse, HttpResponseNotModified
from django.test import SimpleTestCase, override_settings
from django.utils.http import http_date
from django.views.static import directory_index, was_modified_since
from .. import urls
from ..urls import media_dir
@override_settings(DEBUG=True, ROOT_URLCONF="view_tests.urls")
class StaticTests(SimpleTestCase):
"""Tests django views in django/views/static.py"""
prefix = "site_media"
def test_serve(self):
"The static view can serve static media"
media_files = ["file.txt", "file.txt.gz", "%2F.txt"]
for filename in media_files:
response = self.client.get("/%s/%s" % (self.prefix, quote(filename)))
response_content = b"".join(response)
file_path = path.join(media_dir, filename)
with open(file_path, "rb") as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(
len(response_content), int(response.headers["Content-Length"])
)
self.assertEqual(
mimetypes.guess_type(file_path)[1],
response.get("Content-Encoding", None),
)
def test_chunked(self):
"""
The static view should stream files in chunks to avoid large memory
usage
"""
response = self.client.get("/%s/%s" % (self.prefix, "long-line.txt"))
response_iterator = iter(response)
first_chunk = next(response_iterator)
self.assertEqual(len(first_chunk), FileResponse.block_size)
second_chunk = next(response_iterator)
response.close()
# strip() to prevent OS line endings from causing differences
self.assertEqual(len(second_chunk.strip()), 1449)
def test_unknown_mime_type(self):
response = self.client.get("/%s/file.unknown" % self.prefix)
self.assertEqual("application/octet-stream", response.headers["Content-Type"])
response.close()
def test_copes_with_empty_path_component(self):
file_name = "file.txt"
response = self.client.get("/%s//%s" % (self.prefix, file_name))
response_content = b"".join(response)
with open(path.join(media_dir, file_name), "rb") as fp:
self.assertEqual(fp.read(), response_content)
def test_is_modified_since(self):
file_name = "file.txt"
response = self.client.get(
"/%s/%s" % (self.prefix, file_name),
headers={"if-modified-since": "Thu, 1 Jan 1970 00:00:00 GMT"},
)
response_content = b"".join(response)
with open(path.join(media_dir, file_name), "rb") as fp:
self.assertEqual(fp.read(), response_content)
def test_not_modified_since(self):
file_name = "file.txt"
response = self.client.get(
"/%s/%s" % (self.prefix, file_name),
headers={
                # This is roughly 24h before the maximum 32-bit Unix time.
                # Remember to fix Django and update this test well before
                # 2038 :)
"if-modified-since": "Mon, 18 Jan 2038 05:14:07 GMT"
},
)
self.assertIsInstance(response, HttpResponseNotModified)
def test_invalid_if_modified_since(self):
"""Handle bogus If-Modified-Since values gracefully
Assume that a file is modified since an invalid timestamp as per RFC
9110 Section 13.1.3.
"""
file_name = "file.txt"
invalid_date = "Mon, 28 May 999999999999 28:25:26 GMT"
response = self.client.get(
"/%s/%s" % (self.prefix, file_name),
headers={"if-modified-since": invalid_date},
)
response_content = b"".join(response)
with open(path.join(media_dir, file_name), "rb") as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response.headers["Content-Length"]))
def test_invalid_if_modified_since2(self):
"""Handle even more bogus If-Modified-Since values gracefully
Assume that a file is modified since an invalid timestamp as per RFC
9110 Section 13.1.3.
"""
file_name = "file.txt"
invalid_date = ": 1291108438, Wed, 20 Oct 2010 14:05:00 GMT"
response = self.client.get(
"/%s/%s" % (self.prefix, file_name),
headers={"if-modified-since": invalid_date},
)
response_content = b"".join(response)
with open(path.join(media_dir, file_name), "rb") as fp:
self.assertEqual(fp.read(), response_content)
self.assertEqual(len(response_content), int(response.headers["Content-Length"]))
def test_404(self):
response = self.client.get("/%s/nonexistent_resource" % self.prefix)
self.assertEqual(404, response.status_code)
def test_index(self):
response = self.client.get("/%s/" % self.prefix)
self.assertContains(response, "Index of ./")
# Directories have a trailing slash.
self.assertIn("subdir/", response.context["file_list"])
def test_index_subdir(self):
response = self.client.get("/%s/subdir/" % self.prefix)
self.assertContains(response, "Index of subdir/")
        # Files with a leading dot (e.g. .hidden) aren't displayed.
self.assertEqual(response.context["file_list"], ["visible"])
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"OPTIONS": {
"loaders": [
(
"django.template.loaders.locmem.Loader",
{
"static/directory_index.html": "Test index",
},
),
],
},
}
]
)
def test_index_custom_template(self):
response = self.client.get("/%s/" % self.prefix)
self.assertEqual(response.content, b"Test index")
def test_template_encoding(self):
"""
        The template is loaded directly, not via a template loader, and should
        be opened with the utf-8 charset, matching the default specified on
        template engines.
"""
from django.views.static import Path
with mock.patch.object(Path, "open") as m:
directory_index(mock.MagicMock(), mock.MagicMock())
m.assert_called_once_with(encoding="utf-8")
class StaticHelperTest(StaticTests):
"""
Test case to make sure the static URL pattern helper works as expected
"""
def setUp(self):
super().setUp()
self._old_views_urlpatterns = urls.urlpatterns[:]
urls.urlpatterns += static("media/", document_root=media_dir)
def tearDown(self):
super().tearDown()
urls.urlpatterns = self._old_views_urlpatterns
def test_prefix(self):
self.assertEqual(static("test")[0].pattern.regex.pattern, "^test(?P<path>.*)$")
@override_settings(DEBUG=False)
def test_debug_off(self):
"""No URLs are served if DEBUG=False."""
self.assertEqual(static("test"), [])
def test_empty_prefix(self):
with self.assertRaisesMessage(
ImproperlyConfigured, "Empty static prefix not permitted"
):
static("")
def test_special_prefix(self):
"""No URLs are served if prefix contains a netloc part."""
self.assertEqual(static("http://example.org"), [])
self.assertEqual(static("//example.org"), [])
class StaticUtilsTests(unittest.TestCase):
def test_was_modified_since_fp(self):
"""
A floating point mtime does not disturb was_modified_since (#18675).
"""
mtime = 1343416141.107817
header = http_date(mtime)
self.assertFalse(was_modified_since(header, mtime))
def test_was_modified_since_empty_string(self):
self.assertTrue(was_modified_since(header="", mtime=1))
|
./temp_repos/django/django/views/static.py
|
./temp_repos/django/tests/view_tests/tests/test_static.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.utils._os, django.http, mimetypes, pathlib, django.utils.http, django.utils.translation, django.template, posixpath
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern, as in the sketch below.
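A minimal illustrative skeleton (generic names, not tied to any specific
class) showing the requested mock + AAA shape:

import unittest
from unittest import mock

class ExampleAAATest(unittest.TestCase):
    def test_success_path(self):
        # Arrange: replace the external dependency with a MagicMock.
        dependency = mock.MagicMock(return_value="data")
        # Act: exercise the collaborating code (the mock stands in here).
        result = dependency()
        # Assert: check both the outcome and the interaction.
        self.assertEqual(result, "data")
        dependency.assert_called_once_with()

    def test_failure_path(self):
        # Arrange: make the dependency raise to simulate a failure.
        dependency = mock.MagicMock(side_effect=OSError("boom"))
        # Act / Assert: the simulated error propagates to the caller.
        with self.assertRaises(OSError):
            dependency()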
|
Unknown
|
python
|
from django.core.exceptions import ValidationError
from django.forms.fields import BooleanField, IntegerField
from django.forms.forms import Form
from django.forms.renderers import get_default_renderer
from django.forms.utils import ErrorList, RenderableFormMixin
from django.forms.widgets import CheckboxInput, HiddenInput, NumberInput
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
__all__ = ("BaseFormSet", "formset_factory", "all_valid")
# special field names
TOTAL_FORM_COUNT = "TOTAL_FORMS"
INITIAL_FORM_COUNT = "INITIAL_FORMS"
MIN_NUM_FORM_COUNT = "MIN_NUM_FORMS"
MAX_NUM_FORM_COUNT = "MAX_NUM_FORMS"
ORDERING_FIELD_NAME = "ORDER"
DELETION_FIELD_NAME = "DELETE"
# default minimum number of forms in a formset
DEFAULT_MIN_NUM = 0
# default maximum number of forms in a formset, to prevent memory exhaustion
DEFAULT_MAX_NUM = 1000
class ManagementForm(Form):
"""
Keep track of how many form instances are displayed on the page. If adding
new forms via JavaScript, you should increment the count field of this form
as well.
"""
TOTAL_FORMS = IntegerField(widget=HiddenInput)
INITIAL_FORMS = IntegerField(widget=HiddenInput)
# MIN_NUM_FORM_COUNT and MAX_NUM_FORM_COUNT are output with the rest of the
# management form, but only for the convenience of client-side code. The
# POST value of them returned from the client is not checked.
MIN_NUM_FORMS = IntegerField(required=False, widget=HiddenInput)
MAX_NUM_FORMS = IntegerField(required=False, widget=HiddenInput)
def clean(self):
cleaned_data = super().clean()
# When the management form is invalid, we don't know how many forms
# were submitted.
cleaned_data.setdefault(TOTAL_FORM_COUNT, 0)
cleaned_data.setdefault(INITIAL_FORM_COUNT, 0)
return cleaned_data
class BaseFormSet(RenderableFormMixin):
"""
A collection of instances of the same Form class.
"""
deletion_widget = CheckboxInput
ordering_widget = NumberInput
default_error_messages = {
"missing_management_form": _(
"ManagementForm data is missing or has been tampered with. Missing fields: "
"%(field_names)s. You may need to file a bug report if the issue persists."
),
"too_many_forms": ngettext_lazy(
"Please submit at most %(num)d form.",
"Please submit at most %(num)d forms.",
"num",
),
"too_few_forms": ngettext_lazy(
"Please submit at least %(num)d form.",
"Please submit at least %(num)d forms.",
"num",
),
}
template_name_div = "django/forms/formsets/div.html"
template_name_p = "django/forms/formsets/p.html"
template_name_table = "django/forms/formsets/table.html"
template_name_ul = "django/forms/formsets/ul.html"
def __init__(
self,
data=None,
files=None,
auto_id="id_%s",
prefix=None,
initial=None,
error_class=ErrorList,
form_kwargs=None,
error_messages=None,
):
self.is_bound = data is not None or files is not None
self.prefix = prefix or self.get_default_prefix()
self.auto_id = auto_id
self.data = data or {}
self.files = files or {}
self.initial = initial
self.form_kwargs = form_kwargs or {}
self.error_class = error_class
self._errors = None
self._non_form_errors = None
self.form_renderer = self.renderer
self.renderer = self.renderer or get_default_renderer()
messages = {}
for cls in reversed(type(self).__mro__):
messages.update(getattr(cls, "default_error_messages", {}))
if error_messages is not None:
messages.update(error_messages)
self.error_messages = messages
def __iter__(self):
"""Yield the forms in the order they should be rendered."""
return iter(self.forms)
def __getitem__(self, index):
"""Return the form at the given index, based on the rendering order."""
return self.forms[index]
def __len__(self):
return len(self.forms)
def __bool__(self):
"""
Return True since all formsets have a management form which is not
included in the length.
"""
return True
def __repr__(self):
if self._errors is None:
is_valid = "Unknown"
else:
is_valid = (
self.is_bound
and not self._non_form_errors
and not any(form_errors for form_errors in self._errors)
)
return "<%s: bound=%s valid=%s total_forms=%s>" % (
self.__class__.__qualname__,
self.is_bound,
is_valid,
self.total_form_count(),
)
@cached_property
def management_form(self):
"""Return the ManagementForm instance for this FormSet."""
if self.is_bound:
form = ManagementForm(
self.data,
auto_id=self.auto_id,
prefix=self.prefix,
renderer=self.renderer,
)
form.full_clean()
else:
form = ManagementForm(
auto_id=self.auto_id,
prefix=self.prefix,
initial={
TOTAL_FORM_COUNT: self.total_form_count(),
INITIAL_FORM_COUNT: self.initial_form_count(),
MIN_NUM_FORM_COUNT: self.min_num,
MAX_NUM_FORM_COUNT: self.max_num,
},
renderer=self.renderer,
)
return form
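    # Illustrative sketch (default "form" prefix assumed): the unbound
    # management form renders as four hidden inputs, e.g.:
    #   <input type="hidden" name="form-TOTAL_FORMS" value="1">
    #   <input type="hidden" name="form-INITIAL_FORMS" value="0">
    #   <input type="hidden" name="form-MIN_NUM_FORMS" value="0">
    #   <input type="hidden" name="form-MAX_NUM_FORMS" value="1000">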
def total_form_count(self):
"""Return the total number of forms in this FormSet."""
if self.is_bound:
# return absolute_max if it is lower than the actual total form
# count in the data; this is DoS protection to prevent clients
# from forcing the server to instantiate arbitrary numbers of
# forms
return min(
self.management_form.cleaned_data[TOTAL_FORM_COUNT], self.absolute_max
)
else:
initial_forms = self.initial_form_count()
total_forms = max(initial_forms, self.min_num) + self.extra
# Allow all existing related objects/inlines to be displayed,
# but don't allow extra beyond max_num.
if initial_forms > self.max_num >= 0:
total_forms = initial_forms
elif total_forms > self.max_num >= 0:
total_forms = self.max_num
return total_forms
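    # Illustrative sketch (assumed factory defaults): with max_num=1000 and
    # absolute_max=2000, a tampered management form claiming
    # TOTAL_FORMS=10**6 still instantiates at most min(10**6, 2000) == 2000
    # forms.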
def initial_form_count(self):
"""Return the number of forms that are required in this FormSet."""
if self.is_bound:
return self.management_form.cleaned_data[INITIAL_FORM_COUNT]
else:
# Use the length of the initial data if it's there, 0 otherwise.
initial_forms = len(self.initial) if self.initial else 0
return initial_forms
@cached_property
def forms(self):
"""Instantiate forms at first property access."""
# DoS protection is included in total_form_count()
return [
self._construct_form(i, **self.get_form_kwargs(i))
for i in range(self.total_form_count())
]
def get_form_kwargs(self, index):
"""
Return additional keyword arguments for each individual formset form.
index will be None if the form being constructed is a new empty
form.
"""
return self.form_kwargs.copy()
def _construct_form(self, i, **kwargs):
"""Instantiate and return the i-th form instance in a formset."""
defaults = {
"auto_id": self.auto_id,
"prefix": self.add_prefix(i),
"error_class": self.error_class,
# Don't render the HTML 'required' attribute as it may cause
# incorrect validation for extra, optional, and deleted
# forms in the formset.
"use_required_attribute": False,
"renderer": self.form_renderer,
}
if self.is_bound:
defaults["data"] = self.data
defaults["files"] = self.files
if self.initial and "initial" not in kwargs:
try:
defaults["initial"] = self.initial[i]
except IndexError:
pass
# Allow extra forms to be empty, unless they're part of
# the minimum forms.
if i >= self.initial_form_count() and i >= self.min_num:
defaults["empty_permitted"] = True
defaults.update(kwargs)
form = self.form(**defaults)
self.add_fields(form, i)
return form
@property
def initial_forms(self):
"""Return a list of all the initial forms in this formset."""
return self.forms[: self.initial_form_count()]
@property
def extra_forms(self):
"""Return a list of all the extra forms in this formset."""
return self.forms[self.initial_form_count() :]
@property
def empty_form(self):
form_kwargs = {
**self.get_form_kwargs(None),
"auto_id": self.auto_id,
"prefix": self.add_prefix("__prefix__"),
"empty_permitted": True,
"use_required_attribute": False,
"renderer": self.form_renderer,
}
form = self.form(**form_kwargs)
self.add_fields(form, None)
return form
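    # Illustrative sketch (assumed client-side usage): the "__prefix__"
    # placeholder in empty_form's field names is meant to be swapped for a
    # real index when JavaScript clones the blank form, e.g.:
    #   template = str(formset.empty_form)
    #   new_form_html = template.replace("__prefix__", str(next_index))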
@property
def cleaned_data(self):
"""
Return a list of form.cleaned_data dicts for every form in self.forms.
"""
if not self.is_valid():
raise AttributeError(
"'%s' object has no attribute 'cleaned_data'" % self.__class__.__name__
)
return [form.cleaned_data for form in self.forms]
@property
def deleted_forms(self):
"""Return a list of forms that have been marked for deletion."""
if not self.is_valid() or not self.can_delete:
return []
# construct _deleted_form_indexes which is just a list of form indexes
# that have had their deletion widget set to True
if not hasattr(self, "_deleted_form_indexes"):
self._deleted_form_indexes = []
for i, form in enumerate(self.forms):
# If this is an extra form and hasn't changed, ignore it.
if i >= self.initial_form_count() and not form.has_changed():
continue
if self._should_delete_form(form):
self._deleted_form_indexes.append(i)
return [self.forms[i] for i in self._deleted_form_indexes]
@property
def ordered_forms(self):
"""
        Return a list of forms in the order specified by the incoming data.
Raise an AttributeError if ordering is not allowed.
"""
if not self.is_valid() or not self.can_order:
raise AttributeError(
"'%s' object has no attribute 'ordered_forms'" % self.__class__.__name__
)
# Construct _ordering, which is a list of (form_index,
# order_field_value) tuples. After constructing this list, we'll sort
# it by order_field_value so we have a way to get to the form indexes
# in the order specified by the form data.
if not hasattr(self, "_ordering"):
self._ordering = []
for i, form in enumerate(self.forms):
# If this is an extra form and hasn't changed, ignore it.
if i >= self.initial_form_count() and not form.has_changed():
continue
# don't add data marked for deletion to self.ordered_data
if self.can_delete and self._should_delete_form(form):
continue
self._ordering.append((i, form.cleaned_data[ORDERING_FIELD_NAME]))
# After we're done populating self._ordering, sort it.
# A sort function to order things numerically ascending, but
# None should be sorted below anything else. Allowing None as
# a comparison value makes it so we can leave ordering fields
# blank.
def compare_ordering_key(k):
if k[1] is None:
return (1, 0) # +infinity, larger than any number
return (0, k[1])
self._ordering.sort(key=compare_ordering_key)
# Return a list of form.cleaned_data dicts in the order specified by
# the form data.
return [self.forms[i[0]] for i in self._ordering]
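    # Illustrative sketch: the (flag, value) key above sorts None after any
    # number:
    #   key = lambda v: (1, 0) if v is None else (0, v)
    #   sorted([3, None, 1], key=key)  # -> [1, 3, None]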
@classmethod
def get_default_prefix(cls):
return "form"
@classmethod
def get_deletion_widget(cls):
return cls.deletion_widget
@classmethod
def get_ordering_widget(cls):
return cls.ordering_widget
def non_form_errors(self):
"""
Return an ErrorList of errors that aren't associated with a particular
form -- i.e., from formset.clean(). Return an empty ErrorList if there
are none.
"""
if self._non_form_errors is None:
self.full_clean()
return self._non_form_errors
@property
def errors(self):
"""Return a list of form.errors for every form in self.forms."""
if self._errors is None:
self.full_clean()
return self._errors
def total_error_count(self):
"""Return the number of errors across all forms in the formset."""
return len(self.non_form_errors()) + sum(
len(form_errors) for form_errors in self.errors
)
def _should_delete_form(self, form):
"""Return whether or not the form was marked for deletion."""
return form.cleaned_data.get(DELETION_FIELD_NAME, False)
def is_valid(self):
"""Return True if every form in self.forms is valid."""
if not self.is_bound:
return False
# Accessing errors triggers a full clean the first time only.
self.errors
# List comprehension ensures is_valid() is called for all forms.
# Forms due to be deleted shouldn't cause the formset to be invalid.
forms_valid = all(
[
form.is_valid()
for form in self.forms
if not (self.can_delete and self._should_delete_form(form))
]
)
return forms_valid and not self.non_form_errors()
def full_clean(self):
"""
Clean all of self.data and populate self._errors and
self._non_form_errors.
"""
self._errors = []
self._non_form_errors = self.error_class(
error_class="nonform", renderer=self.renderer
)
empty_forms_count = 0
if not self.is_bound: # Stop further processing.
return
if not self.management_form.is_valid():
error = ValidationError(
self.error_messages["missing_management_form"],
params={
"field_names": ", ".join(
self.management_form.add_prefix(field_name)
for field_name in self.management_form.errors
),
},
code="missing_management_form",
)
self._non_form_errors.append(error)
for i, form in enumerate(self.forms):
# Empty forms are unchanged forms beyond those with initial data.
if not form.has_changed() and i >= self.initial_form_count():
empty_forms_count += 1
# Accessing errors calls full_clean() if necessary.
# _should_delete_form() requires cleaned_data.
form_errors = form.errors
if self.can_delete and self._should_delete_form(form):
continue
self._errors.append(form_errors)
try:
if (
self.validate_max
and self.total_form_count() - len(self.deleted_forms) > self.max_num
) or self.management_form.cleaned_data[
TOTAL_FORM_COUNT
] > self.absolute_max:
raise ValidationError(
self.error_messages["too_many_forms"] % {"num": self.max_num},
code="too_many_forms",
)
if (
self.validate_min
and self.total_form_count()
- len(self.deleted_forms)
- empty_forms_count
< self.min_num
):
raise ValidationError(
self.error_messages["too_few_forms"] % {"num": self.min_num},
code="too_few_forms",
)
# Give self.clean() a chance to do cross-form validation.
self.clean()
except ValidationError as e:
self._non_form_errors = self.error_class(
e.error_list,
error_class="nonform",
renderer=self.renderer,
)
def clean(self):
"""
Hook for doing any extra formset-wide cleaning after Form.clean() has
been called on every form. Any ValidationError raised by this method
will not be associated with a particular form; it will be accessible
via formset.non_form_errors()
"""
pass
def has_changed(self):
"""Return True if data in any form differs from initial."""
return any(form.has_changed() for form in self)
def add_fields(self, form, index):
"""A hook for adding extra fields on to each form instance."""
initial_form_count = self.initial_form_count()
if self.can_order:
# Only pre-fill the ordering field for initial forms.
if index is not None and index < initial_form_count:
form.fields[ORDERING_FIELD_NAME] = IntegerField(
label=_("Order"),
initial=index + 1,
required=False,
widget=self.get_ordering_widget(),
)
else:
form.fields[ORDERING_FIELD_NAME] = IntegerField(
label=_("Order"),
required=False,
widget=self.get_ordering_widget(),
)
if self.can_delete and (
self.can_delete_extra or (index is not None and index < initial_form_count)
):
form.fields[DELETION_FIELD_NAME] = BooleanField(
label=_("Delete"),
required=False,
widget=self.get_deletion_widget(),
)
def add_prefix(self, index):
return "%s-%s" % (self.prefix, index)
def is_multipart(self):
"""
Return True if the formset needs to be multipart, i.e. it
has FileInput, or False otherwise.
"""
if self.forms:
return self.forms[0].is_multipart()
else:
return self.empty_form.is_multipart()
@property
def media(self):
# All the forms on a FormSet are the same, so you only need to
# interrogate the first form for media.
if self.forms:
return self.forms[0].media
else:
return self.empty_form.media
@property
def template_name(self):
return self.renderer.formset_template_name
def get_context(self):
return {"formset": self}
def formset_factory(
form,
formset=BaseFormSet,
extra=1,
can_order=False,
can_delete=False,
max_num=None,
validate_max=False,
min_num=None,
validate_min=False,
absolute_max=None,
can_delete_extra=True,
renderer=None,
):
"""Return a FormSet for the given form class."""
if min_num is None:
min_num = DEFAULT_MIN_NUM
if max_num is None:
max_num = DEFAULT_MAX_NUM
# absolute_max is a hard limit on forms instantiated, to prevent
# memory-exhaustion attacks. Default to max_num + DEFAULT_MAX_NUM
# (which is 2 * DEFAULT_MAX_NUM if max_num is None in the first place).
if absolute_max is None:
absolute_max = max_num + DEFAULT_MAX_NUM
if max_num > absolute_max:
raise ValueError("'absolute_max' must be greater or equal to 'max_num'.")
attrs = {
"form": form,
"extra": extra,
"can_order": can_order,
"can_delete": can_delete,
"can_delete_extra": can_delete_extra,
"min_num": min_num,
"max_num": max_num,
"absolute_max": absolute_max,
"validate_min": validate_min,
"validate_max": validate_max,
"renderer": renderer,
}
form_name = form.__name__
if form_name.endswith("Form"):
formset_name = form_name + "Set"
else:
formset_name = form_name + "FormSet"
return type(formset_name, (formset,), attrs)
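# Illustrative sketch (hypothetical form class): formset_factory() builds a
# FormSet subclass by attaching the options as class attributes, e.g.:
#
#   from django import forms
#   class ChoiceForm(forms.Form):
#       choice = forms.CharField()
#   ChoiceFormSet = formset_factory(ChoiceForm, extra=2, can_delete=True)
#   ChoiceFormSet.__name__    # 'ChoiceFormSet'
#   ChoiceFormSet.extra       # 2
#   ChoiceFormSet.can_delete  # True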
def all_valid(formsets):
"""Validate every formset and return True if all are valid."""
# List comprehension ensures is_valid() is called for all formsets.
return all([formset.is_valid() for formset in formsets])
|
import datetime
from collections import Counter
from unittest import mock
from django.core.exceptions import ValidationError
from django.forms import (
BaseForm,
CharField,
DateField,
FileField,
Form,
IntegerField,
SplitDateTimeField,
formsets,
)
from django.forms.formsets import (
INITIAL_FORM_COUNT,
MAX_NUM_FORM_COUNT,
MIN_NUM_FORM_COUNT,
TOTAL_FORM_COUNT,
BaseFormSet,
ManagementForm,
all_valid,
formset_factory,
)
from django.forms.renderers import (
DjangoTemplates,
TemplatesSetting,
get_default_renderer,
)
from django.forms.utils import ErrorList
from django.forms.widgets import HiddenInput
from django.test import SimpleTestCase
from . import jinja2_tests
class Choice(Form):
choice = CharField()
votes = IntegerField()
ChoiceFormSet = formset_factory(Choice)
class ChoiceFormsetWithNonFormError(ChoiceFormSet):
def clean(self):
super().clean()
raise ValidationError("non-form error")
class FavoriteDrinkForm(Form):
name = CharField()
class BaseFavoriteDrinksFormSet(BaseFormSet):
def clean(self):
seen_drinks = []
for drink in self.cleaned_data:
if drink["name"] in seen_drinks:
raise ValidationError("You may only specify a drink once.")
seen_drinks.append(drink["name"])
# A FormSet that takes a list of favorite drinks and raises an error if
# there are any duplicates.
FavoriteDrinksFormSet = formset_factory(
FavoriteDrinkForm, formset=BaseFavoriteDrinksFormSet, extra=3
)
class CustomKwargForm(Form):
def __init__(self, *args, custom_kwarg, **kwargs):
self.custom_kwarg = custom_kwarg
super().__init__(*args, **kwargs)
class FormsFormsetTestCase(SimpleTestCase):
def make_choiceformset(
self,
formset_data=None,
formset_class=ChoiceFormSet,
total_forms=None,
initial_forms=0,
max_num_forms=0,
min_num_forms=0,
**kwargs,
):
"""
Make a ChoiceFormset from the given formset_data.
The data should be given as a list of (choice, votes) tuples.
"""
kwargs.setdefault("prefix", "choices")
kwargs.setdefault("auto_id", False)
if formset_data is None:
return formset_class(**kwargs)
if total_forms is None:
total_forms = len(formset_data)
def prefixed(*args):
args = (kwargs["prefix"],) + args
return "-".join(args)
data = {
prefixed("TOTAL_FORMS"): str(total_forms),
prefixed("INITIAL_FORMS"): str(initial_forms),
prefixed("MAX_NUM_FORMS"): str(max_num_forms),
prefixed("MIN_NUM_FORMS"): str(min_num_forms),
}
for i, (choice, votes) in enumerate(formset_data):
data[prefixed(str(i), "choice")] = choice
data[prefixed(str(i), "votes")] = votes
return formset_class(data, **kwargs)
def test_basic_formset(self):
"""
A FormSet constructor takes the same arguments as Form. Create a
FormSet for adding data. By default, it displays 1 blank form.
"""
formset = self.make_choiceformset()
self.assertHTMLEqual(
str(formset),
"""<input type="hidden" name="choices-TOTAL_FORMS" value="1">
<input type="hidden" name="choices-INITIAL_FORMS" value="0">
<input type="hidden" name="choices-MIN_NUM_FORMS" value="0">
<input type="hidden" name="choices-MAX_NUM_FORMS" value="1000">
<div>Choice:<input type="text" name="choices-0-choice"></div>
<div>Votes:<input type="number" name="choices-0-votes"></div>""",
)
        # FormSets are treated similarly to Forms. A FormSet has an is_valid()
        # method, and a cleaned_data or errors attribute depending on whether
        # all the forms passed validation. However, unlike a Form, cleaned_data
        # and errors will be a list of dicts rather than a single dict.
formset = self.make_choiceformset([("Calexico", "100")])
self.assertTrue(formset.is_valid())
self.assertEqual(
[form.cleaned_data for form in formset.forms],
[{"votes": 100, "choice": "Calexico"}],
)
# If a FormSet wasn't passed any data, is_valid() and has_changed()
# return False.
formset = self.make_choiceformset()
self.assertFalse(formset.is_valid())
self.assertFalse(formset.has_changed())
def test_formset_name(self):
ArticleFormSet = formset_factory(ArticleForm)
ChoiceFormSet = formset_factory(Choice)
self.assertEqual(ArticleFormSet.__name__, "ArticleFormSet")
self.assertEqual(ChoiceFormSet.__name__, "ChoiceFormSet")
def test_form_kwargs_formset(self):
"""
Custom kwargs set on the formset instance are passed to the
underlying forms.
"""
FormSet = formset_factory(CustomKwargForm, extra=2)
formset = FormSet(form_kwargs={"custom_kwarg": 1})
for form in formset:
self.assertTrue(hasattr(form, "custom_kwarg"))
self.assertEqual(form.custom_kwarg, 1)
def test_form_kwargs_formset_dynamic(self):
"""Form kwargs can be passed dynamically in a formset."""
class DynamicBaseFormSet(BaseFormSet):
def get_form_kwargs(self, index):
return {"custom_kwarg": index}
DynamicFormSet = formset_factory(
CustomKwargForm, formset=DynamicBaseFormSet, extra=2
)
formset = DynamicFormSet(form_kwargs={"custom_kwarg": "ignored"})
for i, form in enumerate(formset):
self.assertTrue(hasattr(form, "custom_kwarg"))
self.assertEqual(form.custom_kwarg, i)
def test_form_kwargs_empty_form(self):
FormSet = formset_factory(CustomKwargForm)
formset = FormSet(form_kwargs={"custom_kwarg": 1})
self.assertTrue(hasattr(formset.empty_form, "custom_kwarg"))
self.assertEqual(formset.empty_form.custom_kwarg, 1)
def test_empty_permitted_ignored_empty_form(self):
formset = ArticleFormSet(form_kwargs={"empty_permitted": False})
self.assertIs(formset.empty_form.empty_permitted, True)
def test_formset_validation(self):
# FormSet instances can also have an error attribute if validation
# failed for any of the forms.
formset = self.make_choiceformset([("Calexico", "")])
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{"votes": ["This field is required."]}])
def test_formset_validation_count(self):
"""
A formset's ManagementForm is validated once per FormSet.is_valid()
call and each form of the formset is cleaned once.
"""
def make_method_counter(func):
"""Add a counter to func for the number of times it's called."""
counter = Counter()
counter.call_count = 0
def mocked_func(*args, **kwargs):
counter.call_count += 1
return func(*args, **kwargs)
return mocked_func, counter
mocked_is_valid, is_valid_counter = make_method_counter(
formsets.ManagementForm.is_valid
)
mocked_full_clean, full_clean_counter = make_method_counter(BaseForm.full_clean)
formset = self.make_choiceformset(
[("Calexico", "100"), ("Any1", "42"), ("Any2", "101")]
)
with (
mock.patch(
"django.forms.formsets.ManagementForm.is_valid", mocked_is_valid
),
mock.patch("django.forms.forms.BaseForm.full_clean", mocked_full_clean),
):
self.assertTrue(formset.is_valid())
self.assertEqual(is_valid_counter.call_count, 1)
self.assertEqual(full_clean_counter.call_count, 4)
def test_formset_has_changed(self):
"""
FormSet.has_changed() is True if any data is passed to its forms, even
if the formset didn't validate.
"""
blank_formset = self.make_choiceformset([("", "")])
self.assertFalse(blank_formset.has_changed())
# invalid formset
invalid_formset = self.make_choiceformset([("Calexico", "")])
self.assertFalse(invalid_formset.is_valid())
self.assertTrue(invalid_formset.has_changed())
# valid formset
valid_formset = self.make_choiceformset([("Calexico", "100")])
self.assertTrue(valid_formset.is_valid())
self.assertTrue(valid_formset.has_changed())
def test_formset_initial_data(self):
"""
A FormSet can be prefilled with existing data by providing a list of
dicts to the `initial` argument. By default, an extra blank form is
included.
"""
formset = self.make_choiceformset(
initial=[{"choice": "Calexico", "votes": 100}]
)
self.assertHTMLEqual(
"\n".join(form.as_ul() for form in formset.forms),
'<li>Choice: <input type="text" name="choices-0-choice" value="Calexico">'
"</li>"
'<li>Votes: <input type="number" name="choices-0-votes" value="100"></li>'
'<li>Choice: <input type="text" name="choices-1-choice"></li>'
'<li>Votes: <input type="number" name="choices-1-votes"></li>',
)
def test_blank_form_unfilled(self):
"""A form that's displayed as blank may be submitted as blank."""
formset = self.make_choiceformset(
[("Calexico", "100"), ("", "")], initial_forms=1
)
self.assertTrue(formset.is_valid())
self.assertEqual(
[form.cleaned_data for form in formset.forms],
[{"votes": 100, "choice": "Calexico"}, {}],
)
def test_second_form_partially_filled(self):
"""
If at least one field is filled out on a blank form, it will be
validated.
"""
formset = self.make_choiceformset(
[("Calexico", "100"), ("The Decemberists", "")], initial_forms=1
)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{}, {"votes": ["This field is required."]}])
def test_delete_prefilled_data(self):
"""
Deleting prefilled data is an error. Removing data from form fields
isn't the proper way to delete it.
"""
formset = self.make_choiceformset([("", ""), ("", "")], initial_forms=1)
self.assertFalse(formset.is_valid())
self.assertEqual(
formset.errors,
[
{
"votes": ["This field is required."],
"choice": ["This field is required."],
},
{},
],
)
def test_displaying_more_than_one_blank_form(self):
"""
More than 1 empty form can be displayed using formset_factory's
`extra` argument.
"""
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(auto_id=False, prefix="choices")
self.assertHTMLEqual(
"\n".join(form.as_ul() for form in formset.forms),
"""<li>Choice: <input type="text" name="choices-0-choice"></li>
<li>Votes: <input type="number" name="choices-0-votes"></li>
<li>Choice: <input type="text" name="choices-1-choice"></li>
<li>Votes: <input type="number" name="choices-1-votes"></li>
<li>Choice: <input type="text" name="choices-2-choice"></li>
<li>Votes: <input type="number" name="choices-2-votes"></li>""",
)
# Since every form was displayed as blank, they are also accepted as
# blank. This may seem a little strange, but min_num is used to require
# a minimum number of forms to be completed.
data = {
"choices-TOTAL_FORMS": "3", # the number of forms rendered
"choices-INITIAL_FORMS": "0", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "0", # max number of forms
"choices-0-choice": "",
"choices-0-votes": "",
"choices-1-choice": "",
"choices-1-votes": "",
"choices-2-choice": "",
"choices-2-votes": "",
}
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{}, {}, {}])
def test_min_num_displaying_more_than_one_blank_form(self):
"""
More than 1 empty form can also be displayed using formset_factory's
min_num argument. It will (essentially) increment the extra argument.
"""
ChoiceFormSet = formset_factory(Choice, extra=1, min_num=1)
formset = ChoiceFormSet(auto_id=False, prefix="choices")
# Min_num forms are required; extra forms can be empty.
self.assertFalse(formset.forms[0].empty_permitted)
self.assertTrue(formset.forms[1].empty_permitted)
self.assertHTMLEqual(
"\n".join(form.as_ul() for form in formset.forms),
"""<li>Choice: <input type="text" name="choices-0-choice"></li>
<li>Votes: <input type="number" name="choices-0-votes"></li>
<li>Choice: <input type="text" name="choices-1-choice"></li>
<li>Votes: <input type="number" name="choices-1-votes"></li>""",
)
def test_min_num_displaying_more_than_one_blank_form_with_zero_extra(self):
"""More than 1 empty form can be displayed using min_num."""
ChoiceFormSet = formset_factory(Choice, extra=0, min_num=3)
formset = ChoiceFormSet(auto_id=False, prefix="choices")
self.assertHTMLEqual(
"\n".join(form.as_ul() for form in formset.forms),
"""<li>Choice: <input type="text" name="choices-0-choice"></li>
<li>Votes: <input type="number" name="choices-0-votes"></li>
<li>Choice: <input type="text" name="choices-1-choice"></li>
<li>Votes: <input type="number" name="choices-1-votes"></li>
<li>Choice: <input type="text" name="choices-2-choice"></li>
<li>Votes: <input type="number" name="choices-2-votes"></li>""",
)
def test_single_form_completed(self):
"""Just one form may be completed."""
data = {
"choices-TOTAL_FORMS": "3", # the number of forms rendered
"choices-INITIAL_FORMS": "0", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "0", # max number of forms
"choices-0-choice": "Calexico",
"choices-0-votes": "100",
"choices-1-choice": "",
"choices-1-votes": "",
"choices-2-choice": "",
"choices-2-votes": "",
}
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertTrue(formset.is_valid())
self.assertEqual(
[form.cleaned_data for form in formset.forms],
[{"votes": 100, "choice": "Calexico"}, {}, {}],
)
def test_formset_validate_max_flag(self):
"""
If validate_max is set and max_num is less than TOTAL_FORMS in the
data, a ValidationError is raised. MAX_NUM_FORMS in the data is
irrelevant here (it's output as a hint for the client but its value
in the returned data is not checked).
"""
data = {
"choices-TOTAL_FORMS": "2", # the number of forms rendered
"choices-INITIAL_FORMS": "0", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "2", # max number of forms - should be ignored
"choices-0-choice": "Zero",
"choices-0-votes": "0",
"choices-1-choice": "One",
"choices-1-votes": "1",
}
ChoiceFormSet = formset_factory(Choice, extra=1, max_num=1, validate_max=True)
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ["Please submit at most 1 form."])
self.assertEqual(
str(formset.non_form_errors()),
'<ul class="errorlist nonform"><li>Please submit at most 1 form.</li></ul>',
)
def test_formset_validate_max_flag_custom_error(self):
data = {
"choices-TOTAL_FORMS": "2",
"choices-INITIAL_FORMS": "0",
"choices-MIN_NUM_FORMS": "0",
"choices-MAX_NUM_FORMS": "2",
"choices-0-choice": "Zero",
"choices-0-votes": "0",
"choices-1-choice": "One",
"choices-1-votes": "1",
}
ChoiceFormSet = formset_factory(Choice, extra=1, max_num=1, validate_max=True)
formset = ChoiceFormSet(
data,
auto_id=False,
prefix="choices",
error_messages={
"too_many_forms": "Number of submitted forms should be at most %(num)d."
},
)
self.assertFalse(formset.is_valid())
self.assertEqual(
formset.non_form_errors(),
["Number of submitted forms should be at most 1."],
)
self.assertEqual(
str(formset.non_form_errors()),
'<ul class="errorlist nonform">'
"<li>Number of submitted forms should be at most 1.</li></ul>",
)
def test_formset_validate_min_flag(self):
"""
If validate_min is set and min_num is more than TOTAL_FORMS in the
data, a ValidationError is raised. MIN_NUM_FORMS in the data is
irrelevant here (it's output as a hint for the client but its value
in the returned data is not checked).
"""
data = {
"choices-TOTAL_FORMS": "2", # the number of forms rendered
"choices-INITIAL_FORMS": "0", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "0", # max number of forms - should be ignored
"choices-0-choice": "Zero",
"choices-0-votes": "0",
"choices-1-choice": "One",
"choices-1-votes": "1",
}
ChoiceFormSet = formset_factory(Choice, extra=1, min_num=3, validate_min=True)
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ["Please submit at least 3 forms."])
self.assertEqual(
str(formset.non_form_errors()),
'<ul class="errorlist nonform"><li>'
"Please submit at least 3 forms.</li></ul>",
)
def test_formset_validate_min_flag_custom_formatted_error(self):
data = {
"choices-TOTAL_FORMS": "2",
"choices-INITIAL_FORMS": "0",
"choices-MIN_NUM_FORMS": "0",
"choices-MAX_NUM_FORMS": "0",
"choices-0-choice": "Zero",
"choices-0-votes": "0",
"choices-1-choice": "One",
"choices-1-votes": "1",
}
ChoiceFormSet = formset_factory(Choice, extra=1, min_num=3, validate_min=True)
formset = ChoiceFormSet(
data,
auto_id=False,
prefix="choices",
error_messages={
"too_few_forms": "Number of submitted forms should be at least %(num)d."
},
)
self.assertFalse(formset.is_valid())
self.assertEqual(
formset.non_form_errors(),
["Number of submitted forms should be at least 3."],
)
self.assertEqual(
str(formset.non_form_errors()),
'<ul class="errorlist nonform">'
"<li>Number of submitted forms should be at least 3.</li></ul>",
)
def test_formset_validate_min_unchanged_forms(self):
"""
min_num validation doesn't consider unchanged forms with initial data
as "empty".
"""
initial = [
{"choice": "Zero", "votes": 0},
{"choice": "One", "votes": 0},
]
data = {
"choices-TOTAL_FORMS": "2",
"choices-INITIAL_FORMS": "2",
"choices-MIN_NUM_FORMS": "0",
"choices-MAX_NUM_FORMS": "2",
"choices-0-choice": "Zero",
"choices-0-votes": "0",
"choices-1-choice": "One",
"choices-1-votes": "1", # changed from initial
}
ChoiceFormSet = formset_factory(Choice, min_num=2, validate_min=True)
formset = ChoiceFormSet(data, auto_id=False, prefix="choices", initial=initial)
self.assertFalse(formset.forms[0].has_changed())
self.assertTrue(formset.forms[1].has_changed())
self.assertTrue(formset.is_valid())
def test_formset_validate_min_excludes_empty_forms(self):
data = {
"choices-TOTAL_FORMS": "2",
"choices-INITIAL_FORMS": "0",
}
ChoiceFormSet = formset_factory(
Choice, extra=2, min_num=1, validate_min=True, can_delete=True
)
formset = ChoiceFormSet(data, prefix="choices")
self.assertFalse(formset.has_changed())
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ["Please submit at least 1 form."])
def test_second_form_partially_filled_2(self):
"""A partially completed form is invalid."""
data = {
"choices-TOTAL_FORMS": "3", # the number of forms rendered
"choices-INITIAL_FORMS": "0", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "0", # max number of forms
"choices-0-choice": "Calexico",
"choices-0-votes": "100",
"choices-1-choice": "The Decemberists",
"choices-1-votes": "", # missing value
"choices-2-choice": "",
"choices-2-votes": "",
}
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertFalse(formset.is_valid())
self.assertEqual(
formset.errors, [{}, {"votes": ["This field is required."]}, {}]
)
def test_more_initial_data(self):
"""
The extra argument works when the formset is pre-filled with initial
data.
"""
initial = [{"choice": "Calexico", "votes": 100}]
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix="choices")
self.assertHTMLEqual(
"\n".join(form.as_ul() for form in formset.forms),
'<li>Choice: <input type="text" name="choices-0-choice" value="Calexico">'
"</li>"
'<li>Votes: <input type="number" name="choices-0-votes" value="100"></li>'
'<li>Choice: <input type="text" name="choices-1-choice"></li>'
'<li>Votes: <input type="number" name="choices-1-votes"></li>'
'<li>Choice: <input type="text" name="choices-2-choice"></li>'
'<li>Votes: <input type="number" name="choices-2-votes"></li>'
'<li>Choice: <input type="text" name="choices-3-choice"></li>'
'<li>Votes: <input type="number" name="choices-3-votes"></li>',
)
        # Retrieving an empty form works. It shows up in the form list.
self.assertTrue(formset.empty_form.empty_permitted)
self.assertHTMLEqual(
formset.empty_form.as_ul(),
"""<li>Choice: <input type="text" name="choices-__prefix__-choice"></li>
<li>Votes: <input type="number" name="choices-__prefix__-votes"></li>""",
)
def test_formset_with_deletion(self):
"""
formset_factory's can_delete argument adds a boolean "delete" field to
each form. When that boolean field is True, the form will be in
formset.deleted_forms.
"""
ChoiceFormSet = formset_factory(Choice, can_delete=True)
initial = [
{"choice": "Calexico", "votes": 100},
{"choice": "Fergie", "votes": 900},
]
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix="choices")
self.assertHTMLEqual(
"\n".join(form.as_ul() for form in formset.forms),
'<li>Choice: <input type="text" name="choices-0-choice" value="Calexico">'
"</li>"
'<li>Votes: <input type="number" name="choices-0-votes" value="100"></li>'
'<li>Delete: <input type="checkbox" name="choices-0-DELETE"></li>'
'<li>Choice: <input type="text" name="choices-1-choice" value="Fergie">'
"</li>"
'<li>Votes: <input type="number" name="choices-1-votes" value="900"></li>'
'<li>Delete: <input type="checkbox" name="choices-1-DELETE"></li>'
'<li>Choice: <input type="text" name="choices-2-choice"></li>'
'<li>Votes: <input type="number" name="choices-2-votes"></li>'
'<li>Delete: <input type="checkbox" name="choices-2-DELETE"></li>',
)
# To delete something, set that form's special delete field to 'on'.
# Let's go ahead and delete Fergie.
data = {
"choices-TOTAL_FORMS": "3", # the number of forms rendered
"choices-INITIAL_FORMS": "2", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "0", # max number of forms
"choices-0-choice": "Calexico",
"choices-0-votes": "100",
"choices-0-DELETE": "",
"choices-1-choice": "Fergie",
"choices-1-votes": "900",
"choices-1-DELETE": "on",
"choices-2-choice": "",
"choices-2-votes": "",
"choices-2-DELETE": "",
}
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertTrue(formset.is_valid())
self.assertEqual(
[form.cleaned_data for form in formset.forms],
[
{"votes": 100, "DELETE": False, "choice": "Calexico"},
{"votes": 900, "DELETE": True, "choice": "Fergie"},
{},
],
)
self.assertEqual(
[form.cleaned_data for form in formset.deleted_forms],
[{"votes": 900, "DELETE": True, "choice": "Fergie"}],
)
def test_formset_with_deletion_remove_deletion_flag(self):
"""
If a form is filled with something and can_delete is also checked, that
form's errors shouldn't make the entire formset invalid since it's
going to be deleted.
"""
class CheckForm(Form):
field = IntegerField(min_value=100)
data = {
"check-TOTAL_FORMS": "3", # the number of forms rendered
"check-INITIAL_FORMS": "2", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"check-MAX_NUM_FORMS": "0", # max number of forms
"check-0-field": "200",
"check-0-DELETE": "",
"check-1-field": "50",
"check-1-DELETE": "on",
"check-2-field": "",
"check-2-DELETE": "",
}
CheckFormSet = formset_factory(CheckForm, can_delete=True)
formset = CheckFormSet(data, prefix="check")
self.assertTrue(formset.is_valid())
# If the deletion flag is removed, validation is enabled.
data["check-1-DELETE"] = ""
formset = CheckFormSet(data, prefix="check")
self.assertFalse(formset.is_valid())
def test_formset_with_deletion_invalid_deleted_form(self):
"""
deleted_forms works on a valid formset even if a deleted form would
have been invalid.
"""
FavoriteDrinkFormset = formset_factory(form=FavoriteDrinkForm, can_delete=True)
formset = FavoriteDrinkFormset(
{
"form-0-name": "",
"form-0-DELETE": "on", # no name!
"form-TOTAL_FORMS": 1,
"form-INITIAL_FORMS": 1,
"form-MIN_NUM_FORMS": 0,
"form-MAX_NUM_FORMS": 1,
}
)
self.assertTrue(formset.is_valid())
self.assertEqual(formset._errors, [])
self.assertEqual(len(formset.deleted_forms), 1)
def test_formset_with_deletion_custom_widget(self):
class DeletionAttributeFormSet(BaseFormSet):
deletion_widget = HiddenInput
class DeletionMethodFormSet(BaseFormSet):
def get_deletion_widget(self):
return HiddenInput(attrs={"class": "deletion"})
tests = [
(DeletionAttributeFormSet, '<input type="hidden" name="form-0-DELETE">'),
(
DeletionMethodFormSet,
'<input class="deletion" type="hidden" name="form-0-DELETE">',
),
]
for formset_class, delete_html in tests:
with self.subTest(formset_class=formset_class.__name__):
ArticleFormSet = formset_factory(
ArticleForm,
formset=formset_class,
can_delete=True,
)
formset = ArticleFormSet(auto_id=False)
self.assertHTMLEqual(
"\n".join([form.as_ul() for form in formset.forms]),
(
f'<li>Title: <input type="text" name="form-0-title"></li>'
f'<li>Pub date: <input type="text" name="form-0-pub_date">'
f"{delete_html}</li>"
),
)
def test_formsets_with_ordering(self):
"""
formset_factory's can_order argument adds an integer field to each
form. When form validation succeeds,
[form.cleaned_data for form in formset.forms]
will have the data in the correct order specified by the ordering
        fields. If a number is duplicated in the set of ordering fields, for
        instance when form 0 and form 3 are both marked as 1, then the form
        index is used as a secondary ordering criterion. To put something at
        the front of the list, set its order to 0.
"""
ChoiceFormSet = formset_factory(Choice, can_order=True)
initial = [
{"choice": "Calexico", "votes": 100},
{"choice": "Fergie", "votes": 900},
]
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix="choices")
self.assertHTMLEqual(
"\n".join(form.as_ul() for form in formset.forms),
'<li>Choice: <input type="text" name="choices-0-choice" value="Calexico">'
"</li>"
'<li>Votes: <input type="number" name="choices-0-votes" value="100"></li>'
'<li>Order: <input type="number" name="choices-0-ORDER" value="1"></li>'
'<li>Choice: <input type="text" name="choices-1-choice" value="Fergie">'
"</li>"
'<li>Votes: <input type="number" name="choices-1-votes" value="900"></li>'
'<li>Order: <input type="number" name="choices-1-ORDER" value="2"></li>'
'<li>Choice: <input type="text" name="choices-2-choice"></li>'
'<li>Votes: <input type="number" name="choices-2-votes"></li>'
'<li>Order: <input type="number" name="choices-2-ORDER"></li>',
)
data = {
"choices-TOTAL_FORMS": "3", # the number of forms rendered
"choices-INITIAL_FORMS": "2", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "0", # max number of forms
"choices-0-choice": "Calexico",
"choices-0-votes": "100",
"choices-0-ORDER": "1",
"choices-1-choice": "Fergie",
"choices-1-votes": "900",
"choices-1-ORDER": "2",
"choices-2-choice": "The Decemberists",
"choices-2-votes": "500",
"choices-2-ORDER": "0",
}
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertTrue(formset.is_valid())
self.assertEqual(
[form.cleaned_data for form in formset.ordered_forms],
[
{"votes": 500, "ORDER": 0, "choice": "The Decemberists"},
{"votes": 100, "ORDER": 1, "choice": "Calexico"},
{"votes": 900, "ORDER": 2, "choice": "Fergie"},
],
)
def test_formsets_with_ordering_custom_widget(self):
class OrderingAttributeFormSet(BaseFormSet):
ordering_widget = HiddenInput
class OrderingMethodFormSet(BaseFormSet):
def get_ordering_widget(self):
return HiddenInput(attrs={"class": "ordering"})
tests = (
(OrderingAttributeFormSet, '<input type="hidden" name="form-0-ORDER">'),
(
OrderingMethodFormSet,
'<input class="ordering" type="hidden" name="form-0-ORDER">',
),
)
for formset_class, order_html in tests:
with self.subTest(formset_class=formset_class.__name__):
ArticleFormSet = formset_factory(
ArticleForm, formset=formset_class, can_order=True
)
formset = ArticleFormSet(auto_id=False)
self.assertHTMLEqual(
"\n".join(form.as_ul() for form in formset.forms),
(
'<li>Title: <input type="text" name="form-0-title"></li>'
'<li>Pub date: <input type="text" name="form-0-pub_date">'
"%s</li>" % order_html
),
)
def test_empty_ordered_fields(self):
"""
Ordering fields are allowed to be left blank. If they are left blank,
they'll be sorted below everything else.
"""
data = {
"choices-TOTAL_FORMS": "4", # the number of forms rendered
"choices-INITIAL_FORMS": "3", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "0", # max number of forms
"choices-0-choice": "Calexico",
"choices-0-votes": "100",
"choices-0-ORDER": "1",
"choices-1-choice": "Fergie",
"choices-1-votes": "900",
"choices-1-ORDER": "2",
"choices-2-choice": "The Decemberists",
"choices-2-votes": "500",
"choices-2-ORDER": "",
"choices-3-choice": "Basia Bulat",
"choices-3-votes": "50",
"choices-3-ORDER": "",
}
ChoiceFormSet = formset_factory(Choice, can_order=True)
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertTrue(formset.is_valid())
self.assertEqual(
[form.cleaned_data for form in formset.ordered_forms],
[
{"votes": 100, "ORDER": 1, "choice": "Calexico"},
{"votes": 900, "ORDER": 2, "choice": "Fergie"},
{"votes": 500, "ORDER": None, "choice": "The Decemberists"},
{"votes": 50, "ORDER": None, "choice": "Basia Bulat"},
],
)
def test_ordering_blank_fieldsets(self):
"""Ordering works with blank fieldsets."""
data = {
"choices-TOTAL_FORMS": "3", # the number of forms rendered
"choices-INITIAL_FORMS": "0", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "0", # max number of forms
}
ChoiceFormSet = formset_factory(Choice, can_order=True)
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertTrue(formset.is_valid())
self.assertEqual(formset.ordered_forms, [])
def test_formset_with_ordering_and_deletion(self):
"""FormSets with ordering + deletion."""
ChoiceFormSet = formset_factory(Choice, can_order=True, can_delete=True)
initial = [
{"choice": "Calexico", "votes": 100},
{"choice": "Fergie", "votes": 900},
{"choice": "The Decemberists", "votes": 500},
]
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix="choices")
self.assertHTMLEqual(
"\n".join(form.as_ul() for form in formset.forms),
'<li>Choice: <input type="text" name="choices-0-choice" value="Calexico">'
"</li>"
'<li>Votes: <input type="number" name="choices-0-votes" value="100"></li>'
'<li>Order: <input type="number" name="choices-0-ORDER" value="1"></li>'
'<li>Delete: <input type="checkbox" name="choices-0-DELETE"></li>'
'<li>Choice: <input type="text" name="choices-1-choice" value="Fergie">'
"</li>"
'<li>Votes: <input type="number" name="choices-1-votes" value="900"></li>'
'<li>Order: <input type="number" name="choices-1-ORDER" value="2"></li>'
'<li>Delete: <input type="checkbox" name="choices-1-DELETE"></li>'
'<li>Choice: <input type="text" name="choices-2-choice" '
'value="The Decemberists"></li>'
'<li>Votes: <input type="number" name="choices-2-votes" value="500"></li>'
'<li>Order: <input type="number" name="choices-2-ORDER" value="3"></li>'
'<li>Delete: <input type="checkbox" name="choices-2-DELETE"></li>'
'<li>Choice: <input type="text" name="choices-3-choice"></li>'
'<li>Votes: <input type="number" name="choices-3-votes"></li>'
'<li>Order: <input type="number" name="choices-3-ORDER"></li>'
'<li>Delete: <input type="checkbox" name="choices-3-DELETE"></li>',
)
# Let's delete Fergie, and put The Decemberists ahead of Calexico.
data = {
"choices-TOTAL_FORMS": "4", # the number of forms rendered
"choices-INITIAL_FORMS": "3", # the number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "0", # max number of forms
"choices-0-choice": "Calexico",
"choices-0-votes": "100",
"choices-0-ORDER": "1",
"choices-0-DELETE": "",
"choices-1-choice": "Fergie",
"choices-1-votes": "900",
"choices-1-ORDER": "2",
"choices-1-DELETE": "on",
"choices-2-choice": "The Decemberists",
"choices-2-votes": "500",
"choices-2-ORDER": "0",
"choices-2-DELETE": "",
"choices-3-choice": "",
"choices-3-votes": "",
"choices-3-ORDER": "",
"choices-3-DELETE": "",
}
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertTrue(formset.is_valid())
self.assertEqual(
[form.cleaned_data for form in formset.ordered_forms],
[
{
"votes": 500,
"DELETE": False,
"ORDER": 0,
"choice": "The Decemberists",
},
{"votes": 100, "DELETE": False, "ORDER": 1, "choice": "Calexico"},
],
)
self.assertEqual(
[form.cleaned_data for form in formset.deleted_forms],
[{"votes": 900, "DELETE": True, "ORDER": 2, "choice": "Fergie"}],
)
def test_invalid_deleted_form_with_ordering(self):
"""
Can get ordered_forms from a valid formset even if a deleted form
would have been invalid.
"""
FavoriteDrinkFormset = formset_factory(
form=FavoriteDrinkForm, can_delete=True, can_order=True
)
formset = FavoriteDrinkFormset(
{
"form-0-name": "",
"form-0-DELETE": "on", # no name!
"form-TOTAL_FORMS": 1,
"form-INITIAL_FORMS": 1,
"form-MIN_NUM_FORMS": 0,
"form-MAX_NUM_FORMS": 1,
}
)
self.assertTrue(formset.is_valid())
self.assertEqual(formset.ordered_forms, [])
def test_clean_hook(self):
"""
FormSets have a clean() hook for doing extra validation that isn't tied
to any form. It follows the same pattern as the clean() hook on Forms.
"""
        # Start out with some duplicate data.
data = {
"drinks-TOTAL_FORMS": "2", # the number of forms rendered
"drinks-INITIAL_FORMS": "0", # the number of forms with initial data
"drinks-MIN_NUM_FORMS": "0", # min number of forms
"drinks-MAX_NUM_FORMS": "0", # max number of forms
"drinks-0-name": "Gin and Tonic",
"drinks-1-name": "Gin and Tonic",
}
formset = FavoriteDrinksFormSet(data, prefix="drinks")
self.assertFalse(formset.is_valid())
# Any errors raised by formset.clean() are available via the
# formset.non_form_errors() method.
for error in formset.non_form_errors():
self.assertEqual(str(error), "You may only specify a drink once.")
# The valid case still works.
data["drinks-1-name"] = "Bloody Mary"
formset = FavoriteDrinksFormSet(data, prefix="drinks")
self.assertTrue(formset.is_valid())
self.assertEqual(formset.non_form_errors(), [])
def test_limiting_max_forms(self):
"""Limiting the maximum number of forms with max_num."""
# When not passed, max_num will take a high default value, leaving the
# number of forms only controlled by the value of the extra parameter.
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=3)
formset = LimitedFavoriteDrinkFormSet()
self.assertHTMLEqual(
"\n".join(str(form) for form in formset.forms),
"""<div><label for="id_form-0-name">Name:</label>
<input type="text" name="form-0-name" id="id_form-0-name"></div>
<div><label for="id_form-1-name">Name:</label>
<input type="text" name="form-1-name" id="id_form-1-name"></div>
<div><label for="id_form-2-name">Name:</label>
<input type="text" name="form-2-name" id="id_form-2-name"></div>""",
)
# If max_num is 0 then no form is rendered at all.
LimitedFavoriteDrinkFormSet = formset_factory(
FavoriteDrinkForm, extra=3, max_num=0
)
formset = LimitedFavoriteDrinkFormSet()
self.assertEqual(formset.forms, [])
def test_limited_max_forms_two(self):
LimitedFavoriteDrinkFormSet = formset_factory(
FavoriteDrinkForm, extra=5, max_num=2
)
formset = LimitedFavoriteDrinkFormSet()
self.assertHTMLEqual(
"\n".join(str(form) for form in formset.forms),
"""<div><label for="id_form-0-name">Name:</label>
<input type="text" name="form-0-name" id="id_form-0-name"></div>
<div><label for="id_form-1-name">Name:</label>
<input type="text" name="form-1-name" id="id_form-1-name"></div>""",
)
def test_limiting_extra_less_than_max_num(self):
"""max_num has no effect when extra is less than max_num."""
LimitedFavoriteDrinkFormSet = formset_factory(
FavoriteDrinkForm, extra=1, max_num=2
)
formset = LimitedFavoriteDrinkFormSet()
self.assertHTMLEqual(
"\n".join(str(form) for form in formset.forms),
"""<div><label for="id_form-0-name">Name:</label>
<input type="text" name="form-0-name" id="id_form-0-name"></div>""",
)
def test_max_num_with_initial_data(self):
# When not passed, max_num will take a high default value, leaving the
# number of forms only controlled by the value of the initial and extra
# parameters.
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1)
formset = LimitedFavoriteDrinkFormSet(initial=[{"name": "Fernet and Coke"}])
self.assertHTMLEqual(
"\n".join(str(form) for form in formset.forms),
"""
<div><label for="id_form-0-name">Name:</label>
<input type="text" name="form-0-name" value="Fernet and Coke"
id="id_form-0-name"></div>
<div><label for="id_form-1-name">Name:</label>
<input type="text" name="form-1-name" id="id_form-1-name"></div>
""",
)
def test_max_num_zero(self):
"""
If max_num is 0 then no form is rendered at all, regardless of extra,
unless initial data is present.
"""
LimitedFavoriteDrinkFormSet = formset_factory(
FavoriteDrinkForm, extra=1, max_num=0
)
formset = LimitedFavoriteDrinkFormSet()
self.assertEqual(formset.forms, [])
def test_max_num_zero_with_initial(self):
# initial trumps max_num
initial = [
{"name": "Fernet and Coke"},
{"name": "Bloody Mary"},
]
LimitedFavoriteDrinkFormSet = formset_factory(
FavoriteDrinkForm, extra=1, max_num=0
)
formset = LimitedFavoriteDrinkFormSet(initial=initial)
self.assertHTMLEqual(
"\n".join(str(form) for form in formset.forms),
"""
<div><label for="id_form-0-name">Name:</label>
<input id="id_form-0-name" name="form-0-name" type="text"
value="Fernet and Coke"></div>
<div><label for="id_form-1-name">Name:</label>
<input id="id_form-1-name" name="form-1-name" type="text"
value="Bloody Mary"></div>
""",
)
def test_more_initial_than_max_num(self):
"""
More initial forms than max_num results in all initial forms being
displayed (but no extra forms).
"""
initial = [
{"name": "Gin Tonic"},
{"name": "Bloody Mary"},
{"name": "Jack and Coke"},
]
LimitedFavoriteDrinkFormSet = formset_factory(
FavoriteDrinkForm, extra=1, max_num=2
)
formset = LimitedFavoriteDrinkFormSet(initial=initial)
self.assertHTMLEqual(
"\n".join(str(form) for form in formset.forms),
"""
<div><label for="id_form-0-name">Name:</label>
<input id="id_form-0-name" name="form-0-name" type="text" value="Gin Tonic">
</div>
<div><label for="id_form-1-name">Name:</label>
<input id="id_form-1-name" name="form-1-name" type="text"
value="Bloody Mary"></div>
<div><label for="id_form-2-name">Name:</label>
<input id="id_form-2-name" name="form-2-name" type="text"
value="Jack and Coke"></div>
""",
)
def test_default_absolute_max(self):
# absolute_max defaults to 2 * DEFAULT_MAX_NUM if max_num is None.
data = {
"form-TOTAL_FORMS": 2001,
"form-INITIAL_FORMS": "0",
"form-MAX_NUM_FORMS": "0",
}
formset = FavoriteDrinksFormSet(data=data)
self.assertIs(formset.is_valid(), False)
self.assertEqual(
formset.non_form_errors(),
["Please submit at most 1000 forms."],
)
self.assertEqual(formset.absolute_max, 2000)
def test_absolute_max(self):
data = {
"form-TOTAL_FORMS": "2001",
"form-INITIAL_FORMS": "0",
"form-MAX_NUM_FORMS": "0",
}
AbsoluteMaxFavoriteDrinksFormSet = formset_factory(
FavoriteDrinkForm,
absolute_max=3000,
)
formset = AbsoluteMaxFavoriteDrinksFormSet(data=data)
self.assertIs(formset.is_valid(), True)
self.assertEqual(len(formset.forms), 2001)
# absolute_max provides a hard limit.
data["form-TOTAL_FORMS"] = "3001"
formset = AbsoluteMaxFavoriteDrinksFormSet(data=data)
self.assertIs(formset.is_valid(), False)
self.assertEqual(len(formset.forms), 3000)
self.assertEqual(
formset.non_form_errors(),
["Please submit at most 1000 forms."],
)
def test_absolute_max_with_max_num(self):
data = {
"form-TOTAL_FORMS": "1001",
"form-INITIAL_FORMS": "0",
"form-MAX_NUM_FORMS": "0",
}
LimitedFavoriteDrinksFormSet = formset_factory(
FavoriteDrinkForm,
max_num=30,
absolute_max=1000,
)
formset = LimitedFavoriteDrinksFormSet(data=data)
self.assertIs(formset.is_valid(), False)
self.assertEqual(len(formset.forms), 1000)
self.assertEqual(
formset.non_form_errors(),
["Please submit at most 30 forms."],
)
def test_absolute_max_invalid(self):
msg = "'absolute_max' must be greater or equal to 'max_num'."
for max_num in [None, 31]:
with self.subTest(max_num=max_num):
with self.assertRaisesMessage(ValueError, msg):
formset_factory(FavoriteDrinkForm, max_num=max_num, absolute_max=30)
def test_more_initial_form_result_in_one(self):
"""
One form from initial and extra=3 with max_num=2 results in the one
initial form and one extra.
"""
LimitedFavoriteDrinkFormSet = formset_factory(
FavoriteDrinkForm, extra=3, max_num=2
)
formset = LimitedFavoriteDrinkFormSet(initial=[{"name": "Gin Tonic"}])
self.assertHTMLEqual(
"\n".join(str(form) for form in formset.forms),
"""
<div><label for="id_form-0-name">Name:</label>
<input type="text" name="form-0-name" value="Gin Tonic" id="id_form-0-name">
</div>
<div><label for="id_form-1-name">Name:</label>
<input type="text" name="form-1-name" id="id_form-1-name"></div>""",
)
def test_management_form_field_names(self):
"""The management form class has field names matching the constants."""
self.assertCountEqual(
ManagementForm.base_fields,
[
TOTAL_FORM_COUNT,
INITIAL_FORM_COUNT,
MIN_NUM_FORM_COUNT,
MAX_NUM_FORM_COUNT,
],
)
def test_management_form_prefix(self):
"""The management form has the correct prefix."""
formset = FavoriteDrinksFormSet()
self.assertEqual(formset.management_form.prefix, "form")
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "0",
"form-MIN_NUM_FORMS": "0",
"form-MAX_NUM_FORMS": "0",
}
formset = FavoriteDrinksFormSet(data=data)
self.assertEqual(formset.management_form.prefix, "form")
formset = FavoriteDrinksFormSet(initial={})
self.assertEqual(formset.management_form.prefix, "form")
def test_non_form_errors(self):
data = {
"drinks-TOTAL_FORMS": "2", # the number of forms rendered
"drinks-INITIAL_FORMS": "0", # the number of forms with initial data
"drinks-MIN_NUM_FORMS": "0", # min number of forms
"drinks-MAX_NUM_FORMS": "0", # max number of forms
"drinks-0-name": "Gin and Tonic",
"drinks-1-name": "Gin and Tonic",
}
formset = FavoriteDrinksFormSet(data, prefix="drinks")
self.assertFalse(formset.is_valid())
self.assertEqual(
formset.non_form_errors(), ["You may only specify a drink once."]
)
self.assertEqual(
str(formset.non_form_errors()),
'<ul class="errorlist nonform"><li>'
"You may only specify a drink once.</li></ul>",
)
def test_formset_iteration(self):
"""Formset instances are iterable."""
ChoiceFormset = formset_factory(Choice, extra=3)
formset = ChoiceFormset()
# An iterated formset yields formset.forms.
forms = list(formset)
self.assertEqual(forms, formset.forms)
self.assertEqual(len(formset), len(forms))
# A formset may be indexed to retrieve its forms.
self.assertEqual(formset[0], forms[0])
with self.assertRaises(IndexError):
formset[3]
# Formsets can override the default iteration order
class BaseReverseFormSet(BaseFormSet):
def __iter__(self):
return reversed(self.forms)
def __getitem__(self, idx):
return super().__getitem__(len(self) - idx - 1)
ReverseChoiceFormset = formset_factory(Choice, BaseReverseFormSet, extra=3)
reverse_formset = ReverseChoiceFormset()
# __iter__() modifies the rendering order.
# Compare forms from "reverse" formset with forms from original formset
self.assertEqual(str(reverse_formset[0]), str(forms[-1]))
self.assertEqual(str(reverse_formset[1]), str(forms[-2]))
self.assertEqual(len(reverse_formset), len(forms))
def test_formset_nonzero(self):
"""A formsets without any forms evaluates as True."""
ChoiceFormset = formset_factory(Choice, extra=0)
formset = ChoiceFormset()
self.assertEqual(len(formset.forms), 0)
self.assertTrue(formset)
def test_formset_splitdatetimefield(self):
"""
Formset works with SplitDateTimeField(initial=datetime.datetime.now).
"""
class SplitDateTimeForm(Form):
when = SplitDateTimeField(initial=datetime.datetime.now)
SplitDateTimeFormSet = formset_factory(SplitDateTimeForm)
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "0",
"form-0-when_0": "1904-06-16",
"form-0-when_1": "15:51:33",
}
formset = SplitDateTimeFormSet(data)
self.assertTrue(formset.is_valid())
def test_formset_error_class(self):
"""Formset's forms use the formset's error_class."""
class CustomErrorList(ErrorList):
pass
formset = FavoriteDrinksFormSet(error_class=CustomErrorList)
self.assertEqual(formset.forms[0].error_class, CustomErrorList)
def test_formset_calls_forms_is_valid(self):
"""Formsets call is_valid() on each form."""
class AnotherChoice(Choice):
def is_valid(self):
self.is_valid_called = True
return super().is_valid()
AnotherChoiceFormSet = formset_factory(AnotherChoice)
data = {
"choices-TOTAL_FORMS": "1", # number of forms rendered
"choices-INITIAL_FORMS": "0", # number of forms with initial data
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "0", # max number of forms
"choices-0-choice": "Calexico",
"choices-0-votes": "100",
}
formset = AnotherChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertTrue(formset.is_valid())
self.assertTrue(all(form.is_valid_called for form in formset.forms))
def test_hard_limit_on_instantiated_forms(self):
"""A formset has a hard limit on the number of forms instantiated."""
# reduce the default limit of 1000 temporarily for testing
_old_DEFAULT_MAX_NUM = formsets.DEFAULT_MAX_NUM
try:
formsets.DEFAULT_MAX_NUM = 2
ChoiceFormSet = formset_factory(Choice, max_num=1)
# someone fiddles with the mgmt form data...
formset = ChoiceFormSet(
{
"choices-TOTAL_FORMS": "4",
"choices-INITIAL_FORMS": "0",
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "4",
"choices-0-choice": "Zero",
"choices-0-votes": "0",
"choices-1-choice": "One",
"choices-1-votes": "1",
"choices-2-choice": "Two",
"choices-2-votes": "2",
"choices-3-choice": "Three",
"choices-3-votes": "3",
},
prefix="choices",
)
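# absolute_max falls back to max_num + DEFAULT_MAX_NUM (1 + 2 == 3 here),
# which is why three forms are instantiated below.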
# But we still only instantiate 3 forms
self.assertEqual(len(formset.forms), 3)
# and the formset isn't valid
self.assertFalse(formset.is_valid())
finally:
formsets.DEFAULT_MAX_NUM = _old_DEFAULT_MAX_NUM
def test_increase_hard_limit(self):
"""Can increase the built-in forms limit via a higher max_num."""
# reduce the default limit of 1000 temporarily for testing
_old_DEFAULT_MAX_NUM = formsets.DEFAULT_MAX_NUM
try:
formsets.DEFAULT_MAX_NUM = 3
# for this form, we want a limit of 4
ChoiceFormSet = formset_factory(Choice, max_num=4)
formset = ChoiceFormSet(
{
"choices-TOTAL_FORMS": "4",
"choices-INITIAL_FORMS": "0",
"choices-MIN_NUM_FORMS": "0", # min number of forms
"choices-MAX_NUM_FORMS": "4",
"choices-0-choice": "Zero",
"choices-0-votes": "0",
"choices-1-choice": "One",
"choices-1-votes": "1",
"choices-2-choice": "Two",
"choices-2-votes": "2",
"choices-3-choice": "Three",
"choices-3-votes": "3",
},
prefix="choices",
)
# Four forms are instantiated and no exception is raised
self.assertEqual(len(formset.forms), 4)
finally:
formsets.DEFAULT_MAX_NUM = _old_DEFAULT_MAX_NUM
def test_non_form_errors_run_full_clean(self):
"""
If non_form_errors() is called without calling is_valid() first,
it should ensure that full_clean() is called.
"""
class BaseCustomFormSet(BaseFormSet):
def clean(self):
raise ValidationError("This is a non-form error")
ChoiceFormSet = formset_factory(Choice, formset=BaseCustomFormSet)
data = {
"choices-TOTAL_FORMS": "1",
"choices-INITIAL_FORMS": "0",
}
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertIsInstance(formset.non_form_errors(), ErrorList)
self.assertEqual(list(formset.non_form_errors()), ["This is a non-form error"])
def test_validate_max_ignores_forms_marked_for_deletion(self):
class CheckForm(Form):
field = IntegerField()
data = {
"check-TOTAL_FORMS": "2",
"check-INITIAL_FORMS": "0",
"check-MAX_NUM_FORMS": "1",
"check-0-field": "200",
"check-0-DELETE": "",
"check-1-field": "50",
"check-1-DELETE": "on",
}
CheckFormSet = formset_factory(
CheckForm, max_num=1, validate_max=True, can_delete=True
)
formset = CheckFormSet(data, prefix="check")
self.assertTrue(formset.is_valid())
def test_formset_total_error_count(self):
"""A valid formset should have 0 total errors."""
data = [ # formset_data, expected error count
([("Calexico", "100")], 0),
([("Calexico", "")], 1),
([("", "invalid")], 2),
([("Calexico", "100"), ("Calexico", "")], 1),
([("Calexico", ""), ("Calexico", "")], 2),
]
for formset_data, expected_error_count in data:
formset = self.make_choiceformset(formset_data)
self.assertEqual(formset.total_error_count(), expected_error_count)
def test_formset_total_error_count_with_non_form_errors(self):
data = {
"choices-TOTAL_FORMS": "2", # the number of forms rendered
"choices-INITIAL_FORMS": "0", # the number of forms with initial data
"choices-MAX_NUM_FORMS": "2", # max number of forms - should be ignored
"choices-0-choice": "Zero",
"choices-0-votes": "0",
"choices-1-choice": "One",
"choices-1-votes": "1",
}
ChoiceFormSet = formset_factory(Choice, extra=1, max_num=1, validate_max=True)
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertEqual(formset.total_error_count(), 1)
data["choices-1-votes"] = ""
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertEqual(formset.total_error_count(), 2)
def test_html_safe(self):
formset = self.make_choiceformset()
self.assertTrue(hasattr(formset, "__html__"))
self.assertEqual(str(formset), formset.__html__())
def test_can_delete_extra_formset_forms(self):
ChoiceFormFormset = formset_factory(form=Choice, can_delete=True, extra=2)
formset = ChoiceFormFormset()
self.assertEqual(len(formset), 2)
self.assertIn("DELETE", formset.forms[0].fields)
self.assertIn("DELETE", formset.forms[1].fields)
def test_disable_delete_extra_formset_forms(self):
ChoiceFormFormset = formset_factory(
form=Choice,
can_delete=True,
can_delete_extra=False,
extra=2,
)
formset = ChoiceFormFormset()
self.assertEqual(len(formset), 2)
self.assertNotIn("DELETE", formset.forms[0].fields)
self.assertNotIn("DELETE", formset.forms[1].fields)
formset = ChoiceFormFormset(initial=[{"choice": "Zero", "votes": "1"}])
self.assertEqual(len(formset), 3)
self.assertIn("DELETE", formset.forms[0].fields)
self.assertNotIn("DELETE", formset.forms[1].fields)
self.assertNotIn("DELETE", formset.forms[2].fields)
self.assertNotIn("DELETE", formset.empty_form.fields)
formset = ChoiceFormFormset(
data={
"form-0-choice": "Zero",
"form-0-votes": "0",
"form-0-DELETE": "on",
"form-1-choice": "One",
"form-1-votes": "1",
"form-2-choice": "",
"form-2-votes": "",
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "1",
},
initial=[{"choice": "Zero", "votes": "1"}],
)
self.assertEqual(
formset.cleaned_data,
[
{"choice": "Zero", "votes": 0, "DELETE": True},
{"choice": "One", "votes": 1},
{},
],
)
self.assertIs(formset._should_delete_form(formset.forms[0]), True)
self.assertIs(formset._should_delete_form(formset.forms[1]), False)
self.assertIs(formset._should_delete_form(formset.forms[2]), False)
def test_template_name_uses_renderer_value(self):
class CustomRenderer(TemplatesSetting):
formset_template_name = "a/custom/formset/template.html"
ChoiceFormSet = formset_factory(Choice, renderer=CustomRenderer)
self.assertEqual(
ChoiceFormSet().template_name, "a/custom/formset/template.html"
)
def test_template_name_can_be_overridden(self):
class CustomFormSet(BaseFormSet):
template_name = "a/custom/formset/template.html"
ChoiceFormSet = formset_factory(Choice, formset=CustomFormSet)
self.assertEqual(
ChoiceFormSet().template_name, "a/custom/formset/template.html"
)
def test_custom_renderer(self):
"""
A custom renderer passed to formset_factory() is used by all forms
and the ErrorList.
"""
from django.forms.renderers import Jinja2
renderer = Jinja2()
data = {
"choices-TOTAL_FORMS": "2",
"choices-INITIAL_FORMS": "0",
"choices-MIN_NUM_FORMS": "0",
"choices-0-choice": "Zero",
"choices-0-votes": "",
"choices-1-choice": "One",
"choices-1-votes": "",
}
ChoiceFormSet = formset_factory(Choice, renderer=renderer)
formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertEqual(formset.renderer, renderer)
self.assertEqual(formset.forms[0].renderer, renderer)
self.assertEqual(formset.management_form.renderer, renderer)
self.assertEqual(formset.non_form_errors().renderer, renderer)
self.assertEqual(formset.empty_form.renderer, renderer)
def test_form_default_renderer(self):
"""
In the absence of a renderer passed to the formset_factory(),
Form.default_renderer is respected.
"""
class CustomRenderer(DjangoTemplates):
pass
class ChoiceWithDefaultRenderer(Choice):
default_renderer = CustomRenderer()
data = {
"choices-TOTAL_FORMS": "1",
"choices-INITIAL_FORMS": "0",
"choices-MIN_NUM_FORMS": "0",
}
ChoiceFormSet = formset_factory(ChoiceWithDefaultRenderer)
formset = ChoiceFormSet(data, prefix="choices")
self.assertEqual(
formset.forms[0].renderer, ChoiceWithDefaultRenderer.default_renderer
)
self.assertEqual(
formset.empty_form.renderer, ChoiceWithDefaultRenderer.default_renderer
)
default_renderer = get_default_renderer()
self.assertIsInstance(formset.renderer, type(default_renderer))
def test_form_default_renderer_class(self):
"""
In the absence of a renderer passed to the formset_factory(),
Form.default_renderer is respected.
"""
class CustomRenderer(DjangoTemplates):
pass
class ChoiceWithDefaultRenderer(Choice):
default_renderer = CustomRenderer
data = {
"choices-TOTAL_FORMS": "1",
"choices-INITIAL_FORMS": "0",
"choices-MIN_NUM_FORMS": "0",
}
ChoiceFormSet = formset_factory(ChoiceWithDefaultRenderer)
formset = ChoiceFormSet(data, prefix="choices")
self.assertIsInstance(formset.forms[0].renderer, CustomRenderer)
self.assertIsInstance(formset.empty_form.renderer, CustomRenderer)
default_renderer = get_default_renderer()
self.assertIsInstance(formset.renderer, type(default_renderer))
def test_repr(self):
valid_formset = self.make_choiceformset([("test", 1)])
valid_formset.full_clean()
invalid_formset = self.make_choiceformset([("test", "")])
invalid_formset.full_clean()
partially_invalid_formset = self.make_choiceformset(
[("test", "1"), ("test", "")],
)
partially_invalid_formset.full_clean()
invalid_formset_non_form_errors_only = self.make_choiceformset(
[("test", "")],
formset_class=ChoiceFormsetWithNonFormError,
)
invalid_formset_non_form_errors_only.full_clean()
cases = [
(
self.make_choiceformset(),
"<ChoiceFormSet: bound=False valid=Unknown total_forms=1>",
),
(
self.make_choiceformset(
formset_class=formset_factory(Choice, extra=10),
),
"<ChoiceFormSet: bound=False valid=Unknown total_forms=10>",
),
(
self.make_choiceformset([]),
"<ChoiceFormSet: bound=True valid=Unknown total_forms=0>",
),
(
self.make_choiceformset([("test", 1)]),
"<ChoiceFormSet: bound=True valid=Unknown total_forms=1>",
),
(valid_formset, "<ChoiceFormSet: bound=True valid=True total_forms=1>"),
(invalid_formset, "<ChoiceFormSet: bound=True valid=False total_forms=1>"),
(
partially_invalid_formset,
"<ChoiceFormSet: bound=True valid=False total_forms=2>",
),
(
invalid_formset_non_form_errors_only,
"<ChoiceFormsetWithNonFormError: bound=True valid=False total_forms=1>",
),
]
for formset, expected_repr in cases:
with self.subTest(expected_repr=expected_repr):
self.assertEqual(repr(formset), expected_repr)
def test_repr_do_not_trigger_validation(self):
formset = self.make_choiceformset([("test", 1)])
with mock.patch.object(formset, "full_clean") as mocked_full_clean:
repr(formset)
mocked_full_clean.assert_not_called()
formset.is_valid()
mocked_full_clean.assert_called()
@jinja2_tests
class Jinja2FormsFormsetTestCase(FormsFormsetTestCase):
pass
class FormsetAsTagTests(SimpleTestCase):
def setUp(self):
data = {
"choices-TOTAL_FORMS": "1",
"choices-INITIAL_FORMS": "0",
"choices-MIN_NUM_FORMS": "0",
"choices-MAX_NUM_FORMS": "0",
"choices-0-choice": "Calexico",
"choices-0-votes": "100",
}
self.formset = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.management_form_html = (
'<input type="hidden" name="choices-TOTAL_FORMS" value="1">'
'<input type="hidden" name="choices-INITIAL_FORMS" value="0">'
'<input type="hidden" name="choices-MIN_NUM_FORMS" value="0">'
'<input type="hidden" name="choices-MAX_NUM_FORMS" value="0">'
)
def test_as_table(self):
self.assertHTMLEqual(
self.formset.as_table(),
self.management_form_html
+ (
"<tr><th>Choice:</th><td>"
'<input type="text" name="choices-0-choice" value="Calexico"></td></tr>'
"<tr><th>Votes:</th><td>"
'<input type="number" name="choices-0-votes" value="100"></td></tr>'
),
)
def test_as_p(self):
self.assertHTMLEqual(
self.formset.as_p(),
self.management_form_html
+ (
"<p>Choice: "
'<input type="text" name="choices-0-choice" value="Calexico"></p>'
'<p>Votes: <input type="number" name="choices-0-votes" value="100"></p>'
),
)
def test_as_ul(self):
self.assertHTMLEqual(
self.formset.as_ul(),
self.management_form_html
+ (
"<li>Choice: "
'<input type="text" name="choices-0-choice" value="Calexico"></li>'
"<li>Votes: "
'<input type="number" name="choices-0-votes" value="100"></li>'
),
)
def test_as_div(self):
self.assertHTMLEqual(
self.formset.as_div(),
self.management_form_html
+ (
"<div>Choice: "
'<input type="text" name="choices-0-choice" value="Calexico"></div>'
'<div>Votes: <input type="number" name="choices-0-votes" value="100">'
"</div>"
),
)
@jinja2_tests
class Jinja2FormsetAsTagTests(FormsetAsTagTests):
pass
class ArticleForm(Form):
title = CharField()
pub_date = DateField()
ArticleFormSet = formset_factory(ArticleForm)
class TestIsBoundBehavior(SimpleTestCase):
def test_no_data_error(self):
formset = ArticleFormSet({})
self.assertIs(formset.is_valid(), False)
self.assertEqual(
formset.non_form_errors(),
[
"ManagementForm data is missing or has been tampered with. "
"Missing fields: form-TOTAL_FORMS, form-INITIAL_FORMS. "
"You may need to file a bug report if the issue persists.",
],
)
self.assertEqual(formset.errors, [])
# Can still render the formset.
self.assertHTMLEqual(
str(formset),
'<ul class="errorlist nonfield">'
"<li>(Hidden field TOTAL_FORMS) This field is required.</li>"
"<li>(Hidden field INITIAL_FORMS) This field is required.</li>"
"</ul>"
"<div>"
'<input type="hidden" name="form-TOTAL_FORMS" id="id_form-TOTAL_FORMS">'
'<input type="hidden" name="form-INITIAL_FORMS" id="id_form-INITIAL_FORMS">'
'<input type="hidden" name="form-MIN_NUM_FORMS" id="id_form-MIN_NUM_FORMS">'
'<input type="hidden" name="form-MAX_NUM_FORMS" id="id_form-MAX_NUM_FORMS">'
"</div>\n",
)
def test_management_form_invalid_data(self):
data = {
"form-TOTAL_FORMS": "two",
"form-INITIAL_FORMS": "one",
}
formset = ArticleFormSet(data)
self.assertIs(formset.is_valid(), False)
self.assertEqual(
formset.non_form_errors(),
[
"ManagementForm data is missing or has been tampered with. "
"Missing fields: form-TOTAL_FORMS, form-INITIAL_FORMS. "
"You may need to file a bug report if the issue persists.",
],
)
self.assertEqual(formset.errors, [])
# Can still render the formset.
self.assertHTMLEqual(
str(formset),
'<ul class="errorlist nonfield">'
"<li>(Hidden field TOTAL_FORMS) Enter a whole number.</li>"
"<li>(Hidden field INITIAL_FORMS) Enter a whole number.</li>"
"</ul>"
"<div>"
'<input type="hidden" name="form-TOTAL_FORMS" value="two" '
'id="id_form-TOTAL_FORMS">'
'<input type="hidden" name="form-INITIAL_FORMS" value="one" '
'id="id_form-INITIAL_FORMS">'
'<input type="hidden" name="form-MIN_NUM_FORMS" id="id_form-MIN_NUM_FORMS">'
'<input type="hidden" name="form-MAX_NUM_FORMS" id="id_form-MAX_NUM_FORMS">'
"</div>\n",
)
def test_customize_management_form_error(self):
formset = ArticleFormSet(
{}, error_messages={"missing_management_form": "customized"}
)
self.assertIs(formset.is_valid(), False)
self.assertEqual(formset.non_form_errors(), ["customized"])
self.assertEqual(formset.errors, [])
def test_with_management_data_attrs_work_fine(self):
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "0",
}
formset = ArticleFormSet(data)
self.assertEqual(0, formset.initial_form_count())
self.assertEqual(1, formset.total_form_count())
self.assertTrue(formset.is_bound)
self.assertTrue(formset.forms[0].is_bound)
self.assertTrue(formset.is_valid())
self.assertTrue(formset.forms[0].is_valid())
self.assertEqual([{}], formset.cleaned_data)
def test_form_errors_are_caught_by_formset(self):
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "0",
"form-0-title": "Test",
"form-0-pub_date": "1904-06-16",
"form-1-title": "Test",
"form-1-pub_date": "", # <-- this date is missing but required
}
formset = ArticleFormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(
[{}, {"pub_date": ["This field is required."]}], formset.errors
)
def test_empty_forms_are_unbound(self):
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "0",
"form-0-title": "Test",
"form-0-pub_date": "1904-06-16",
}
unbound_formset = ArticleFormSet()
bound_formset = ArticleFormSet(data)
empty_forms = [unbound_formset.empty_form, bound_formset.empty_form]
# Empty forms should be unbound
self.assertFalse(empty_forms[0].is_bound)
self.assertFalse(empty_forms[1].is_bound)
# The empty forms should be equal.
self.assertHTMLEqual(empty_forms[0].as_p(), empty_forms[1].as_p())
@jinja2_tests
class Jinja2TestIsBoundBehavior(TestIsBoundBehavior):
pass
class TestEmptyFormSet(SimpleTestCase):
def test_empty_formset_is_valid(self):
"""An empty formset still calls clean()"""
class EmptyFsetWontValidate(BaseFormSet):
def clean(self):
raise ValidationError("Clean method called")
EmptyFsetWontValidateFormset = formset_factory(
FavoriteDrinkForm, extra=0, formset=EmptyFsetWontValidate
)
formset = EmptyFsetWontValidateFormset(
data={"form-INITIAL_FORMS": "0", "form-TOTAL_FORMS": "0"},
prefix="form",
)
formset2 = EmptyFsetWontValidateFormset(
data={
"form-INITIAL_FORMS": "0",
"form-TOTAL_FORMS": "1",
"form-0-name": "bah",
},
prefix="form",
)
self.assertFalse(formset.is_valid())
self.assertFalse(formset2.is_valid())
def test_empty_formset_media(self):
"""Media is available on empty formset."""
class MediaForm(Form):
class Media:
js = ("some-file.js",)
self.assertIn("some-file.js", str(formset_factory(MediaForm, extra=0)().media))
def test_empty_formset_is_multipart(self):
"""is_multipart() works with an empty formset."""
class FileForm(Form):
file = FileField()
self.assertTrue(formset_factory(FileForm, extra=0)().is_multipart())
class AllValidTests(SimpleTestCase):
def test_valid(self):
data = {
"choices-TOTAL_FORMS": "2",
"choices-INITIAL_FORMS": "0",
"choices-MIN_NUM_FORMS": "0",
"choices-0-choice": "Zero",
"choices-0-votes": "0",
"choices-1-choice": "One",
"choices-1-votes": "1",
}
ChoiceFormSet = formset_factory(Choice)
formset1 = ChoiceFormSet(data, auto_id=False, prefix="choices")
formset2 = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertIs(all_valid((formset1, formset2)), True)
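# all_valid() runs full_clean() on every formset, so _errors is populated
# for both formsets even though no form had errors.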
expected_errors = [{}, {}]
self.assertEqual(formset1._errors, expected_errors)
self.assertEqual(formset2._errors, expected_errors)
def test_invalid(self):
"""all_valid() validates all forms, even when some are invalid."""
data = {
"choices-TOTAL_FORMS": "2",
"choices-INITIAL_FORMS": "0",
"choices-MIN_NUM_FORMS": "0",
"choices-0-choice": "Zero",
"choices-0-votes": "",
"choices-1-choice": "One",
"choices-1-votes": "",
}
ChoiceFormSet = formset_factory(Choice)
formset1 = ChoiceFormSet(data, auto_id=False, prefix="choices")
formset2 = ChoiceFormSet(data, auto_id=False, prefix="choices")
self.assertIs(all_valid((formset1, formset2)), False)
expected_errors = [
{"votes": ["This field is required."]},
{"votes": ["This field is required."]},
]
self.assertEqual(formset1._errors, expected_errors)
self.assertEqual(formset2._errors, expected_errors)
|
./temp_repos/django/django/forms/formsets.py
|
./temp_repos/django/tests/forms_tests/tests/test_formsets.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'ManagementForm'.
Context:
- Class Name: ManagementForm
- Dependencies to Mock: data, error_messages, auto_id, files, error_class, form_kwargs, initial, prefix
- Key Imports: django.forms.fields, django.forms.forms, django.forms.widgets, django.core.exceptions, django.forms.renderers, django.utils.translation, django.utils.functional, django.forms.utils
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
|
ManagementForm
|
python
|
from django.contrib.admin.forms import AdminAuthenticationForm, AdminPasswordChangeForm
from django.contrib.admin.helpers import ActionForm
from django.core.exceptions import ValidationError
class CustomAdminAuthenticationForm(AdminAuthenticationForm):
class Media:
css = {"all": ("path/to/media.css",)}
def clean_username(self):
username = self.cleaned_data.get("username")
if username == "customform":
raise ValidationError("custom form error")
return username
class CustomAdminPasswordChangeForm(AdminPasswordChangeForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields["old_password"].label = "Custom old password label"
class MediaActionForm(ActionForm):
class Media:
js = ["path/to/media.js"]
|
import datetime
import re
import sys
import urllib.parse
from unittest import mock
from django import forms
from django.contrib.auth.forms import (
AdminPasswordChangeForm,
AdminUserCreationForm,
AuthenticationForm,
BaseUserCreationForm,
PasswordChangeForm,
PasswordResetForm,
ReadOnlyPasswordHashField,
ReadOnlyPasswordHashWidget,
SetPasswordForm,
SetPasswordMixin,
UserChangeForm,
UserCreationForm,
UsernameField,
)
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User
from django.contrib.auth.signals import user_login_failed
from django.contrib.sites.models import Site
from django.core import mail
from django.core.exceptions import ValidationError
from django.core.mail import EmailMultiAlternatives
from django.forms.fields import CharField, Field, IntegerField
from django.test import RequestFactory, SimpleTestCase, TestCase, override_settings
from django.urls import reverse
from django.utils import translation
from django.utils.text import capfirst
from django.utils.translation import gettext as _
from django.views.debug import technical_500_response
from django.views.decorators.debug import sensitive_variables
from .models.custom_user import (
CustomUser,
CustomUserWithoutIsActiveField,
ExtensionUser,
)
from .models.with_custom_email_field import CustomEmailField
from .models.with_integer_username import IntegerUsernameUser
from .models.with_many_to_many import CustomUserWithM2M, Organization
from .settings import AUTH_TEMPLATES
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(
username="testclient", password="password", email="[email protected]"
)
cls.u2 = User.objects.create_user(
username="inactive", password="password", is_active=False
)
cls.u3 = User.objects.create_user(username="staff", password="password")
cls.u4 = User.objects.create(username="empty_password", password="")
cls.u5 = User.objects.create(username="unmanageable_password", password="$")
cls.u6 = User.objects.create(username="unknown_password", password="foo$bar")
cls.u7 = User.objects.create(
username="unusable_password", password=make_password(None)
)
class ExtraValidationFormMixin:
def __init__(self, *args, failing_fields=None, **kwargs):
super().__init__(*args, **kwargs)
self.failing_fields = failing_fields or {}
def failing_helper(self, field_name):
if field_name in self.failing_fields:
errors = [
ValidationError(error, code="invalid")
for error in self.failing_fields[field_name]
]
raise ValidationError(errors)
return self.cleaned_data[field_name]
class BaseUserCreationFormTest(TestDataMixin, TestCase):
form_class = BaseUserCreationForm
def test_form_fields(self):
form = self.form_class()
self.assertEqual(
list(form.fields.keys()), ["username", "password1", "password2"]
)
def test_user_already_exists(self):
data = {
"username": "testclient",
"password1": "test123",
"password2": "test123",
}
form = self.form_class(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["username"].errors,
[str(User._meta.get_field("username").error_messages["unique"])],
)
def test_invalid_data(self):
data = {
"username": "jsmith!",
"password1": "test123",
"password2": "test123",
}
form = self.form_class(data)
self.assertFalse(form.is_valid())
validator = next(
v
for v in User._meta.get_field("username").validators
if v.code == "invalid"
)
self.assertEqual(form["username"].errors, [str(validator.message)])
def test_password_verification(self):
# The verification password is incorrect.
data = {
"username": "jsmith",
"password1": "test123",
"password2": "test",
}
form = self.form_class(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["password2"].errors, [str(form.error_messages["password_mismatch"])]
)
def test_both_passwords(self):
# One (or both) passwords weren't given
data = {"username": "jsmith"}
form = self.form_class(data)
required_error = [str(Field.default_error_messages["required"])]
self.assertFalse(form.is_valid())
self.assertEqual(form["password1"].errors, required_error)
self.assertEqual(form["password2"].errors, required_error)
data["password2"] = "test123"
form = self.form_class(data)
self.assertFalse(form.is_valid())
self.assertEqual(form["password1"].errors, required_error)
self.assertEqual(form["password2"].errors, [])
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
# The success case.
data = {
"username": "[email protected]",
"password1": "test123",
"password2": "test123",
}
form = self.form_class(data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
u = form.save()
self.assertEqual(password_changed.call_count, 1)
self.assertEqual(repr(u), "<User: [email protected]>")
def test_unicode_username(self):
data = {
"username": "宝",
"password1": "test123",
"password2": "test123",
}
form = self.form_class(data)
self.assertTrue(form.is_valid())
u = form.save()
self.assertEqual(u.username, "宝")
def test_normalize_username(self):
# The normalization happens in AbstractBaseUser.clean(), and ModelForm
# validation calls Model.clean().
ohm_username = "testΩ" # U+2126 OHM SIGN
data = {
"username": ohm_username,
"password1": "pwd2",
"password2": "pwd2",
}
form = self.form_class(data)
self.assertTrue(form.is_valid())
user = form.save()
self.assertNotEqual(user.username, ohm_username)
self.assertEqual(user.username, "testΩ") # U+03A9 GREEK CAPITAL LETTER OMEGA
def test_invalid_username_no_normalize(self):
field = UsernameField(max_length=254)
# Usernames are not normalized if they are too long.
self.assertEqual(field.to_python("½" * 255), "½" * 255)
self.assertEqual(field.to_python("ff" * 254), "ff" * 254)
def test_duplicate_normalized_unicode(self):
"""
To prevent almost identical usernames (visually identical but differing
only in their Unicode code points), NFKC normalization should make them
appear equal to Django.
"""
omega_username = "iamtheΩ" # U+03A9 GREEK CAPITAL LETTER OMEGA
ohm_username = "iamtheΩ" # U+2126 OHM SIGN
self.assertNotEqual(omega_username, ohm_username)
User.objects.create_user(username=omega_username, password="pwd")
data = {
"username": ohm_username,
"password1": "pwd2",
"password2": "pwd2",
}
form = self.form_class(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors["username"], ["A user with that username already exists."]
)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_validates_password(self):
data = {
"username": "otherclient",
"password1": "otherclient",
"password2": "otherclient",
}
form = self.form_class(data)
self.assertFalse(form.is_valid())
self.assertEqual(len(form["password2"].errors), 2)
self.assertIn(
"The password is too similar to the username.", form["password2"].errors
)
self.assertIn(
"This password is too short. It must contain at least 12 characters.",
form["password2"].errors,
)
def test_password_whitespace_not_stripped(self):
data = {
"username": "testuser",
"password1": " testpassword ",
"password2": " testpassword ",
}
form = self.form_class(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["password1"], data["password1"])
self.assertEqual(form.cleaned_data["password2"], data["password2"])
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
]
)
def test_password_help_text(self):
form = self.form_class()
self.assertEqual(
form.fields["password1"].help_text,
"<ul><li>"
"Your password can’t be too similar to your other personal information."
"</li></ul>",
)
def test_password_extra_validations(self):
class ExtraValidationForm(ExtraValidationFormMixin, self.form_class):
def clean_password1(self):
return self.failing_helper("password1")
def clean_password2(self):
return self.failing_helper("password2")
data = {"username": "extra", "password1": "abc", "password2": "abc"}
for fields in (["password1"], ["password2"], ["password1", "password2"]):
with self.subTest(fields=fields):
errors = {field: [f"Extra validation for {field}."] for field in fields}
form = ExtraValidationForm(data, failing_fields=errors)
self.assertIs(form.is_valid(), False)
self.assertDictEqual(form.errors, errors)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
]
)
def test_user_create_form_validates_password_with_all_data(self):
"""
BaseUserCreationForm password validation uses all of the form's data.
"""
class CustomUserCreationForm(self.form_class):
class Meta(self.form_class.Meta):
model = User
fields = ("username", "email", "first_name", "last_name")
form = CustomUserCreationForm(
{
"username": "testuser",
"password1": "testpassword",
"password2": "testpassword",
"first_name": "testpassword",
"last_name": "lastname",
}
)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors["password2"],
["The password is too similar to the first name."],
)
def test_username_field_autocapitalize_none(self):
form = self.form_class()
self.assertEqual(
form.fields["username"].widget.attrs.get("autocapitalize"), "none"
)
def test_html_autocomplete_attributes(self):
form = self.form_class()
tests = (
("username", "username"),
("password1", "new-password"),
("password2", "new-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
def test_user_creation_form_class_getitem(self):
self.assertIs(BaseUserCreationForm["MyCustomUser"], BaseUserCreationForm)
class CustomUserCreationFormTest(TestDataMixin, TestCase):
def test_custom_form(self):
class CustomUserCreationForm(BaseUserCreationForm):
class Meta(BaseUserCreationForm.Meta):
model = ExtensionUser
fields = UserCreationForm.Meta.fields + ("date_of_birth",)
data = {
"username": "testclient",
"password1": "testclient",
"password2": "testclient",
"date_of_birth": "1988-02-24",
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_custom_form_with_different_username_field(self):
class CustomUserCreationForm(BaseUserCreationForm):
class Meta(BaseUserCreationForm.Meta):
model = CustomUser
fields = ("email", "date_of_birth")
data = {
"email": "[email protected]",
"password1": "testclient",
"password2": "testclient",
"date_of_birth": "1988-02-24",
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_custom_form_hidden_username_field(self):
class CustomUserCreationForm(BaseUserCreationForm):
class Meta(BaseUserCreationForm.Meta):
model = CustomUserWithoutIsActiveField
fields = ("email",) # without USERNAME_FIELD
data = {
"email": "[email protected]",
"password1": "testclient",
"password2": "testclient",
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_custom_form_saves_many_to_many_field(self):
class CustomUserCreationForm(BaseUserCreationForm):
class Meta(BaseUserCreationForm.Meta):
model = CustomUserWithM2M
fields = UserCreationForm.Meta.fields + ("orgs",)
organization = Organization.objects.create(name="organization 1")
data = {
"username": "[email protected]",
"password1": "testclient",
"password2": "testclient",
"orgs": [str(organization.pk)],
}
form = CustomUserCreationForm(data)
self.assertIs(form.is_valid(), True)
user = form.save(commit=True)
self.assertSequenceEqual(user.orgs.all(), [organization])
def test_custom_form_with_non_required_password(self):
class CustomUserCreationForm(BaseUserCreationForm):
password1 = forms.CharField(required=False)
password2 = forms.CharField(required=False)
another_field = forms.CharField(required=True)
data = {
"username": "testclientnew",
"another_field": "Content",
}
form = CustomUserCreationForm(data)
self.assertIs(form.is_valid(), True, form.errors)
class UserCreationFormTest(BaseUserCreationFormTest):
form_class = UserCreationForm
def test_case_insensitive_username(self):
data = {
"username": "TeStClIeNt",
"password1": "test123",
"password2": "test123",
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["username"].errors,
["A user with that username already exists."],
)
@override_settings(AUTH_USER_MODEL="auth_tests.ExtensionUser")
def test_case_insensitive_username_custom_user_and_error_message(self):
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = ExtensionUser
fields = UserCreationForm.Meta.fields + ("date_of_birth",)
error_messages = {
"username": {"unique": "This username has already been taken."}
}
ExtensionUser.objects.create_user(
username="testclient",
password="password",
email="[email protected]",
date_of_birth=datetime.date(1984, 3, 5),
)
data = {
"username": "TeStClIeNt",
"password1": "test123",
"password2": "test123",
"date_of_birth": "1980-01-01",
}
form = CustomUserCreationForm(data)
self.assertIs(form.is_valid(), False)
self.assertEqual(
form["username"].errors,
["This username has already been taken."],
)
# To verify that the login form rejects inactive users, use an authentication
# backend that allows them.
@override_settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.AllowAllUsersModelBackend"]
)
class AuthenticationFormTest(TestDataMixin, TestCase):
def test_invalid_username(self):
# The user submits an invalid username.
data = {
"username": "jsmith_does_not_exist",
"password": "test123",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(),
[
form.error_messages["invalid_login"]
% {"username": User._meta.get_field("username").verbose_name}
],
)
def test_inactive_user(self):
# The user is inactive.
data = {
"username": "inactive",
"password": "password",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(), [str(form.error_messages["inactive"])]
)
# Use an authentication backend that rejects inactive users.
@override_settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.ModelBackend"]
)
def test_inactive_user_incorrect_password(self):
"""An invalid login doesn't leak the inactive status of a user."""
data = {
"username": "inactive",
"password": "incorrect",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(),
[
form.error_messages["invalid_login"]
% {"username": User._meta.get_field("username").verbose_name}
],
)
def test_login_failed(self):
signal_calls = []
def signal_handler(**kwargs):
signal_calls.append(kwargs)
user_login_failed.connect(signal_handler)
fake_request = object()
try:
form = AuthenticationForm(
fake_request,
{
"username": "testclient",
"password": "incorrect",
},
)
self.assertFalse(form.is_valid())
self.assertIs(signal_calls[0]["request"], fake_request)
finally:
user_login_failed.disconnect(signal_handler)
def test_inactive_user_i18n(self):
with (
self.settings(USE_I18N=True),
translation.override("pt-br", deactivate=True),
):
# The user is inactive.
data = {
"username": "inactive",
"password": "password",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(), [str(form.error_messages["inactive"])]
)
# Use an authentication backend that allows inactive users.
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.AllowAllUsersModelBackend"
]
)
def test_custom_login_allowed_policy(self):
# The user is inactive, but our custom form policy allows them to log
# in.
data = {
"username": "inactive",
"password": "password",
}
class AuthenticationFormWithInactiveUsersOkay(AuthenticationForm):
def confirm_login_allowed(self, user):
pass
form = AuthenticationFormWithInactiveUsersOkay(None, data)
self.assertTrue(form.is_valid())
# Raise a ValidationError in the form to disallow some logins according
# to custom logic.
class PickyAuthenticationForm(AuthenticationForm):
def confirm_login_allowed(self, user):
if user.username == "inactive":
raise ValidationError("This user is disallowed.")
raise ValidationError("Sorry, nobody's allowed in.")
form = PickyAuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), ["This user is disallowed."])
data = {
"username": "testclient",
"password": "password",
}
form = PickyAuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), ["Sorry, nobody's allowed in."])
def test_success(self):
# The success case
data = {
"username": "testclient",
"password": "password",
}
form = AuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.non_field_errors(), [])
def test_unicode_username(self):
User.objects.create_user(username="Σαρα", password="pwd")
data = {
"username": "Σαρα",
"password": "pwd",
}
form = AuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.non_field_errors(), [])
@override_settings(AUTH_USER_MODEL="auth_tests.CustomEmailField")
def test_username_field_max_length_matches_user_model(self):
self.assertEqual(CustomEmailField._meta.get_field("username").max_length, 255)
data = {
"username": "u" * 255,
"password": "pwd",
"email": "[email protected]",
}
CustomEmailField.objects.create_user(**data)
form = AuthenticationForm(None, data)
self.assertEqual(form.fields["username"].max_length, 255)
self.assertEqual(form.fields["username"].widget.attrs.get("maxlength"), 255)
self.assertEqual(form.errors, {})
@override_settings(AUTH_USER_MODEL="auth_tests.IntegerUsernameUser")
def test_username_field_max_length_defaults_to_254(self):
self.assertIsNone(IntegerUsernameUser._meta.get_field("username").max_length)
data = {
"username": "0123456",
"password": "password",
}
IntegerUsernameUser.objects.create_user(**data)
form = AuthenticationForm(None, data)
self.assertEqual(form.fields["username"].max_length, 254)
self.assertEqual(form.fields["username"].widget.attrs.get("maxlength"), 254)
self.assertEqual(form.errors, {})
def test_username_field_label(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField(label="Name", max_length=75)
form = CustomAuthenticationForm()
self.assertEqual(form["username"].label, "Name")
def test_username_field_label_not_set(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField()
form = CustomAuthenticationForm()
username_field = User._meta.get_field(User.USERNAME_FIELD)
self.assertEqual(
form.fields["username"].label, capfirst(username_field.verbose_name)
)
def test_username_field_autocapitalize_none(self):
form = AuthenticationForm()
self.assertEqual(
form.fields["username"].widget.attrs.get("autocapitalize"), "none"
)
def test_username_field_label_empty_string(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField(label="")
form = CustomAuthenticationForm()
self.assertEqual(form.fields["username"].label, "")
def test_password_whitespace_not_stripped(self):
data = {
"username": "testuser",
"password": " pass ",
}
form = AuthenticationForm(None, data)
form.is_valid()  # Not necessary to have valid credentials for the test.
self.assertEqual(form.cleaned_data["password"], data["password"])
@override_settings(AUTH_USER_MODEL="auth_tests.IntegerUsernameUser")
def test_integer_username(self):
class CustomAuthenticationForm(AuthenticationForm):
username = IntegerField()
user = IntegerUsernameUser.objects.create_user(username=0, password="pwd")
data = {
"username": 0,
"password": "pwd",
}
form = CustomAuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["username"], data["username"])
self.assertEqual(form.cleaned_data["password"], data["password"])
self.assertEqual(form.errors, {})
self.assertEqual(form.user_cache, user)
def test_get_invalid_login_error(self):
error = AuthenticationForm().get_invalid_login_error()
self.assertIsInstance(error, ValidationError)
self.assertEqual(
error.message,
"Please enter a correct %(username)s and password. Note that both "
"fields may be case-sensitive.",
)
self.assertEqual(error.code, "invalid_login")
self.assertEqual(error.params, {"username": "username"})
def test_html_autocomplete_attributes(self):
form = AuthenticationForm()
tests = (
("username", "username"),
("password", "current-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
def test_no_password(self):
data = {"username": "username"}
form = AuthenticationForm(None, data)
self.assertIs(form.is_valid(), False)
self.assertEqual(
form["password"].errors, [Field.default_error_messages["required"]]
)
class SetPasswordFormTest(TestDataMixin, TestCase):
def test_password_verification(self):
# The two new passwords do not match.
user = User.objects.get(username="testclient")
data = {
"new_password1": "abc123",
"new_password2": "abc",
}
form = SetPasswordForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["new_password2"].errors,
[str(form.error_messages["password_mismatch"])],
)
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
user = User.objects.get(username="testclient")
data = {
"new_password1": "abc123",
"new_password2": "abc123",
}
form = SetPasswordForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_validates_password(self):
user = User.objects.get(username="testclient")
data = {
"new_password1": "testclient",
"new_password2": "testclient",
}
form = SetPasswordForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(len(form["new_password2"].errors), 2)
self.assertIn(
"The password is too similar to the username.", form["new_password2"].errors
)
self.assertIn(
"This password is too short. It must contain at least 12 characters.",
form["new_password2"].errors,
)
# SetPasswordForm does not consider usable_password for form validation
data = {
"new_password1": "testclient",
"new_password2": "testclient",
"usable_password": "false",
}
form = SetPasswordForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(len(form["new_password2"].errors), 2)
self.assertIn(
"The password is too similar to the username.", form["new_password2"].errors
)
self.assertIn(
"This password is too short. It must contain at least 12 characters.",
form["new_password2"].errors,
)
def test_no_password(self):
user = User.objects.get(username="testclient")
data = {"new_password1": "new-password"}
form = SetPasswordForm(user, data)
self.assertIs(form.is_valid(), False)
self.assertEqual(
form["new_password2"].errors, [Field.default_error_messages["required"]]
)
form = SetPasswordForm(user, {})
self.assertIs(form.is_valid(), False)
self.assertEqual(
form["new_password1"].errors, [Field.default_error_messages["required"]]
)
self.assertEqual(
form["new_password2"].errors, [Field.default_error_messages["required"]]
)
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username="testclient")
data = {
"new_password1": " password ",
"new_password2": " password ",
}
form = SetPasswordForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["new_password1"], data["new_password1"])
self.assertEqual(form.cleaned_data["new_password2"], data["new_password2"])
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_help_text_translation(self):
french_help_texts = [
"Votre mot de passe ne peut pas trop ressembler à vos autres informations "
"personnelles.",
"Votre mot de passe doit contenir au minimum 12 caractères.",
]
form = SetPasswordForm(self.u1)
with translation.override("fr"):
html = form.as_p()
for french_text in french_help_texts:
self.assertIn(french_text, html)
def test_html_autocomplete_attributes(self):
form = SetPasswordForm(self.u1)
tests = (
("new_password1", "new-password"),
("new_password2", "new-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
def test_password_extra_validations(self):
class ExtraValidationForm(ExtraValidationFormMixin, SetPasswordForm):
def clean_new_password1(self):
return self.failing_helper("new_password1")
def clean_new_password2(self):
return self.failing_helper("new_password2")
user = User.objects.get(username="testclient")
data = {"new_password1": "abc", "new_password2": "abc"}
for fields in (
["new_password1"],
["new_password2"],
["new_password1", "new_password2"],
):
with self.subTest(fields=fields):
errors = {field: [f"Extra validation for {field}."] for field in fields}
form = ExtraValidationForm(user, data, failing_fields=errors)
self.assertIs(form.is_valid(), False)
self.assertDictEqual(form.errors, errors)
class PasswordChangeFormTest(TestDataMixin, TestCase):
def test_incorrect_password(self):
user = User.objects.get(username="testclient")
data = {
"old_password": "test",
"new_password1": "abc123",
"new_password2": "abc123",
}
form = PasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["old_password"].errors,
[str(form.error_messages["password_incorrect"])],
)
def test_password_verification(self):
# The two new passwords do not match.
user = User.objects.get(username="testclient")
data = {
"old_password": "password",
"new_password1": "abc123",
"new_password2": "abc",
}
form = PasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["new_password2"].errors,
[str(form.error_messages["password_mismatch"])],
)
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
# The success case.
user = User.objects.get(username="testclient")
data = {
"old_password": "password",
"new_password1": "abc123",
"new_password2": "abc123",
}
form = PasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
def test_field_order(self):
# Regression test - check the order of fields:
user = User.objects.get(username="testclient")
self.assertEqual(
list(PasswordChangeForm(user, {}).fields),
["old_password", "new_password1", "new_password2"],
)
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username="testclient")
user.set_password(" oldpassword ")
data = {
"old_password": " oldpassword ",
"new_password1": " pass ",
"new_password2": " pass ",
}
form = PasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["old_password"], data["old_password"])
self.assertEqual(form.cleaned_data["new_password1"], data["new_password1"])
self.assertEqual(form.cleaned_data["new_password2"], data["new_password2"])
def test_html_autocomplete_attributes(self):
user = User.objects.get(username="testclient")
form = PasswordChangeForm(user)
self.assertEqual(
form.fields["old_password"].widget.attrs["autocomplete"], "current-password"
)
class UserChangeFormTest(TestDataMixin, TestCase):
def test_username_validity(self):
user = User.objects.get(username="testclient")
data = {"username": "not valid"}
form = UserChangeForm(data, instance=user)
self.assertFalse(form.is_valid())
validator = next(
v
for v in User._meta.get_field("username").validators
if v.code == "invalid"
)
self.assertEqual(form["username"].errors, [str(validator.message)])
def test_bug_14242(self):
        # A regression test, introduced by adding an optimization for the
        # UserChangeForm.
class MyUserForm(UserChangeForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields["groups"].help_text = (
"These groups give users different permissions"
)
class Meta(UserChangeForm.Meta):
fields = ("groups",)
# Just check we can create it
MyUserForm({})
def test_unusable_password(self):
user = User.objects.get(username="unusable_password")
form = UserChangeForm(instance=user)
self.assertIn(_("No password set."), form.as_table())
def test_bug_17944_empty_password(self):
user = User.objects.get(username="empty_password")
form = UserChangeForm(instance=user)
self.assertIn(_("No password set."), form.as_table())
def test_bug_17944_unmanageable_password(self):
user = User.objects.get(username="unmanageable_password")
form = UserChangeForm(instance=user)
self.assertIn(
_("Invalid password format or unknown hashing algorithm."), form.as_table()
)
def test_bug_17944_unknown_password_algorithm(self):
user = User.objects.get(username="unknown_password")
form = UserChangeForm(instance=user)
self.assertIn(
_("Invalid password format or unknown hashing algorithm."), form.as_table()
)
def test_bug_19133(self):
"The change form does not return the password value"
# Use the form to construct the POST data
user = User.objects.get(username="testclient")
form_for_data = UserChangeForm(instance=user)
post_data = form_for_data.initial
# The password field should be readonly, so anything
# posted here should be ignored; the form will be
# valid, and give back the 'initial' value for the
# password field.
post_data["password"] = "new password"
form = UserChangeForm(instance=user, data=post_data)
self.assertTrue(form.is_valid())
# original hashed password contains $
self.assertIn("$", form.cleaned_data["password"])
def test_bug_19349_bound_password_field(self):
user = User.objects.get(username="testclient")
form = UserChangeForm(data={}, instance=user)
# When rendering the bound password field,
# ReadOnlyPasswordHashWidget needs the initial
# value to render correctly
self.assertEqual(form.initial["password"], form["password"].value())
@override_settings(ROOT_URLCONF="auth_tests.urls_admin")
def test_link_to_password_reset_in_user_change_form(self):
cases = [
(
"testclient",
"Raw passwords are not stored, so there is no way to see "
"the user’s password.",
"Reset password",
),
(
"unusable_password",
"Enable password-based authentication for this user by setting a "
"password.",
"Set password",
),
]
password_reset_link = (
r'<a role="button" class="button" href="([^"]*)">([^<]*)</a>'
)
for username, expected_help_text, expected_button_label in cases:
with self.subTest(username=username):
user = User.objects.get(username=username)
form = UserChangeForm(data={}, instance=user)
password_help_text = form.fields["password"].help_text
self.assertEqual(password_help_text, expected_help_text)
matches = re.search(password_reset_link, form.as_p())
self.assertIsNotNone(matches)
self.assertEqual(len(matches.groups()), 2)
url_prefix = f"admin:{user._meta.app_label}_{user._meta.model_name}"
                # URL to the UserChangeForm in the admin.
user_change_url = reverse(f"{url_prefix}_change", args=(user.pk,))
joined_url = urllib.parse.urljoin(user_change_url, matches.group(1))
pw_change_url = reverse(
f"{url_prefix}_password_change", args=(user.pk,)
)
self.assertEqual(joined_url, pw_change_url)
self.assertEqual(matches.group(2), expected_button_label)
def test_custom_form(self):
class CustomUserChangeForm(UserChangeForm):
class Meta(UserChangeForm.Meta):
model = ExtensionUser
fields = (
"username",
"password",
"date_of_birth",
)
user = User.objects.get(username="testclient")
data = {
"username": "testclient",
"password": "testclient",
"date_of_birth": "1998-02-24",
}
form = CustomUserChangeForm(data, instance=user)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(form.cleaned_data["username"], "testclient")
self.assertEqual(form.cleaned_data["date_of_birth"], datetime.date(1998, 2, 24))
def test_password_excluded(self):
class UserChangeFormWithoutPassword(UserChangeForm):
password = None
class Meta:
model = User
exclude = ["password"]
form = UserChangeFormWithoutPassword()
self.assertNotIn("password", form.fields)
def test_username_field_autocapitalize_none(self):
form = UserChangeForm()
self.assertEqual(
form.fields["username"].widget.attrs.get("autocapitalize"), "none"
)
@override_settings(TEMPLATES=AUTH_TEMPLATES)
class PasswordResetFormTest(TestDataMixin, TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
# This cleanup is necessary because contrib.sites cache
# makes tests interfere with each other, see #11505
Site.objects.clear_cache()
def assertEmailMessageSent(self, **kwargs):
self.assertEqual(len(mail.outbox), 1)
msg = mail.outbox[0]
for attr, expected in kwargs.items():
with self.subTest(attr=attr):
self.assertEqual(getattr(msg, attr), expected)
return msg
def create_dummy_user(self):
"""
Create a user and return a tuple (user_object, username, email).
"""
username = "jsmith"
email = "[email protected]"
user = User.objects.create_user(username, email, "test123")
return (user, username, email)
def test_invalid_email(self):
data = {"email": "not valid"}
form = PasswordResetForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form["email"].errors, [_("Enter a valid email address.")])
def test_user_email_unicode_collision(self):
User.objects.create_user("mike123", "[email protected]", "test123")
User.objects.create_user("mike456", "mı[email protected]", "test123")
data = {"email": "mı[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save()
self.assertEmailMessageSent(to=["mı[email protected]"])
def test_user_email_domain_unicode_collision(self):
User.objects.create_user("mike123", "[email protected]", "test123")
User.objects.create_user("mike456", "mike@ıxample.org", "test123")
data = {"email": "mike@ıxample.org"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save()
self.assertEmailMessageSent(to=["mike@ıxample.org"])
def test_user_email_unicode_collision_nonexistent(self):
User.objects.create_user("mike123", "[email protected]", "test123")
data = {"email": "mı[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 0)
def test_user_email_domain_unicode_collision_nonexistent(self):
User.objects.create_user("mike123", "[email protected]", "test123")
data = {"email": "mike@ıxample.org"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 0)
def test_nonexistent_email(self):
"""
Test nonexistent email address. This should not fail because it would
expose information about registered users.
"""
data = {"email": "[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(len(mail.outbox), 0)
def test_cleaned_data(self):
(user, username, email) = self.create_dummy_user()
data = {"email": email}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save(domain_override="example.com")
self.assertEqual(form.cleaned_data["email"], email)
self.assertEmailMessageSent()
def test_custom_email_subject(self):
data = {"email": "[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
# Since we're not providing a request object, we must provide a
# domain_override to prevent the save operation from failing in the
# potential case where contrib.sites is not installed. Refs #16412.
form.save(domain_override="example.com")
self.assertEmailMessageSent(subject="Custom password reset on example.com")
def test_custom_email_constructor(self):
data = {"email": "[email protected]"}
class CustomEmailPasswordResetForm(PasswordResetForm):
def send_mail(
self,
subject_template_name,
email_template_name,
context,
from_email,
to_email,
html_email_template_name=None,
):
EmailMultiAlternatives(
"Forgot your password?",
"Sorry to hear you forgot your password.",
None,
[to_email],
bcc=["[email protected]"],
headers={"Reply-To": "[email protected]"},
alternatives=[
("Really sorry to hear you forgot your password.", "text/html")
],
).send()
form = CustomEmailPasswordResetForm(data)
self.assertTrue(form.is_valid())
# Since we're not providing a request object, we must provide a
# domain_override to prevent the save operation from failing in the
# potential case where contrib.sites is not installed. Refs #16412.
form.save(domain_override="example.com")
self.assertEmailMessageSent(
subject="Forgot your password?",
bcc=["[email protected]"],
content_subtype="plain",
)
def test_preserve_username_case(self):
"""
Preserve the case of the user name (before the @ in the email address)
when creating a user (#5605).
"""
user = User.objects.create_user("forms_test2", "[email protected]", "test")
self.assertEqual(user.email, "[email protected]")
user = User.objects.create_user("forms_test3", "tesT", "test")
self.assertEqual(user.email, "tesT")
def test_inactive_user(self):
"""
Inactive user cannot receive password reset email.
"""
(user, username, email) = self.create_dummy_user()
user.is_active = False
user.save()
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 0)
def test_unusable_password(self):
user = User.objects.create_user("testuser", "[email protected]", "test")
data = {"email": "[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
user.set_unusable_password()
user.save()
form = PasswordResetForm(data)
# The form itself is valid, but no email is sent
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 0)
def test_save_plaintext_email(self):
"""
Test the PasswordResetForm.save() method with no
html_email_template_name parameter passed in. Test to ensure original
behavior is unchanged after the parameter was added.
"""
(user, username, email) = self.create_dummy_user()
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
form.save()
msg = self.assertEmailMessageSent()
self.assertEqual(len(msg.alternatives), 0)
message = msg.message()
self.assertFalse(message.is_multipart())
self.assertEqual(message.get_content_type(), "text/plain")
self.assertEqual(message.get("subject"), "Custom password reset on example.com")
self.assertEqual(message.get_all("to"), [email])
self.assertTrue(
re.match(r"^http://example.com/reset/[\w+/-]", message.get_payload())
)
def test_save_html_email_template_name(self):
"""
Test the PasswordResetForm.save() method with html_email_template_name
parameter specified.
Test to ensure that a multipart email is sent with both text/plain
and text/html parts.
"""
(user, username, email) = self.create_dummy_user()
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
form.save(
html_email_template_name="registration/html_password_reset_email.html"
)
msg = self.assertEmailMessageSent()
self.assertEqual(len(msg.alternatives), 1)
message = msg.message()
self.assertEqual(message.get("subject"), "Custom password reset on example.com")
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), "text/plain")
self.assertEqual(message.get_payload(1).get_content_type(), "text/html")
self.assertEqual(message.get_all("to"), [email])
self.assertTrue(
re.match(
r"^http://example.com/reset/[\w/-]+",
message.get_payload(0).get_content(),
)
)
self.assertTrue(
re.match(
r'^<html><a href="http://example.com/reset/[\w/-]+/">Link</a></html>$',
message.get_payload(1).get_content(),
)
)
@override_settings(EMAIL_BACKEND="mail.custombackend.FailingEmailBackend")
    def test_save_send_email_exceptions_are_caught_and_logged(self):
(user, username, email) = self.create_dummy_user()
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
with self.assertLogs("django.contrib.auth", level=0) as cm:
form.save()
self.assertEqual(len(mail.outbox), 0)
self.assertEqual(len(cm.output), 1)
errors = cm.output[0].split("\n")
pk = user.pk
self.assertEqual(
errors[0],
f"ERROR:django.contrib.auth:Failed to send password reset email to {pk}",
)
self.assertEqual(
errors[-1], "ValueError: FailingEmailBackend is doomed to fail."
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomEmailField")
def test_custom_email_field(self):
email = "[email protected]"
CustomEmailField.objects.create_user("test name", "test password", email)
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(form.cleaned_data["email"], email)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [email])
def test_html_autocomplete_attributes(self):
form = PasswordResetForm()
self.assertEqual(form.fields["email"].widget.attrs["autocomplete"], "email")
class ReadOnlyPasswordHashTest(SimpleTestCase):
def test_bug_19349_render_with_none_value(self):
# Rendering the widget with value set to None
# mustn't raise an exception.
widget = ReadOnlyPasswordHashWidget()
html = widget.render(name="password", value=None, attrs={})
self.assertIn(_("No password set."), html)
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.PBKDF2PasswordHasher"]
)
def test_render(self):
widget = ReadOnlyPasswordHashWidget()
value = (
"pbkdf2_sha256$100000$a6Pucb1qSFcD$WmCkn9Hqidj48NVe5x0FEM6A9YiOqQcl/83m2Z5u"
"dm0="
)
self.assertHTMLEqual(
widget.render("name", value, {"id": "id_password"}),
'<div id="id_password">'
" <p>"
" <strong>algorithm</strong>: <bdi>pbkdf2_sha256</bdi>"
" <strong>iterations</strong>: <bdi>100000</bdi>"
" <strong>salt</strong>: <bdi>a6Pucb******</bdi>"
" <strong>hash</strong>: "
" <bdi>WmCkn9**************************************</bdi>"
" </p>"
' <p><a role="button" class="button" href="../password/">'
"Reset password</a></p>"
"</div>",
)
def test_render_no_password(self):
widget = ReadOnlyPasswordHashWidget()
self.assertHTMLEqual(
widget.render("name", None, {}),
"<div><p><strong>No password set.</p><p>"
'<a role="button" class="button" href="../password/">Set password</a>'
"</p></div>",
)
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.PBKDF2PasswordHasher"]
)
def test_render_invalid_password_format(self):
widget = ReadOnlyPasswordHashWidget()
value = "pbkdf2_sh"
self.assertHTMLEqual(
widget.render("name", value, {}),
"<div><p>"
"<strong>Invalid password format or unknown hashing algorithm.</strong>"
'</p><p><a role="button" class="button" href="../password/">Reset password'
"</a></p></div>",
)
def test_readonly_field_has_changed(self):
field = ReadOnlyPasswordHashField()
self.assertIs(field.disabled, True)
self.assertFalse(field.has_changed("aaa", "bbb"))
def test_label(self):
"""
ReadOnlyPasswordHashWidget doesn't contain a for attribute in the
<label> because it doesn't have any labelable elements.
"""
class TestForm(forms.Form):
hash_field = ReadOnlyPasswordHashField()
bound_field = TestForm()["hash_field"]
self.assertIsNone(bound_field.field.widget.id_for_label("id"))
self.assertEqual(bound_field.label_tag(), "<label>Hash field:</label>")
class AdminPasswordChangeFormTest(TestDataMixin, TestCase):
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
user = User.objects.get(username="testclient")
data = {
"password1": "test123",
"password2": "test123",
}
form = AdminPasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
self.assertEqual(form.changed_data, ["password"])
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_validates_password(self):
user = User.objects.get(username="testclient")
data = {
"password1": "testclient",
"password2": "testclient",
}
form = AdminPasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(len(form["password2"].errors), 2)
self.assertIn(
"The password is too similar to the username.",
form["password2"].errors,
)
self.assertIn(
"This password is too short. It must contain at least 12 characters.",
form["password2"].errors,
)
        # Passwords are not validated if `usable_password` is set to "false".
data = {
"password1": "testclient",
"password2": "testclient",
"usable_password": "false",
}
form = AdminPasswordChangeForm(user, data)
self.assertIs(form.is_valid(), True, form.errors)
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username="testclient")
data = {
"password1": " pass ",
"password2": " pass ",
}
form = AdminPasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["password1"], data["password1"])
self.assertEqual(form.cleaned_data["password2"], data["password2"])
self.assertEqual(form.changed_data, ["password"])
def test_password_extra_validations(self):
class ExtraValidationForm(ExtraValidationFormMixin, AdminPasswordChangeForm):
def clean_password1(self):
return self.failing_helper("password1")
def clean_password2(self):
return self.failing_helper("password2")
user = User.objects.get(username="testclient")
data = {"username": "extra", "password1": "abc", "password2": "abc"}
for fields in (["password1"], ["password2"], ["password1", "password2"]):
with self.subTest(fields=fields):
errors = {field: [f"Extra validation for {field}."] for field in fields}
form = ExtraValidationForm(user, data, failing_fields=errors)
self.assertIs(form.is_valid(), False)
self.assertDictEqual(form.errors, errors)
def test_non_matching_passwords(self):
user = User.objects.get(username="testclient")
data = {"password1": "password1", "password2": "password2"}
form = AdminPasswordChangeForm(user, data)
self.assertEqual(
form.errors["password2"], [form.error_messages["password_mismatch"]]
)
self.assertEqual(form.changed_data, ["password"])
def test_missing_passwords(self):
user = User.objects.get(username="testclient")
data = {"password1": "", "password2": ""}
form = AdminPasswordChangeForm(user, data)
required_error = [Field.default_error_messages["required"]]
self.assertEqual(form.errors["password1"], required_error)
self.assertEqual(form.errors["password2"], required_error)
self.assertEqual(form.changed_data, [])
def test_one_password(self):
user = User.objects.get(username="testclient")
form1 = AdminPasswordChangeForm(user, {"password1": "", "password2": "test"})
required_error = [Field.default_error_messages["required"]]
self.assertEqual(form1.errors["password1"], required_error)
self.assertNotIn("password2", form1.errors)
self.assertEqual(form1.changed_data, [])
form2 = AdminPasswordChangeForm(user, {"password1": "test", "password2": ""})
self.assertEqual(form2.errors["password2"], required_error)
self.assertNotIn("password1", form2.errors)
self.assertEqual(form2.changed_data, [])
def test_html_autocomplete_attributes(self):
user = User.objects.get(username="testclient")
form = AdminPasswordChangeForm(user)
tests = (
("password1", "new-password"),
("password2", "new-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
def test_enable_password_authentication(self):
user = User.objects.get(username="unusable_password")
form = AdminPasswordChangeForm(
user,
{"password1": "complexpassword", "password2": "complexpassword"},
)
self.assertNotIn("usable_password", form.fields)
self.assertIs(form.is_valid(), True)
user = form.save(commit=True)
self.assertIs(user.has_usable_password(), True)
def test_disable_password_authentication(self):
user = User.objects.get(username="testclient")
form = AdminPasswordChangeForm(
user,
{"usable_password": "false", "password1": "", "password2": "test"},
)
self.assertIn("usable_password", form.fields)
self.assertIn(
"If disabled, the current password for this user will be lost.",
form.fields["usable_password"].help_text,
)
self.assertIs(form.is_valid(), True) # Valid despite password empty/mismatch.
user = form.save(commit=True)
self.assertIs(user.has_usable_password(), False)
class AdminUserCreationFormTest(BaseUserCreationFormTest):
form_class = AdminUserCreationForm
def test_form_fields(self):
form = self.form_class()
self.assertEqual(
list(form.fields.keys()),
["username", "password1", "password2", "usable_password"],
)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_no_password_validation_if_unusable_password_set(self):
data = {
"username": "otherclient",
"password1": "otherclient",
"password2": "otherclient",
"usable_password": "false",
}
form = self.form_class(data)
        # Passwords are not validated if `usable_password` is set to "false".
self.assertIs(form.is_valid(), True, form.errors)
class CustomUserCreationForm(self.form_class):
class Meta(self.form_class.Meta):
model = User
fields = ("username", "email", "first_name", "last_name")
form = CustomUserCreationForm(
{
"username": "testuser",
"password1": "testpassword",
"password2": "testpassword",
"first_name": "testpassword",
"last_name": "lastname",
"usable_password": "false",
}
)
self.assertIs(form.is_valid(), True, form.errors)
def test_unusable_password(self):
data = {
"username": "new-user-which-does-not-exist",
"usable_password": "false",
}
form = self.form_class(data)
self.assertIs(form.is_valid(), True, form.errors)
u = form.save()
self.assertEqual(u.username, data["username"])
self.assertFalse(u.has_usable_password())
class SensitiveVariablesTest(TestDataMixin, TestCase):
@sensitive_variables("data")
def test_passwords_marked_as_sensitive_in_admin_forms(self):
data = {
"password1": "passwordsensitive",
"password2": "sensitivepassword",
"usable_password": "true",
}
forms = [
AdminUserCreationForm({**data, "username": "newusername"}),
AdminPasswordChangeForm(self.u1, data),
]
password1_fragment = """
<td>password1</td>
<td class="code"><pre>'********************'</pre></td>
"""
password2_fragment = """
<td>password2</td>
<td class="code"><pre>'********************'</pre></td>
"""
error = ValueError("Forced error")
for form in forms:
with self.subTest(form=form):
with mock.patch.object(
SetPasswordMixin, "validate_passwords", side_effect=error
):
try:
form.is_valid()
except ValueError:
exc_info = sys.exc_info()
else:
self.fail("Form validation should have failed.")
response = technical_500_response(RequestFactory().get("/"), *exc_info)
self.assertNotContains(response, "sensitivepassword", status_code=500)
self.assertNotContains(response, "passwordsensitive", status_code=500)
self.assertContains(response, str(error), status_code=500)
self.assertContains(
response, password1_fragment, html=True, status_code=500
)
self.assertContains(
response, password2_fragment, html=True, status_code=500
)
|
./temp_repos/django/tests/admin_views/forms.py
|
./temp_repos/django/tests/auth_tests/test_forms.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'CustomAdminAuthenticationForm'.
Context:
- Class Name: CustomAdminAuthenticationForm
- Dependencies to Mock: None detected
- Key Imports: django.core.exceptions, django.contrib.admin.forms, django.contrib.admin.helpers
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above, if any.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
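Reference sketch (not the canonical answer): one minimal response to the prompt above. It assumes CustomAdminAuthenticationForm is importable from admin_views.forms and behaves like its base class, django.contrib.admin.forms.AdminAuthenticationForm, i.e. it authenticates through django.contrib.auth.forms.authenticate and only admits active staff users; the import path is an assumption and the test is meant to run under a configured Django test settings module.
from unittest import mock

from django.test import SimpleTestCase

from admin_views.forms import CustomAdminAuthenticationForm  # assumed path


class CustomAdminAuthenticationFormSketchTests(SimpleTestCase):
    def test_valid_staff_user_authenticates(self):
        # Arrange: authenticate() is mocked to return an active staff user.
        staff_user = mock.MagicMock(is_active=True, is_staff=True)
        data = {"username": "admin", "password": "secret"}
        with mock.patch(
            "django.contrib.auth.forms.authenticate", return_value=staff_user
        ):
            # Act
            form = CustomAdminAuthenticationForm(request=None, data=data)
            is_valid = form.is_valid()
        # Assert
        self.assertIs(is_valid, True)

    def test_failed_authentication_is_rejected(self):
        # Arrange: authenticate() is mocked to find no matching user.
        data = {"username": "admin", "password": "wrong"}
        with mock.patch(
            "django.contrib.auth.forms.authenticate", return_value=None
        ):
            # Act
            form = CustomAdminAuthenticationForm(request=None, data=data)
            is_valid = form.is_valid()
        # Assert: the mismatch surfaces as a non-field error.
        self.assertIs(is_valid, False)
        self.assertIn("__all__", form.errors)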
|
CustomAdminAuthenticationForm
|
python
|
from datetime import datetime
from django.conf import settings
from django.utils.crypto import constant_time_compare, salted_hmac
from django.utils.http import base36_to_int, int_to_base36
class PasswordResetTokenGenerator:
"""
Strategy object used to generate and check tokens for the password
reset mechanism.
"""
key_salt = "django.contrib.auth.tokens.PasswordResetTokenGenerator"
algorithm = None
_secret = None
_secret_fallbacks = None
def __init__(self):
self.algorithm = self.algorithm or "sha256"
def _get_secret(self):
return self._secret or settings.SECRET_KEY
def _set_secret(self, secret):
self._secret = secret
secret = property(_get_secret, _set_secret)
def _get_fallbacks(self):
if self._secret_fallbacks is None:
return settings.SECRET_KEY_FALLBACKS
return self._secret_fallbacks
def _set_fallbacks(self, fallbacks):
self._secret_fallbacks = fallbacks
secret_fallbacks = property(_get_fallbacks, _set_fallbacks)
def make_token(self, user):
"""
Return a token that can be used once to do a password reset
for the given user.
"""
return self._make_token_with_timestamp(
user,
self._num_seconds(self._now()),
self.secret,
)
def check_token(self, user, token):
"""
Check that a password reset token is correct for a given user.
"""
if not (user and token):
return False
# Parse the token
try:
ts_b36, _ = token.split("-")
except ValueError:
return False
try:
ts = base36_to_int(ts_b36)
except ValueError:
return False
# Check that the timestamp/uid has not been tampered with
for secret in [self.secret, *self.secret_fallbacks]:
if constant_time_compare(
self._make_token_with_timestamp(user, ts, secret),
token,
):
break
else:
return False
# Check the timestamp is within limit.
if (self._num_seconds(self._now()) - ts) > settings.PASSWORD_RESET_TIMEOUT:
return False
return True
def _make_token_with_timestamp(self, user, timestamp, secret):
        # timestamp is the number of seconds since 2001-1-1. Converted to
        # base 36, this gives us a 6 digit string until about 2069.
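        # For scale: 36**6 seconds is roughly 69 years, so six base 36
        # digits cover timestamps from 2001-1-1 until about 2069.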
ts_b36 = int_to_base36(timestamp)
hash_string = salted_hmac(
self.key_salt,
self._make_hash_value(user, timestamp),
secret=secret,
algorithm=self.algorithm,
).hexdigest()[
::2
] # Limit to shorten the URL.
return "%s-%s" % (ts_b36, hash_string)
def _make_hash_value(self, user, timestamp):
"""
Hash the user's primary key, email (if available), and some user state
that's sure to change after a password reset to produce a token that is
invalidated when it's used:
1. The password field will change upon a password reset (even if the
same password is chosen, due to password salting).
2. The last_login field will usually be updated very shortly after
a password reset.
Failing those things, settings.PASSWORD_RESET_TIMEOUT eventually
invalidates the token.
Running this data through salted_hmac() prevents password cracking
attempts using the reset token, provided the secret isn't compromised.
"""
# Truncate microseconds so that tokens are consistent even if the
# database doesn't support microseconds.
login_timestamp = (
""
if user.last_login is None
else user.last_login.replace(microsecond=0, tzinfo=None)
)
email_field = user.get_email_field_name()
email = getattr(user, email_field, "") or ""
return f"{user.pk}{user.password}{login_timestamp}{timestamp}{email}"
def _num_seconds(self, dt):
return int((dt - datetime(2001, 1, 1)).total_seconds())
def _now(self):
# Used for mocking in tests
return datetime.now()
default_token_generator = PasswordResetTokenGenerator()
|
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.utils import override_settings
from .models import CustomEmailField
class MockedPasswordResetTokenGenerator(PasswordResetTokenGenerator):
def __init__(self, now):
self._now_val = now
super().__init__()
def _now(self):
return self._now_val
class TokenGeneratorTest(TestCase):
def test_make_token(self):
user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
p0 = PasswordResetTokenGenerator()
tk1 = p0.make_token(user)
self.assertIs(p0.check_token(user, tk1), True)
def test_10265(self):
"""
The token generated for a user created in the same request
will work correctly.
"""
user = User.objects.create_user("comebackkid", "[email protected]", "testpw")
user_reload = User.objects.get(username="comebackkid")
p0 = MockedPasswordResetTokenGenerator(datetime.now())
tk1 = p0.make_token(user)
tk2 = p0.make_token(user_reload)
self.assertEqual(tk1, tk2)
def test_token_with_different_email(self):
"""Updating the user email address invalidates the token."""
tests = [
(CustomEmailField, None),
(CustomEmailField, "[email protected]"),
(User, "[email protected]"),
]
for model, email in tests:
with self.subTest(model=model.__qualname__, email=email):
user = model.objects.create_user(
"changeemailuser",
email=email,
password="testpw",
)
p0 = PasswordResetTokenGenerator()
tk1 = p0.make_token(user)
self.assertIs(p0.check_token(user, tk1), True)
setattr(user, user.get_email_field_name(), "[email protected]")
user.save()
self.assertIs(p0.check_token(user, tk1), False)
def test_timeout(self):
"""The token is valid after n seconds, but no greater."""
# Uses a mocked version of PasswordResetTokenGenerator so we can change
# the value of 'now'.
user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
now = datetime.now()
p0 = MockedPasswordResetTokenGenerator(now)
tk1 = p0.make_token(user)
p1 = MockedPasswordResetTokenGenerator(
now + timedelta(seconds=settings.PASSWORD_RESET_TIMEOUT)
)
self.assertIs(p1.check_token(user, tk1), True)
p2 = MockedPasswordResetTokenGenerator(
now + timedelta(seconds=(settings.PASSWORD_RESET_TIMEOUT + 1))
)
self.assertIs(p2.check_token(user, tk1), False)
with self.settings(PASSWORD_RESET_TIMEOUT=60 * 60):
p3 = MockedPasswordResetTokenGenerator(
now + timedelta(seconds=settings.PASSWORD_RESET_TIMEOUT)
)
self.assertIs(p3.check_token(user, tk1), True)
p4 = MockedPasswordResetTokenGenerator(
now + timedelta(seconds=(settings.PASSWORD_RESET_TIMEOUT + 1))
)
self.assertIs(p4.check_token(user, tk1), False)
def test_check_token_with_nonexistent_token_and_user(self):
user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
p0 = PasswordResetTokenGenerator()
tk1 = p0.make_token(user)
self.assertIs(p0.check_token(None, tk1), False)
self.assertIs(p0.check_token(user, None), False)
def test_token_with_different_secret(self):
"""
A valid token can be created with a secret other than SECRET_KEY by
using the PasswordResetTokenGenerator.secret attribute.
"""
user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
new_secret = "abcdefghijkl"
# Create and check a token with a different secret.
p0 = PasswordResetTokenGenerator()
p0.secret = new_secret
tk0 = p0.make_token(user)
self.assertIs(p0.check_token(user, tk0), True)
# Create and check a token with the default secret.
p1 = PasswordResetTokenGenerator()
self.assertEqual(p1.secret, settings.SECRET_KEY)
self.assertNotEqual(p1.secret, new_secret)
tk1 = p1.make_token(user)
# Tokens created with a different secret don't validate.
self.assertIs(p0.check_token(user, tk1), False)
self.assertIs(p1.check_token(user, tk0), False)
def test_token_with_different_secret_subclass(self):
class CustomPasswordResetTokenGenerator(PasswordResetTokenGenerator):
secret = "test-secret"
user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
custom_password_generator = CustomPasswordResetTokenGenerator()
tk_custom = custom_password_generator.make_token(user)
self.assertIs(custom_password_generator.check_token(user, tk_custom), True)
default_password_generator = PasswordResetTokenGenerator()
self.assertNotEqual(
custom_password_generator.secret,
default_password_generator.secret,
)
self.assertEqual(default_password_generator.secret, settings.SECRET_KEY)
# Tokens created with a different secret don't validate.
tk_default = default_password_generator.make_token(user)
self.assertIs(custom_password_generator.check_token(user, tk_default), False)
self.assertIs(default_password_generator.check_token(user, tk_custom), False)
@override_settings(SECRET_KEY="")
def test_secret_lazy_validation(self):
default_token_generator = PasswordResetTokenGenerator()
msg = "The SECRET_KEY setting must not be empty."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
default_token_generator.secret
def test_check_token_secret_fallbacks(self):
user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
p1 = PasswordResetTokenGenerator()
p1.secret = "secret"
tk = p1.make_token(user)
p2 = PasswordResetTokenGenerator()
p2.secret = "newsecret"
p2.secret_fallbacks = ["secret"]
self.assertIs(p1.check_token(user, tk), True)
self.assertIs(p2.check_token(user, tk), True)
@override_settings(
SECRET_KEY="secret",
SECRET_KEY_FALLBACKS=["oldsecret"],
)
def test_check_token_secret_key_fallbacks(self):
user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
p1 = PasswordResetTokenGenerator()
p1.secret = "oldsecret"
tk = p1.make_token(user)
p2 = PasswordResetTokenGenerator()
self.assertIs(p2.check_token(user, tk), True)
@override_settings(
SECRET_KEY="secret",
SECRET_KEY_FALLBACKS=["oldsecret"],
)
def test_check_token_secret_key_fallbacks_override(self):
user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
p1 = PasswordResetTokenGenerator()
p1.secret = "oldsecret"
tk = p1.make_token(user)
p2 = PasswordResetTokenGenerator()
p2.secret_fallbacks = []
self.assertIs(p2.check_token(user, tk), False)
|
./temp_repos/django/django/contrib/auth/tokens.py
|
./temp_repos/django/tests/auth_tests/test_tokens.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'PasswordResetTokenGenerator'.
Context:
- Class Name: PasswordResetTokenGenerator
- Dependencies to Mock: None detected
- Key Imports: datetime, django.conf, django.utils.http, django.utils.crypto
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above, if any.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
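Reference sketch (not the canonical answer): a minimal, self-configuring test that exercises PasswordResetTokenGenerator through a mock user exposing only the attributes _make_hash_value() reads; the settings values below are placeholders, not project settings.
import unittest
from datetime import datetime, timedelta
from unittest import mock

from django.conf import settings

if not settings.configured:
    settings.configure(
        SECRET_KEY="sketch-secret-key",
        SECRET_KEY_FALLBACKS=[],
        PASSWORD_RESET_TIMEOUT=60 * 60,
    )

from django.contrib.auth.tokens import PasswordResetTokenGenerator


def make_user():
    # Stand-in user: pk, password, last_login, and the email field are all
    # that the token hash incorporates.
    user = mock.MagicMock()
    user.pk = 1
    user.password = "hashed-password"
    user.last_login = None
    user.get_email_field_name.return_value = "email"
    user.email = "[email protected]"
    return user


class PasswordResetTokenGeneratorSketchTests(unittest.TestCase):
    def test_round_trip_succeeds(self):
        # Arrange
        user = make_user()
        generator = PasswordResetTokenGenerator()
        # Act
        token = generator.make_token(user)
        # Assert
        self.assertIs(generator.check_token(user, token), True)

    def test_expired_token_fails(self):
        # Arrange
        user = make_user()
        generator = PasswordResetTokenGenerator()
        token = generator.make_token(user)
        later = datetime.now() + timedelta(
            seconds=settings.PASSWORD_RESET_TIMEOUT + 1
        )
        # Act: move "now" past PASSWORD_RESET_TIMEOUT via the _now() hook.
        with mock.patch.object(generator, "_now", return_value=later):
            expired = generator.check_token(user, token)
        # Assert
        self.assertIs(expired, False)

    def test_malformed_token_fails(self):
        # Arrange
        user = make_user()
        generator = PasswordResetTokenGenerator()
        # Act / Assert: unparsable tokens are rejected, not raised.
        self.assertIs(generator.check_token(user, "not-a-valid-token"), False)
        self.assertIs(generator.check_token(user, None), False)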
|
PasswordResetTokenGenerator
|
python
|
from django.contrib.auth.backends import ModelBackend
from .models import CustomUser
class CustomUserBackend(ModelBackend):
def authenticate(self, request, username=None, password=None):
try:
user = CustomUser.custom_objects.get_by_natural_key(username)
if user.check_password(password):
return user
except CustomUser.DoesNotExist:
return None
def get_user(self, user_id):
try:
return CustomUser.custom_objects.get(pk=user_id)
except CustomUser.DoesNotExist:
return None
|
import sys
from datetime import date
from unittest import mock
from unittest.mock import patch
from asgiref.sync import sync_to_async
from django.contrib.auth import (
BACKEND_SESSION_KEY,
SESSION_KEY,
_clean_credentials,
aauthenticate,
authenticate,
get_user,
signals,
)
from django.contrib.auth.backends import BaseBackend, ModelBackend
from django.contrib.auth.forms import PasswordChangeForm, SetPasswordForm
from django.contrib.auth.hashers import MD5PasswordHasher
from django.contrib.auth.models import AnonymousUser, Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.http import HttpRequest
from django.test import (
Client,
RequestFactory,
SimpleTestCase,
TestCase,
modify_settings,
override_settings,
)
from django.urls import reverse
from django.views.debug import ExceptionReporter, technical_500_response
from django.views.decorators.debug import sensitive_variables
from .models import (
CustomPermissionsUser,
CustomUser,
CustomUserWithoutIsActiveField,
ExtensionUser,
UUIDUser,
)
class FilteredExceptionReporter(ExceptionReporter):
def get_traceback_frames(self):
frames = super().get_traceback_frames()
return [
frame
for frame in frames
if not isinstance(dict(frame["vars"]).get("self"), Client)
]
class SimpleBackend(BaseBackend):
def get_user_permissions(self, user_obj, obj=None):
return ["user_perm"]
def get_group_permissions(self, user_obj, obj=None):
return ["group_perm"]
@override_settings(
AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.SimpleBackend"]
)
class BaseBackendTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create_user("test", "[email protected]", "test")
def test_get_user_permissions(self):
self.assertEqual(self.user.get_user_permissions(), {"user_perm"})
async def test_aget_user_permissions(self):
self.assertEqual(await self.user.aget_user_permissions(), {"user_perm"})
def test_get_group_permissions(self):
self.assertEqual(self.user.get_group_permissions(), {"group_perm"})
async def test_aget_group_permissions(self):
self.assertEqual(await self.user.aget_group_permissions(), {"group_perm"})
def test_get_all_permissions(self):
self.assertEqual(self.user.get_all_permissions(), {"user_perm", "group_perm"})
async def test_aget_all_permissions(self):
self.assertEqual(
await self.user.aget_all_permissions(), {"user_perm", "group_perm"}
)
def test_has_perm(self):
self.assertIs(self.user.has_perm("user_perm"), True)
self.assertIs(self.user.has_perm("group_perm"), True)
self.assertIs(self.user.has_perm("other_perm", TestObj()), False)
async def test_ahas_perm(self):
self.assertIs(await self.user.ahas_perm("user_perm"), True)
self.assertIs(await self.user.ahas_perm("group_perm"), True)
self.assertIs(await self.user.ahas_perm("other_perm", TestObj()), False)
def test_has_perms_perm_list_invalid(self):
msg = "perm_list must be an iterable of permissions."
with self.assertRaisesMessage(ValueError, msg):
self.user.has_perms("user_perm")
with self.assertRaisesMessage(ValueError, msg):
self.user.has_perms(object())
async def test_ahas_perms_perm_list_invalid(self):
msg = "perm_list must be an iterable of permissions."
with self.assertRaisesMessage(ValueError, msg):
await self.user.ahas_perms("user_perm")
with self.assertRaisesMessage(ValueError, msg):
await self.user.ahas_perms(object())
class CountingMD5PasswordHasher(MD5PasswordHasher):
"""Hasher that counts how many times it computes a hash."""
calls = 0
def encode(self, *args, **kwargs):
type(self).calls += 1
return super().encode(*args, **kwargs)
class BaseModelBackendTest:
"""
A base class for tests that need to validate the ModelBackend
with different User models. Subclasses should define a class
level UserModel attribute, and a create_users() method to
construct two users for test purposes.
"""
backend = "django.contrib.auth.backends.ModelBackend"
@classmethod
def setUpClass(cls):
cls.enterClassContext(
modify_settings(AUTHENTICATION_BACKENDS={"append": cls.backend})
)
super().setUpClass()
def setUp(self):
# The custom_perms test messes with ContentTypes, which will be cached.
# Flush the cache to ensure there are no side effects.
self.addCleanup(ContentType.objects.clear_cache)
self.create_users()
def test_has_perm(self):
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertIs(user.has_perm("auth.test"), False)
user.is_staff = True
user.save()
self.assertIs(user.has_perm("auth.test"), False)
user.is_superuser = True
user.save()
self.assertIs(user.has_perm("auth.test"), True)
user.is_staff = True
user.is_superuser = True
user.is_active = False
user.save()
self.assertIs(user.has_perm("auth.test"), False)
async def test_ahas_perm(self):
user = await self.UserModel._default_manager.aget(pk=self.user.pk)
self.assertIs(await user.ahas_perm("auth.test"), False)
user.is_staff = True
await user.asave()
self.assertIs(await user.ahas_perm("auth.test"), False)
user.is_superuser = True
await user.asave()
self.assertIs(await user.ahas_perm("auth.test"), True)
self.assertIs(await user.ahas_module_perms("auth"), True)
user.is_staff = True
user.is_superuser = True
user.is_active = False
await user.asave()
self.assertIs(await user.ahas_perm("auth.test"), False)
def test_custom_perms(self):
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(
name="test", content_type=content_type, codename="test"
)
user.user_permissions.add(perm)
# reloading user to purge the _perm_cache
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.get_all_permissions(), {"auth.test"})
self.assertEqual(user.get_user_permissions(), {"auth.test"})
self.assertEqual(user.get_group_permissions(), set())
self.assertIs(user.has_module_perms("Group"), False)
self.assertIs(user.has_module_perms("auth"), True)
perm = Permission.objects.create(
name="test2", content_type=content_type, codename="test2"
)
user.user_permissions.add(perm)
perm = Permission.objects.create(
name="test3", content_type=content_type, codename="test3"
)
user.user_permissions.add(perm)
user = self.UserModel._default_manager.get(pk=self.user.pk)
expected_user_perms = {"auth.test2", "auth.test", "auth.test3"}
self.assertEqual(user.get_all_permissions(), expected_user_perms)
self.assertIs(user.has_perm("test"), False)
self.assertIs(user.has_perm("auth.test"), True)
self.assertIs(user.has_perms(["auth.test2", "auth.test3"]), True)
perm = Permission.objects.create(
name="test_group", content_type=content_type, codename="test_group"
)
group = Group.objects.create(name="test_group")
group.permissions.add(perm)
user.groups.add(group)
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(
user.get_all_permissions(), {*expected_user_perms, "auth.test_group"}
)
self.assertEqual(user.get_user_permissions(), expected_user_perms)
self.assertEqual(user.get_group_permissions(), {"auth.test_group"})
self.assertIs(user.has_perms(["auth.test3", "auth.test_group"]), True)
user = AnonymousUser()
self.assertIs(user.has_perm("test"), False)
self.assertIs(user.has_perms(["auth.test2", "auth.test3"]), False)
async def test_acustom_perms(self):
user = await self.UserModel._default_manager.aget(pk=self.user.pk)
content_type = await sync_to_async(ContentType.objects.get_for_model)(Group)
perm = await Permission.objects.acreate(
name="test", content_type=content_type, codename="test"
)
await user.user_permissions.aadd(perm)
# Reloading user to purge the _perm_cache.
user = await self.UserModel._default_manager.aget(pk=self.user.pk)
self.assertEqual(await user.aget_all_permissions(), {"auth.test"})
self.assertEqual(await user.aget_user_permissions(), {"auth.test"})
self.assertEqual(await user.aget_group_permissions(), set())
self.assertIs(await user.ahas_module_perms("Group"), False)
self.assertIs(await user.ahas_module_perms("auth"), True)
perm = await Permission.objects.acreate(
name="test2", content_type=content_type, codename="test2"
)
await user.user_permissions.aadd(perm)
perm = await Permission.objects.acreate(
name="test3", content_type=content_type, codename="test3"
)
await user.user_permissions.aadd(perm)
user = await self.UserModel._default_manager.aget(pk=self.user.pk)
expected_user_perms = {"auth.test2", "auth.test", "auth.test3"}
self.assertEqual(await user.aget_all_permissions(), expected_user_perms)
self.assertIs(await user.ahas_perm("test"), False)
self.assertIs(await user.ahas_perm("auth.test"), True)
self.assertIs(await user.ahas_perms(["auth.test2", "auth.test3"]), True)
perm = await Permission.objects.acreate(
name="test_group", content_type=content_type, codename="test_group"
)
group = await Group.objects.acreate(name="test_group")
await group.permissions.aadd(perm)
await user.groups.aadd(group)
user = await self.UserModel._default_manager.aget(pk=self.user.pk)
self.assertEqual(
await user.aget_all_permissions(), {*expected_user_perms, "auth.test_group"}
)
self.assertEqual(await user.aget_user_permissions(), expected_user_perms)
self.assertEqual(await user.aget_group_permissions(), {"auth.test_group"})
self.assertIs(await user.ahas_perms(["auth.test3", "auth.test_group"]), True)
user = AnonymousUser()
self.assertIs(await user.ahas_perm("test"), False)
self.assertIs(await user.ahas_perms(["auth.test2", "auth.test3"]), False)
def test_has_no_object_perm(self):
"""Regressiontest for #12462"""
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(
name="test", content_type=content_type, codename="test"
)
user.user_permissions.add(perm)
self.assertIs(user.has_perm("auth.test", "object"), False)
self.assertEqual(user.get_all_permissions("object"), set())
self.assertIs(user.has_perm("auth.test"), True)
self.assertEqual(user.get_all_permissions(), {"auth.test"})
async def test_ahas_no_object_perm(self):
"""See test_has_no_object_perm()"""
user = await self.UserModel._default_manager.aget(pk=self.user.pk)
content_type = await sync_to_async(ContentType.objects.get_for_model)(Group)
perm = await Permission.objects.acreate(
name="test", content_type=content_type, codename="test"
)
await user.user_permissions.aadd(perm)
self.assertIs(await user.ahas_perm("auth.test", "object"), False)
self.assertEqual(await user.aget_all_permissions("object"), set())
self.assertIs(await user.ahas_perm("auth.test"), True)
self.assertEqual(await user.aget_all_permissions(), {"auth.test"})
def test_anonymous_has_no_permissions(self):
"""
#17903 -- Anonymous users shouldn't have permissions in
ModelBackend.get_(all|user|group)_permissions().
"""
backend = ModelBackend()
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
user_perm = Permission.objects.create(
name="test", content_type=content_type, codename="test_user"
)
group_perm = Permission.objects.create(
name="test2", content_type=content_type, codename="test_group"
)
user.user_permissions.add(user_perm)
group = Group.objects.create(name="test_group")
user.groups.add(group)
group.permissions.add(group_perm)
self.assertEqual(
backend.get_all_permissions(user), {"auth.test_user", "auth.test_group"}
)
self.assertEqual(backend.get_user_permissions(user), {"auth.test_user"})
self.assertEqual(backend.get_group_permissions(user), {"auth.test_group"})
with mock.patch.object(self.UserModel, "is_anonymous", True):
self.assertEqual(backend.get_all_permissions(user), set())
self.assertEqual(backend.get_user_permissions(user), set())
self.assertEqual(backend.get_group_permissions(user), set())
async def test_aanonymous_has_no_permissions(self):
"""See test_anonymous_has_no_permissions()"""
backend = ModelBackend()
user = await self.UserModel._default_manager.aget(pk=self.user.pk)
content_type = await sync_to_async(ContentType.objects.get_for_model)(Group)
user_perm = await Permission.objects.acreate(
name="test", content_type=content_type, codename="test_user"
)
group_perm = await Permission.objects.acreate(
name="test2", content_type=content_type, codename="test_group"
)
await user.user_permissions.aadd(user_perm)
group = await Group.objects.acreate(name="test_group")
await user.groups.aadd(group)
await group.permissions.aadd(group_perm)
self.assertEqual(
await backend.aget_all_permissions(user),
{"auth.test_user", "auth.test_group"},
)
self.assertEqual(await backend.aget_user_permissions(user), {"auth.test_user"})
self.assertEqual(
await backend.aget_group_permissions(user), {"auth.test_group"}
)
with mock.patch.object(self.UserModel, "is_anonymous", True):
self.assertEqual(await backend.aget_all_permissions(user), set())
self.assertEqual(await backend.aget_user_permissions(user), set())
self.assertEqual(await backend.aget_group_permissions(user), set())
def test_inactive_has_no_permissions(self):
"""
#17903 -- Inactive users shouldn't have permissions in
ModelBackend.get_(all|user|group)_permissions().
"""
backend = ModelBackend()
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
user_perm = Permission.objects.create(
name="test", content_type=content_type, codename="test_user"
)
group_perm = Permission.objects.create(
name="test2", content_type=content_type, codename="test_group"
)
user.user_permissions.add(user_perm)
group = Group.objects.create(name="test_group")
user.groups.add(group)
group.permissions.add(group_perm)
self.assertEqual(
backend.get_all_permissions(user), {"auth.test_user", "auth.test_group"}
)
self.assertEqual(backend.get_user_permissions(user), {"auth.test_user"})
self.assertEqual(backend.get_group_permissions(user), {"auth.test_group"})
user.is_active = False
user.save()
self.assertEqual(backend.get_all_permissions(user), set())
self.assertEqual(backend.get_user_permissions(user), set())
self.assertEqual(backend.get_group_permissions(user), set())
async def test_ainactive_has_no_permissions(self):
"""See test_inactive_has_no_permissions()"""
backend = ModelBackend()
user = await self.UserModel._default_manager.aget(pk=self.user.pk)
content_type = await sync_to_async(ContentType.objects.get_for_model)(Group)
user_perm = await Permission.objects.acreate(
name="test", content_type=content_type, codename="test_user"
)
group_perm = await Permission.objects.acreate(
name="test2", content_type=content_type, codename="test_group"
)
await user.user_permissions.aadd(user_perm)
group = await Group.objects.acreate(name="test_group")
await user.groups.aadd(group)
await group.permissions.aadd(group_perm)
self.assertEqual(
await backend.aget_all_permissions(user),
{"auth.test_user", "auth.test_group"},
)
self.assertEqual(await backend.aget_user_permissions(user), {"auth.test_user"})
self.assertEqual(
await backend.aget_group_permissions(user), {"auth.test_group"}
)
user.is_active = False
await user.asave()
self.assertEqual(await backend.aget_all_permissions(user), set())
self.assertEqual(await backend.aget_user_permissions(user), set())
self.assertEqual(await backend.aget_group_permissions(user), set())
def test_get_all_superuser_permissions(self):
"""A superuser has all permissions. Refs #14795."""
user = self.UserModel._default_manager.get(pk=self.superuser.pk)
self.assertEqual(len(user.get_all_permissions()), len(Permission.objects.all()))
async def test_aget_all_superuser_permissions(self):
"""See test_get_all_superuser_permissions()"""
user = await self.UserModel._default_manager.aget(pk=self.superuser.pk)
self.assertEqual(
len(await user.aget_all_permissions()), await Permission.objects.acount()
)
@override_settings(
PASSWORD_HASHERS=["auth_tests.test_auth_backends.CountingMD5PasswordHasher"]
)
def test_authentication_timing(self):
"""
Hasher is run once regardless of whether the user exists. Refs #20760.
"""
        # Re-set the password, because this test overrides PASSWORD_HASHERS.
self.user.set_password("test")
self.user.save()
CountingMD5PasswordHasher.calls = 0
username = getattr(self.user, self.UserModel.USERNAME_FIELD)
authenticate(username=username, password="test")
self.assertEqual(CountingMD5PasswordHasher.calls, 1)
CountingMD5PasswordHasher.calls = 0
authenticate(username="no_such_user", password="test")
self.assertEqual(CountingMD5PasswordHasher.calls, 1)
@override_settings(
PASSWORD_HASHERS=["auth_tests.test_auth_backends.CountingMD5PasswordHasher"]
)
async def test_aauthentication_timing(self):
"""See test_authentication_timing()"""
        # Re-set the password, because this test overrides PASSWORD_HASHERS.
self.user.set_password("test")
await self.user.asave()
CountingMD5PasswordHasher.calls = 0
username = getattr(self.user, self.UserModel.USERNAME_FIELD)
await aauthenticate(username=username, password="test")
self.assertEqual(CountingMD5PasswordHasher.calls, 1)
CountingMD5PasswordHasher.calls = 0
await aauthenticate(username="no_such_user", password="test")
self.assertEqual(CountingMD5PasswordHasher.calls, 1)
@override_settings(
PASSWORD_HASHERS=["auth_tests.test_auth_backends.CountingMD5PasswordHasher"]
)
def test_authentication_without_credentials(self):
CountingMD5PasswordHasher.calls = 0
for credentials in (
{},
{"username": getattr(self.user, self.UserModel.USERNAME_FIELD)},
{"password": "test"},
):
with self.subTest(credentials=credentials):
with self.assertNumQueries(0):
authenticate(**credentials)
self.assertEqual(CountingMD5PasswordHasher.calls, 0)
class ModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the default User model.
"""
UserModel = User
user_credentials = {"username": "test", "password": "test"}
def create_users(self):
self.user = User.objects.create_user(
email="[email protected]", **self.user_credentials
)
self.superuser = User.objects.create_superuser(
username="test2",
email="[email protected]",
password="test",
)
def test_authenticate_inactive(self):
"""
An inactive user can't authenticate.
"""
self.assertEqual(authenticate(**self.user_credentials), self.user)
self.user.is_active = False
self.user.save()
self.assertIsNone(authenticate(**self.user_credentials))
async def test_aauthenticate_inactive(self):
"""
An inactive user can't authenticate.
"""
self.assertEqual(await aauthenticate(**self.user_credentials), self.user)
self.user.is_active = False
await self.user.asave()
self.assertIsNone(await aauthenticate(**self.user_credentials))
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithoutIsActiveField")
def test_authenticate_user_without_is_active_field(self):
"""
A custom user without an `is_active` field is allowed to authenticate.
"""
user = CustomUserWithoutIsActiveField.objects._create_user(
username="test",
email="[email protected]",
password="test",
)
self.assertEqual(authenticate(username="test", password="test"), user)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithoutIsActiveField")
async def test_aauthenticate_user_without_is_active_field(self):
"""
A custom user without an `is_active` field is allowed to authenticate.
"""
user = await CustomUserWithoutIsActiveField.objects._acreate_user(
username="test",
email="[email protected]",
password="test",
)
self.assertEqual(await aauthenticate(username="test", password="test"), user)
@override_settings(AUTH_USER_MODEL="auth_tests.ExtensionUser")
class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the custom ExtensionUser model.
This isn't a perfect test, because both the User and ExtensionUser are
    synchronized to the database, which wouldn't ordinarily happen in
production. As a result, it doesn't catch errors caused by the non-
existence of the User table.
The specific problem is queries on .filter(groups__user) et al., which
make an implicit assumption that the user model is called 'User'. In
production, the auth.User table won't exist, so the requested join
won't exist either; in testing, the auth.User *does* exist, and
so does the join. However, the join table won't contain any useful
data; for testing, we check that the data we expect actually does exist.
"""
UserModel = ExtensionUser
def create_users(self):
self.user = ExtensionUser._default_manager.create_user(
username="test",
email="[email protected]",
password="test",
date_of_birth=date(2006, 4, 25),
)
self.superuser = ExtensionUser._default_manager.create_superuser(
username="test2",
email="[email protected]",
password="test",
date_of_birth=date(1976, 11, 8),
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomPermissionsUser")
class CustomPermissionsUserModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the CustomPermissionsUser model.
As with the ExtensionUser test, this isn't a perfect test, because both
the User and CustomPermissionsUser are synchronized to the database,
which wouldn't ordinarily happen in production.
"""
UserModel = CustomPermissionsUser
def create_users(self):
self.user = CustomPermissionsUser._default_manager.create_user(
email="[email protected]", password="test", date_of_birth=date(2006, 4, 25)
)
self.superuser = CustomPermissionsUser._default_manager.create_superuser(
email="[email protected]", password="test", date_of_birth=date(1976, 11, 8)
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
class CustomUserModelBackendAuthenticateTest(TestCase):
"""
The model backend can accept a credentials kwarg labeled with
the custom user model's USERNAME_FIELD.
"""
def test_authenticate(self):
test_user = CustomUser._default_manager.create_user(
email="[email protected]", password="test", date_of_birth=date(2006, 4, 25)
)
authenticated_user = authenticate(email="[email protected]", password="test")
self.assertEqual(test_user, authenticated_user)
async def test_aauthenticate(self):
test_user = await CustomUser._default_manager.acreate_user(
email="[email protected]", password="test", date_of_birth=date(2006, 4, 25)
)
authenticated_user = await aauthenticate(
email="[email protected]", password="test"
)
self.assertEqual(test_user, authenticated_user)
@override_settings(AUTH_USER_MODEL="auth_tests.UUIDUser")
class UUIDUserTests(TestCase):
def test_login(self):
"""
A custom user with a UUID primary key should be able to login.
"""
user = UUIDUser.objects.create_user(username="uuid", password="test")
self.assertTrue(self.client.login(username="uuid", password="test"))
self.assertEqual(
UUIDUser.objects.get(pk=self.client.session[SESSION_KEY]), user
)
async def test_alogin(self):
"""See test_login()"""
user = await UUIDUser.objects.acreate_user(username="uuid", password="test")
self.assertTrue(await self.client.alogin(username="uuid", password="test"))
session_key = await self.client.session.aget(SESSION_KEY)
self.assertEqual(await UUIDUser.objects.aget(pk=session_key), user)
class TestObj:
pass
class SimpleRowlevelBackend:
def has_perm(self, user, perm, obj=None):
if not obj:
return # We only support row level perms
if isinstance(obj, TestObj):
if user.username == "test2":
return True
elif user.is_anonymous and perm == "anon":
return True
elif not user.is_active and perm == "inactive":
return True
return False
async def ahas_perm(self, user, perm, obj=None):
return self.has_perm(user, perm, obj)
def has_module_perms(self, user, app_label):
return (user.is_anonymous or user.is_active) and app_label == "app1"
async def ahas_module_perms(self, user, app_label):
return self.has_module_perms(user, app_label)
def get_all_permissions(self, user, obj=None):
if not obj:
return [] # We only support row level perms
if not isinstance(obj, TestObj):
return ["none"]
if user.is_anonymous:
return ["anon"]
if user.username == "test2":
return ["simple", "advanced"]
else:
return ["simple"]
async def aget_all_permissions(self, user, obj=None):
return self.get_all_permissions(user, obj)
def get_group_permissions(self, user, obj=None):
if not obj:
return # We only support row level perms
if not isinstance(obj, TestObj):
return ["none"]
if "test_group" in [group.name for group in user.groups.all()]:
return ["group_perm"]
else:
return ["none"]
@modify_settings(
AUTHENTICATION_BACKENDS={
"append": "auth_tests.test_auth_backends.SimpleRowlevelBackend",
}
)
class RowlevelBackendTest(TestCase):
"""
Tests for auth backend that supports object level permissions
"""
@classmethod
def setUpTestData(cls):
cls.user1 = User.objects.create_user("test", "[email protected]", "test")
cls.user2 = User.objects.create_user("test2", "[email protected]", "test")
cls.user3 = User.objects.create_user("test3", "[email protected]", "test")
def tearDown(self):
# The get_group_permissions test messes with ContentTypes, which will
# be cached; flush the cache to ensure there are no side effects
# Refs #14975, #14925
ContentType.objects.clear_cache()
def test_has_perm(self):
self.assertIs(self.user1.has_perm("perm", TestObj()), False)
self.assertIs(self.user2.has_perm("perm", TestObj()), True)
self.assertIs(self.user2.has_perm("perm"), False)
self.assertIs(self.user2.has_perms(["simple", "advanced"], TestObj()), True)
self.assertIs(self.user3.has_perm("perm", TestObj()), False)
self.assertIs(self.user3.has_perm("anon", TestObj()), False)
self.assertIs(self.user3.has_perms(["simple", "advanced"], TestObj()), False)
def test_get_all_permissions(self):
self.assertEqual(self.user1.get_all_permissions(TestObj()), {"simple"})
self.assertEqual(
self.user2.get_all_permissions(TestObj()), {"simple", "advanced"}
)
self.assertEqual(self.user2.get_all_permissions(), set())
def test_get_group_permissions(self):
group = Group.objects.create(name="test_group")
self.user3.groups.add(group)
self.assertEqual(self.user3.get_group_permissions(TestObj()), {"group_perm"})
@override_settings(
AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.SimpleRowlevelBackend"],
)
class AnonymousUserBackendTest(SimpleTestCase):
"""
Tests for AnonymousUser delegating to backend.
"""
def setUp(self):
self.user1 = AnonymousUser()
def test_has_perm(self):
self.assertIs(self.user1.has_perm("perm", TestObj()), False)
self.assertIs(self.user1.has_perm("anon", TestObj()), True)
async def test_ahas_perm(self):
self.assertIs(await self.user1.ahas_perm("perm", TestObj()), False)
self.assertIs(await self.user1.ahas_perm("anon", TestObj()), True)
def test_has_perms(self):
self.assertIs(self.user1.has_perms(["anon"], TestObj()), True)
self.assertIs(self.user1.has_perms(["anon", "perm"], TestObj()), False)
async def test_ahas_perms(self):
self.assertIs(await self.user1.ahas_perms(["anon"], TestObj()), True)
self.assertIs(await self.user1.ahas_perms(["anon", "perm"], TestObj()), False)
def test_has_perms_perm_list_invalid(self):
msg = "perm_list must be an iterable of permissions."
with self.assertRaisesMessage(ValueError, msg):
self.user1.has_perms("perm")
with self.assertRaisesMessage(ValueError, msg):
self.user1.has_perms(object())
async def test_ahas_perms_perm_list_invalid(self):
msg = "perm_list must be an iterable of permissions."
with self.assertRaisesMessage(ValueError, msg):
await self.user1.ahas_perms("perm")
with self.assertRaisesMessage(ValueError, msg):
await self.user1.ahas_perms(object())
def test_has_module_perms(self):
self.assertIs(self.user1.has_module_perms("app1"), True)
self.assertIs(self.user1.has_module_perms("app2"), False)
async def test_ahas_module_perms(self):
self.assertIs(await self.user1.ahas_module_perms("app1"), True)
self.assertIs(await self.user1.ahas_module_perms("app2"), False)
def test_get_all_permissions(self):
self.assertEqual(self.user1.get_all_permissions(TestObj()), {"anon"})
async def test_aget_all_permissions(self):
self.assertEqual(await self.user1.aget_all_permissions(TestObj()), {"anon"})
@override_settings(AUTHENTICATION_BACKENDS=[])
class NoBackendsTest(TestCase):
"""
An appropriate error is raised if no auth backends are provided.
"""
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create_user("test", "[email protected]", "test")
def test_raises_exception(self):
msg = (
"No authentication backends have been defined. "
"Does AUTHENTICATION_BACKENDS contain anything?"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.user.has_perm(("perm", TestObj()))
async def test_araises_exception(self):
msg = (
"No authentication backends have been defined. "
"Does AUTHENTICATION_BACKENDS contain anything?"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
await self.user.ahas_perm(("perm", TestObj()))
@override_settings(
AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.SimpleRowlevelBackend"]
)
class InActiveUserBackendTest(TestCase):
"""
Tests for an inactive user
"""
@classmethod
def setUpTestData(cls):
cls.user1 = User.objects.create_user("test", "[email protected]", "test")
cls.user1.is_active = False
cls.user1.save()
def test_has_perm(self):
self.assertIs(self.user1.has_perm("perm", TestObj()), False)
self.assertIs(self.user1.has_perm("inactive", TestObj()), True)
def test_has_module_perms(self):
self.assertIs(self.user1.has_module_perms("app1"), False)
self.assertIs(self.user1.has_module_perms("app2"), False)
class PermissionDeniedBackend:
"""
Always raises PermissionDenied in `authenticate`, `has_perm` and
`has_module_perms`.
"""
def authenticate(self, request, username=None, password=None):
raise PermissionDenied
async def aauthenticate(self, request, username=None, password=None):
raise PermissionDenied
def has_perm(self, user_obj, perm, obj=None):
raise PermissionDenied
async def ahas_perm(self, user_obj, perm, obj=None):
raise PermissionDenied
def has_module_perms(self, user_obj, app_label):
raise PermissionDenied
async def ahas_module_perms(self, user_obj, app_label):
raise PermissionDenied
class PermissionDeniedBackendTest(TestCase):
"""
Other backends are not checked once a backend raises PermissionDenied
"""
backend = "auth_tests.test_auth_backends.PermissionDeniedBackend"
@classmethod
def setUpTestData(cls):
cls.user1 = User.objects.create_user("test", "[email protected]", "test")
def setUp(self):
self.user_login_failed = []
signals.user_login_failed.connect(self.user_login_failed_listener)
self.addCleanup(
signals.user_login_failed.disconnect, self.user_login_failed_listener
)
def user_login_failed_listener(self, sender, credentials, **kwargs):
self.user_login_failed.append(credentials)
@modify_settings(AUTHENTICATION_BACKENDS={"prepend": backend})
def test_permission_denied(self):
"""
A user is not authenticated after a backend raises PermissionDenied
(#2550).
"""
self.assertIsNone(authenticate(username="test", password="test"))
# user_login_failed signal is sent.
self.assertEqual(
self.user_login_failed,
[{"password": "********************", "username": "test"}],
)
@modify_settings(AUTHENTICATION_BACKENDS={"prepend": backend})
async def test_aauthenticate_permission_denied(self):
self.assertIsNone(await aauthenticate(username="test", password="test"))
# user_login_failed signal is sent.
self.assertEqual(
self.user_login_failed,
[{"password": "********************", "username": "test"}],
)
@modify_settings(AUTHENTICATION_BACKENDS={"append": backend})
def test_authenticates(self):
self.assertEqual(authenticate(username="test", password="test"), self.user1)
@modify_settings(AUTHENTICATION_BACKENDS={"append": backend})
async def test_aauthenticate(self):
self.assertEqual(
await aauthenticate(username="test", password="test"), self.user1
)
@modify_settings(AUTHENTICATION_BACKENDS={"prepend": backend})
def test_has_perm_denied(self):
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(
name="test", content_type=content_type, codename="test"
)
self.user1.user_permissions.add(perm)
self.assertIs(self.user1.has_perm("auth.test"), False)
self.assertIs(self.user1.has_module_perms("auth"), False)
@modify_settings(AUTHENTICATION_BACKENDS={"prepend": backend})
async def test_ahas_perm_denied(self):
content_type = await sync_to_async(ContentType.objects.get_for_model)(Group)
perm = await Permission.objects.acreate(
name="test", content_type=content_type, codename="test"
)
await self.user1.user_permissions.aadd(perm)
self.assertIs(await self.user1.ahas_perm("auth.test"), False)
self.assertIs(await self.user1.ahas_module_perms("auth"), False)
@modify_settings(AUTHENTICATION_BACKENDS={"append": backend})
def test_has_perm(self):
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(
name="test", content_type=content_type, codename="test"
)
self.user1.user_permissions.add(perm)
self.assertIs(self.user1.has_perm("auth.test"), True)
self.assertIs(self.user1.has_module_perms("auth"), True)
@modify_settings(AUTHENTICATION_BACKENDS={"append": backend})
async def test_ahas_perm(self):
content_type = await sync_to_async(ContentType.objects.get_for_model)(Group)
perm = await Permission.objects.acreate(
name="test", content_type=content_type, codename="test"
)
await self.user1.user_permissions.aadd(perm)
self.assertIs(await self.user1.ahas_perm("auth.test"), True)
self.assertIs(await self.user1.ahas_module_perms("auth"), True)
class NewModelBackend(ModelBackend):
pass
class ChangedBackendSettingsTest(TestCase):
"""
Tests for changes in the settings.AUTHENTICATION_BACKENDS
"""
backend = "auth_tests.test_auth_backends.NewModelBackend"
TEST_USERNAME = "test_user"
TEST_PASSWORD = "test_password"
TEST_EMAIL = "[email protected]"
@classmethod
def setUpTestData(cls):
User.objects.create_user(cls.TEST_USERNAME, cls.TEST_EMAIL, cls.TEST_PASSWORD)
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_changed_backend_settings(self):
"""
Removing a backend configured in AUTHENTICATION_BACKENDS logs out
users who were already logged in.
"""
# Get a session for the test user
self.assertTrue(
self.client.login(
username=self.TEST_USERNAME,
password=self.TEST_PASSWORD,
)
)
# Prepare a request object
request = HttpRequest()
request.session = self.client.session
# Remove NewModelBackend
with self.settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.ModelBackend"]
):
# Get the user from the request
user = get_user(request)
# Assert that the user retrieval is successful and the user is
# anonymous because the backend is no longer available.
self.assertIsNotNone(user)
self.assertTrue(user.is_anonymous)
class TypeErrorBackend:
"""
Always raises TypeError.
"""
@sensitive_variables("password")
def authenticate(self, request, username=None, password=None):
raise TypeError
@sensitive_variables("password")
async def aauthenticate(self, request, username=None, password=None):
raise TypeError
class TypeErrorValidator:
"""
Always raises a TypeError.
"""
def validate(self, password=None, user=None):
raise TypeError
class SkippedBackend:
def authenticate(self):
# Doesn't accept any credentials so is skipped by authenticate().
pass
class SkippedBackendWithDecoratedMethod:
@sensitive_variables()
def authenticate(self):
pass
class AuthenticateTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.user1 = User.objects.create_user("test", "[email protected]", "test")
def setUp(self):
self.sensitive_password = "mypassword"
@override_settings(
AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.TypeErrorBackend"]
)
def test_type_error_raised(self):
"""A TypeError within a backend is propagated properly (#18171)."""
with self.assertRaises(TypeError):
authenticate(username="test", password="test")
@override_settings(
AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.TypeErrorBackend"]
)
def test_authenticate_sensitive_variables(self):
try:
authenticate(username="testusername", password=self.sensitive_password)
except TypeError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get("/"), *exc_info)
self.assertNotContains(response, self.sensitive_password, status_code=500)
self.assertContains(response, "TypeErrorBackend", status_code=500)
self.assertContains(
response,
'<tr><td>credentials</td><td class="code">'
"<pre>'********************'</pre></td></tr>",
html=True,
status_code=500,
)
@override_settings(
AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.TypeErrorBackend"]
)
async def test_aauthenticate_sensitive_variables(self):
try:
await aauthenticate(
username="testusername", password=self.sensitive_password
)
except TypeError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get("/"), *exc_info)
self.assertNotContains(response, self.sensitive_password, status_code=500)
self.assertContains(response, "TypeErrorBackend", status_code=500)
self.assertContains(
response,
'<tr><td>credentials</td><td class="code">'
"<pre>'********************'</pre></td></tr>",
html=True,
status_code=500,
)
def test_clean_credentials_sensitive_variables(self):
try:
# Passing in a list to cause an exception
_clean_credentials([1, self.sensitive_password])
except TypeError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get("/"), *exc_info)
self.assertNotContains(response, self.sensitive_password, status_code=500)
self.assertContains(
response,
'<tr><td>credentials</td><td class="code">'
"<pre>'********************'</pre></td></tr>",
html=True,
status_code=500,
)
@override_settings(
ROOT_URLCONF="django.contrib.auth.urls",
AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.TypeErrorBackend"],
)
def test_login_process_sensitive_variables(self):
try:
self.client.post(
reverse("login"),
dict(username="testusername", password=self.sensitive_password),
)
except TypeError:
exc_info = sys.exc_info()
rf = RequestFactory()
with patch("django.views.debug.ExceptionReporter", FilteredExceptionReporter):
response = technical_500_response(rf.get("/"), *exc_info)
self.assertNotContains(response, self.sensitive_password, status_code=500)
self.assertContains(response, "TypeErrorBackend", status_code=500)
# AuthenticationForm.clean().
self.assertContains(
response,
'<tr><td>password</td><td class="code">'
"<pre>'********************'</pre></td></tr>",
html=True,
status_code=500,
)
def test_setpasswordform_validate_passwords_sensitive_variables(self):
password_form = SetPasswordForm(AnonymousUser())
password_form.cleaned_data = {
"password1": self.sensitive_password,
"password2": self.sensitive_password + "2",
}
try:
password_form.validate_passwords()
except ValueError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get("/"), *exc_info)
self.assertNotContains(response, self.sensitive_password, status_code=500)
self.assertNotContains(response, self.sensitive_password + "2", status_code=500)
self.assertContains(
response,
'<tr><td>password1</td><td class="code">'
"<pre>'********************'</pre></td></tr>",
html=True,
status_code=500,
)
self.assertContains(
response,
'<tr><td>password2</td><td class="code">'
"<pre>'********************'</pre></td></tr>",
html=True,
status_code=500,
)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{"NAME": __name__ + ".TypeErrorValidator"},
]
)
def test_setpasswordform_validate_password_for_user_sensitive_variables(self):
password_form = SetPasswordForm(AnonymousUser())
password_form.cleaned_data = {"password2": self.sensitive_password}
try:
password_form.validate_password_for_user(AnonymousUser())
except TypeError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get("/"), *exc_info)
self.assertNotContains(response, self.sensitive_password, status_code=500)
self.assertContains(
response,
'<tr><td>password</td><td class="code">'
"<pre>'********************'</pre></td></tr>",
html=True,
status_code=500,
)
def test_passwordchangeform_clean_old_password_sensitive_variables(self):
password_form = PasswordChangeForm(User())
password_form.cleaned_data = {"old_password": self.sensitive_password}
password_form.error_messages = None
try:
password_form.clean_old_password()
except TypeError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get("/"), *exc_info)
self.assertNotContains(response, self.sensitive_password, status_code=500)
self.assertContains(
response,
'<tr><td>old_password</td><td class="code">'
"<pre>'********************'</pre></td></tr>",
html=True,
status_code=500,
)
@override_settings(
AUTHENTICATION_BACKENDS=(
"auth_tests.test_auth_backends.SkippedBackend",
"django.contrib.auth.backends.ModelBackend",
)
)
def test_skips_backends_without_arguments(self):
"""
A backend (SkippedBackend) is ignored if it doesn't accept the
credentials as arguments.
"""
self.assertEqual(authenticate(username="test", password="test"), self.user1)
@override_settings(
AUTHENTICATION_BACKENDS=(
"auth_tests.test_auth_backends.SkippedBackendWithDecoratedMethod",
"django.contrib.auth.backends.ModelBackend",
)
)
def test_skips_backends_with_decorated_method(self):
self.assertEqual(authenticate(username="test", password="test"), self.user1)
class ImproperlyConfiguredUserModelTest(TestCase):
"""
An exception from within get_user_model() is propagated and doesn't
raise an UnboundLocalError (#21439).
"""
@classmethod
def setUpTestData(cls):
cls.user1 = User.objects.create_user("test", "[email protected]", "test")
def setUp(self):
self.client.login(username="test", password="test")
@override_settings(AUTH_USER_MODEL="thismodel.doesntexist")
def test_does_not_shadow_exception(self):
# Prepare a request object
request = HttpRequest()
request.session = self.client.session
msg = (
"AUTH_USER_MODEL refers to model 'thismodel.doesntexist' "
"that has not been installed"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
get_user(request)
class ImportedModelBackend(ModelBackend):
pass
class CustomModelBackend(ModelBackend):
pass
class OtherModelBackend(ModelBackend):
pass
class ImportedBackendTests(TestCase):
"""
#23925 - The backend path added to the session should be the same
as the one defined in the AUTHENTICATION_BACKENDS setting.
"""
backend = "auth_tests.backend_alias.ImportedModelBackend"
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_backend_path(self):
username = "username"
password = "password"
User.objects.create_user(username, "email", password)
self.assertTrue(self.client.login(username=username, password=password))
request = HttpRequest()
request.session = self.client.session
self.assertEqual(request.session[BACKEND_SESSION_KEY], self.backend)
class SelectingBackendTests(TestCase):
backend = "auth_tests.test_auth_backends.CustomModelBackend"
other_backend = "auth_tests.test_auth_backends.OtherModelBackend"
username = "username"
password = "password"
def assertBackendInSession(self, backend):
request = HttpRequest()
request.session = self.client.session
self.assertEqual(request.session[BACKEND_SESSION_KEY], backend)
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_backend_path_login_without_authenticate_single_backend(self):
user = User.objects.create_user(self.username, "email", self.password)
self.client._login(user)
self.assertBackendInSession(self.backend)
@override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend])
def test_backend_path_login_without_authenticate_multiple_backends(self):
user = User.objects.create_user(self.username, "email", self.password)
expected_message = (
"You have multiple authentication backends configured and "
"therefore must provide the `backend` argument or set the "
"`backend` attribute on the user."
)
with self.assertRaisesMessage(ValueError, expected_message):
self.client._login(user)
def test_non_string_backend(self):
user = User.objects.create_user(self.username, "email", self.password)
expected_message = (
"backend must be a dotted import path string (got "
"<class 'django.contrib.auth.backends.ModelBackend'>)."
)
with self.assertRaisesMessage(TypeError, expected_message):
self.client._login(user, backend=ModelBackend)
@override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend])
def test_backend_path_login_with_explicit_backends(self):
user = User.objects.create_user(self.username, "email", self.password)
self.client._login(user, self.other_backend)
self.assertBackendInSession(self.other_backend)
@override_settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.AllowAllUsersModelBackend"]
)
class AllowAllUsersModelBackendTest(TestCase):
"""
Inactive users may authenticate with the AllowAllUsersModelBackend.
"""
user_credentials = {"username": "test", "password": "test"}
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create_user(
email="[email protected]", is_active=False, **cls.user_credentials
)
def test_authenticate(self):
self.assertFalse(self.user.is_active)
self.assertEqual(authenticate(**self.user_credentials), self.user)
def test_get_user(self):
self.client.force_login(self.user)
request = HttpRequest()
request.session = self.client.session
user = get_user(request)
self.assertEqual(user, self.user)
|
./temp_repos/django/tests/test_client_regress/auth_backends.py
|
./temp_repos/django/tests/auth_tests/test_auth_backends.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'CustomUserBackend'.
Context:
- Class Name: CustomUserBackend
- Dependencies to Mock: None detected
- Key Imports: models, django.contrib.auth.backends
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
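A minimal sketch of what such a test could look like. CustomUserBackend itself isn't reproduced above, so a stand-in with an assumed authenticate() contract (manager lookup plus check_password) is defined inline; the class name, the custom_objects manager, and the DoesNotExist handling are illustrative assumptions, not taken from the repo.
import unittest
from unittest import mock


class _StandInCustomUserBackend:
    """Stand-in mirroring the assumed contract of CustomUserBackend:
    look the user up through the model manager, then verify the password."""

    def __init__(self, user_model):
        self.user_model = user_model

    def authenticate(self, request, username=None, password=None):
        try:
            user = self.user_model.custom_objects.get_by_natural_key(username)
        except self.user_model.DoesNotExist:
            return None
        return user if user.check_password(password) else None


class CustomUserBackendTests(unittest.TestCase):
    def setUp(self):
        # Arrange: mock the user model and one stored user.
        self.user = mock.MagicMock()
        self.user_model = mock.MagicMock()
        self.user_model.DoesNotExist = KeyError  # stands in for the real exception
        self.user_model.custom_objects.get_by_natural_key.return_value = self.user
        self.backend = _StandInCustomUserBackend(self.user_model)

    def test_authenticate_success(self):
        self.user.check_password.return_value = True
        # Act
        result = self.backend.authenticate(None, username="u", password="ok")
        # Assert
        self.assertIs(result, self.user)
        self.user.check_password.assert_called_once_with("ok")

    def test_authenticate_wrong_password(self):
        self.user.check_password.return_value = False
        # Act / Assert
        self.assertIsNone(self.backend.authenticate(None, username="u", password="no"))

    def test_authenticate_unknown_user(self):
        self.user_model.custom_objects.get_by_natural_key.side_effect = KeyError
        self.assertIsNone(self.backend.authenticate(None, username="ghost", password="x"))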
|
CustomUserBackend
|
python
|
class StorageSettingsMixin:
def _clear_cached_properties(self, setting, **kwargs):
"""Reset setting based property values."""
if setting == "MEDIA_ROOT":
self.__dict__.pop("base_location", None)
self.__dict__.pop("location", None)
elif setting == "MEDIA_URL":
self.__dict__.pop("base_url", None)
elif setting == "FILE_UPLOAD_PERMISSIONS":
self.__dict__.pop("file_permissions_mode", None)
elif setting == "FILE_UPLOAD_DIRECTORY_PERMISSIONS":
self.__dict__.pop("directory_permissions_mode", None)
def _value_or_setting(self, value, setting):
return setting if value is None else value
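# Usage note (sketch, not part of this file): FileSystemStorage connects the
# setting_changed signal to _clear_cached_properties in its __init__, so
# overriding MEDIA_ROOT or MEDIA_URL in tests drops the cached values above.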
|
from unittest import mock
from django.contrib.auth import models
from django.contrib.auth.mixins import (
LoginRequiredMixin,
PermissionRequiredMixin,
UserPassesTestMixin,
)
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase, TestCase
from django.views.generic import View
class AlwaysTrueMixin(UserPassesTestMixin):
def test_func(self):
return True
class AlwaysFalseMixin(UserPassesTestMixin):
def test_func(self):
return False
class EmptyResponseView(View):
def get(self, request, *args, **kwargs):
return HttpResponse()
class AlwaysTrueView(AlwaysTrueMixin, EmptyResponseView):
pass
class AlwaysFalseView(AlwaysFalseMixin, EmptyResponseView):
pass
class StackedMixinsView1(
LoginRequiredMixin, PermissionRequiredMixin, EmptyResponseView
):
permission_required = ["auth_tests.add_customuser", "auth_tests.change_customuser"]
raise_exception = True
class StackedMixinsView2(
PermissionRequiredMixin, LoginRequiredMixin, EmptyResponseView
):
permission_required = ["auth_tests.add_customuser", "auth_tests.change_customuser"]
raise_exception = True
class AccessMixinTests(TestCase):
factory = RequestFactory()
def test_stacked_mixins_success(self):
user = models.User.objects.create(username="joe", password="qwerty")
perms = models.Permission.objects.filter(
codename__in=("add_customuser", "change_customuser")
)
user.user_permissions.add(*perms)
request = self.factory.get("/rand")
request.user = user
view = StackedMixinsView1.as_view()
response = view(request)
self.assertEqual(response.status_code, 200)
view = StackedMixinsView2.as_view()
response = view(request)
self.assertEqual(response.status_code, 200)
def test_stacked_mixins_missing_permission(self):
user = models.User.objects.create(username="joe", password="qwerty")
perms = models.Permission.objects.filter(codename__in=("add_customuser",))
user.user_permissions.add(*perms)
request = self.factory.get("/rand")
request.user = user
view = StackedMixinsView1.as_view()
with self.assertRaises(PermissionDenied):
view(request)
view = StackedMixinsView2.as_view()
with self.assertRaises(PermissionDenied):
view(request)
def test_access_mixin_permission_denied_response(self):
user = models.User.objects.create(username="joe", password="qwerty")
# Authenticated users receive PermissionDenied.
request = self.factory.get("/rand")
request.user = user
view = AlwaysFalseView.as_view()
with self.assertRaises(PermissionDenied):
view(request)
# Anonymous users are redirected to the login page.
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/accounts/login/?next=/rand")
def test_access_mixin_permission_denied_remote_login_url(self):
class AView(AlwaysFalseView):
login_url = "https://www.remote.example.com/login"
view = AView.as_view()
request = self.factory.get("/rand")
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 302)
self.assertEqual(
response.url,
"https://www.remote.example.com/login?next=http%3A//testserver/rand",
)
@mock.patch.object(models.User, "is_authenticated", False)
def test_stacked_mixins_not_logged_in(self):
user = models.User.objects.create(username="joe", password="qwerty")
perms = models.Permission.objects.filter(
codename__in=("add_customuser", "change_customuser")
)
user.user_permissions.add(*perms)
request = self.factory.get("/rand")
request.user = user
view = StackedMixinsView1.as_view()
with self.assertRaises(PermissionDenied):
view(request)
view = StackedMixinsView2.as_view()
with self.assertRaises(PermissionDenied):
view(request)
class UserPassesTestTests(SimpleTestCase):
factory = RequestFactory()
def _test_redirect(self, view=None, url="/accounts/login/?next=/rand"):
if not view:
view = AlwaysFalseView.as_view()
request = self.factory.get("/rand")
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, url)
def test_default(self):
self._test_redirect()
def test_custom_redirect_url(self):
class AView(AlwaysFalseView):
login_url = "/login/"
self._test_redirect(AView.as_view(), "/login/?next=/rand")
def test_custom_redirect_parameter(self):
class AView(AlwaysFalseView):
redirect_field_name = "goto"
self._test_redirect(AView.as_view(), "/accounts/login/?goto=/rand")
def test_no_redirect_parameter(self):
class AView(AlwaysFalseView):
redirect_field_name = None
self._test_redirect(AView.as_view(), "/accounts/login/")
def test_raise_exception(self):
class AView(AlwaysFalseView):
raise_exception = True
request = self.factory.get("/rand")
request.user = AnonymousUser()
with self.assertRaises(PermissionDenied):
AView.as_view()(request)
def test_raise_exception_custom_message(self):
msg = "You don't have access here"
class AView(AlwaysFalseView):
raise_exception = True
permission_denied_message = msg
request = self.factory.get("/rand")
request.user = AnonymousUser()
view = AView.as_view()
with self.assertRaisesMessage(PermissionDenied, msg):
view(request)
def test_raise_exception_custom_message_function(self):
msg = "You don't have access here"
class AView(AlwaysFalseView):
raise_exception = True
def get_permission_denied_message(self):
return msg
request = self.factory.get("/rand")
request.user = AnonymousUser()
view = AView.as_view()
with self.assertRaisesMessage(PermissionDenied, msg):
view(request)
def test_user_passes(self):
view = AlwaysTrueView.as_view()
request = self.factory.get("/rand")
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 200)
class LoginRequiredMixinTests(TestCase):
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
cls.user = models.User.objects.create(username="joe", password="qwerty")
def test_login_required(self):
"""
LoginRequiredMixin redirects anonymous users to the login page and
serves the view to authenticated users, mirroring the login_required
decorator.
"""
class AView(LoginRequiredMixin, EmptyResponseView):
pass
view = AView.as_view()
request = self.factory.get("/rand")
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 302)
self.assertEqual("/accounts/login/?next=/rand", response.url)
request = self.factory.get("/rand")
request.user = self.user
response = view(request)
self.assertEqual(response.status_code, 200)
class PermissionsRequiredMixinTests(TestCase):
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
cls.user = models.User.objects.create(username="joe", password="qwerty")
perms = models.Permission.objects.filter(
codename__in=("add_customuser", "change_customuser")
)
cls.user.user_permissions.add(*perms)
def test_many_permissions_pass(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = [
"auth_tests.add_customuser",
"auth_tests.change_customuser",
]
request = self.factory.get("/rand")
request.user = self.user
resp = AView.as_view()(request)
self.assertEqual(resp.status_code, 200)
def test_single_permission_pass(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = "auth_tests.add_customuser"
request = self.factory.get("/rand")
request.user = self.user
resp = AView.as_view()(request)
self.assertEqual(resp.status_code, 200)
def test_permission_denied_redirect(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = [
"auth_tests.add_customuser",
"auth_tests.change_customuser",
"nonexistent-permission",
]
# Authenticated users receive PermissionDenied.
request = self.factory.get("/rand")
request.user = self.user
with self.assertRaises(PermissionDenied):
AView.as_view()(request)
# Anonymous users are redirected to the login page.
request.user = AnonymousUser()
resp = AView.as_view()(request)
self.assertEqual(resp.status_code, 302)
def test_permission_denied_exception_raised(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = [
"auth_tests.add_customuser",
"auth_tests.change_customuser",
"nonexistent-permission",
]
raise_exception = True
request = self.factory.get("/rand")
request.user = self.user
with self.assertRaises(PermissionDenied):
AView.as_view()(request)
|
./temp_repos/django/django/core/files/storage/mixins.py
|
./temp_repos/django/tests/auth_tests/test_mixins.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'StorageSettingsMixin'.
Context:
- Class Name: StorageSettingsMixin
- Dependencies to Mock: None detected
- Key Imports:
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
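A possible sketch, exercising the mixin shown above directly (the import path follows the file path listed below and assumes Django is installed). Seeding the cache keys into __dict__ by hand is an assumption about how the cached_property values would appear on a real storage object.
import unittest

from django.core.files.storage.mixins import StorageSettingsMixin


class StorageSettingsMixinTests(unittest.TestCase):
    def setUp(self):
        # Arrange: seed the cached attributes the mixin manages.
        self.obj = StorageSettingsMixin()
        self.obj.__dict__.update(
            base_location="/old", location="/old", base_url="/media/"
        )

    def test_media_root_clears_location_caches(self):
        # Act
        self.obj._clear_cached_properties("MEDIA_ROOT")
        # Assert: only the MEDIA_ROOT-backed caches are dropped.
        self.assertNotIn("base_location", self.obj.__dict__)
        self.assertNotIn("location", self.obj.__dict__)
        self.assertIn("base_url", self.obj.__dict__)

    def test_unrelated_setting_is_a_no_op(self):
        # Act / Assert: an unknown setting leaves every cache in place.
        self.obj._clear_cached_properties("SOMETHING_ELSE")
        self.assertIn("base_location", self.obj.__dict__)

    def test_value_or_setting(self):
        # None falls back to the setting; any explicit value wins.
        self.assertEqual(self.obj._value_or_setting(None, "fallback"), "fallback")
        self.assertEqual(self.obj._value_or_setting("explicit", "fallback"), "explicit")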
|
StorageSettingsMixin
|
python
|
import ipaddress
import math
import re
from pathlib import Path
from urllib.parse import urlsplit
from django.core.exceptions import ValidationError
from django.utils.deconstruct import deconstructible
from django.utils.http import MAX_URL_LENGTH
from django.utils.ipv6 import is_valid_ipv6_address
from django.utils.regex_helper import _lazy_re_compile
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
# These values, if given to validate(), will trigger the self.required check.
EMPTY_VALUES = (None, "", [], (), {})
@deconstructible
class RegexValidator:
regex = ""
message = _("Enter a valid value.")
code = "invalid"
inverse_match = False
flags = 0
def __init__(
self, regex=None, message=None, code=None, inverse_match=None, flags=None
):
if regex is not None:
self.regex = regex
if message is not None:
self.message = message
if code is not None:
self.code = code
if inverse_match is not None:
self.inverse_match = inverse_match
if flags is not None:
self.flags = flags
if self.flags and not isinstance(self.regex, str):
raise TypeError(
"If the flags are set, regex must be a regular expression string."
)
self.regex = _lazy_re_compile(self.regex, self.flags)
def __call__(self, value):
"""
Validate that the input contains (or does *not* contain, if
inverse_match is True) a match for the regular expression.
"""
regex_matches = self.regex.search(str(value))
invalid_input = regex_matches if self.inverse_match else not regex_matches
if invalid_input:
raise ValidationError(self.message, code=self.code, params={"value": value})
def __eq__(self, other):
return (
isinstance(other, RegexValidator)
and self.regex.pattern == other.regex.pattern
and self.regex.flags == other.regex.flags
and (self.message == other.message)
and (self.code == other.code)
and (self.inverse_match == other.inverse_match)
)
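# Example usage (illustrative): RegexValidator(r"^\d{4}$")("1234") returns
# None, while ("123") raises ValidationError because search() finds no match;
# with inverse_match=True the logic flips and matching input is rejected.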
@deconstructible
class DomainNameValidator(RegexValidator):
message = _("Enter a valid domain name.")
ul = "\u00a1-\uffff" # Unicode letters range (must not be a raw string).
# Host patterns.
hostname_re = (
r"[a-z" + ul + r"0-9](?:[a-z" + ul + r"0-9-]{0,61}[a-z" + ul + r"0-9])?"
)
# Max length for domain name labels is 63 characters per RFC 1034 sec. 3.1.
domain_re = r"(?:\.(?!-)[a-z" + ul + r"0-9-]{1,63}(?<!-))*"
# Top-level domain.
tld_no_fqdn_re = (
r"\." # dot
r"(?!-)" # can't start with a dash
r"(?:[a-z" + ul + "-]{2,63}" # domain label
r"|xn--[a-z0-9]{1,59})" # or punycode label
r"(?<!-)" # can't end with a dash
)
tld_re = tld_no_fqdn_re + r"\.?"
ascii_only_hostname_re = r"[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?"
ascii_only_domain_re = r"(?:\.(?!-)[a-zA-Z0-9-]{1,63}(?<!-))*"
ascii_only_tld_re = (
r"\." # dot
r"(?!-)" # can't start with a dash
r"(?:[a-zA-Z0-9-]{2,63})" # domain label
r"(?<!-)" # can't end with a dash
r"\.?" # may have a trailing dot
)
max_length = 255
def __init__(self, **kwargs):
self.accept_idna = kwargs.pop("accept_idna", True)
if self.accept_idna:
self.regex = _lazy_re_compile(
r"^" + self.hostname_re + self.domain_re + self.tld_re + r"$",
re.IGNORECASE,
)
else:
self.regex = _lazy_re_compile(
r"^"
+ self.ascii_only_hostname_re
+ self.ascii_only_domain_re
+ self.ascii_only_tld_re
+ r"$",
re.IGNORECASE,
)
super().__init__(**kwargs)
def __call__(self, value):
if not isinstance(value, str) or len(value) > self.max_length:
raise ValidationError(self.message, code=self.code, params={"value": value})
if not self.accept_idna and not value.isascii():
raise ValidationError(self.message, code=self.code, params={"value": value})
super().__call__(value)
validate_domain_name = DomainNameValidator()
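# Example (illustrative): validate_domain_name("example.com") passes, while
# "exa_mple.com" raises (underscores aren't in the hostname pattern) and
# DomainNameValidator(accept_idna=False)("münchen.de") raises on non-ASCII.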
@deconstructible
class URLValidator(RegexValidator):
# IP patterns
ipv4_re = (
r"(?:0|25[0-5]|2[0-4][0-9]|1[0-9]?[0-9]?|[1-9][0-9]?)"
r"(?:\.(?:0|25[0-5]|2[0-4][0-9]|1[0-9]?[0-9]?|[1-9][0-9]?)){3}"
)
ipv6_re = r"\[[0-9a-f:.]+\]" # (simple regex, validated later)
hostname_re = DomainNameValidator.hostname_re
domain_re = DomainNameValidator.domain_re
tld_re = DomainNameValidator.tld_re
host_re = "(" + hostname_re + domain_re + tld_re + "|localhost)"
regex = _lazy_re_compile(
r"^(?:[a-z0-9.+-]*)://" # scheme is validated separately
r"(?:[^\s:@/]+(?::[^\s:@/]*)?@)?" # user:pass authentication
r"(?:" + ipv4_re + "|" + ipv6_re + "|" + host_re + ")"
r"(?::[0-9]{1,5})?" # port
r"(?:[/?#][^\s]*)?" # resource path
r"\Z",
re.IGNORECASE,
)
message = _("Enter a valid URL.")
schemes = ["http", "https", "ftp", "ftps"]
unsafe_chars = frozenset("\t\r\n")
max_length = MAX_URL_LENGTH
def __init__(self, schemes=None, **kwargs):
super().__init__(**kwargs)
if schemes is not None:
self.schemes = schemes
def __call__(self, value):
if not isinstance(value, str) or len(value) > self.max_length:
raise ValidationError(self.message, code=self.code, params={"value": value})
if self.unsafe_chars.intersection(value):
raise ValidationError(self.message, code=self.code, params={"value": value})
# Check if the scheme is valid.
scheme = value.split("://")[0].lower()
if scheme not in self.schemes:
raise ValidationError(self.message, code=self.code, params={"value": value})
# Then check full URL
try:
splitted_url = urlsplit(value)
except ValueError:
raise ValidationError(self.message, code=self.code, params={"value": value})
super().__call__(value)
# Now verify IPv6 in the netloc part
host_match = re.search(r"^\[(.+)\](?::[0-9]{1,5})?$", splitted_url.netloc)
if host_match:
potential_ip = host_match[1]
try:
validate_ipv6_address(potential_ip)
except ValidationError:
raise ValidationError(
self.message, code=self.code, params={"value": value}
)
# The maximum length of a full host name is 253 characters per RFC 1034
# section 3.1. It's defined to be 255 bytes or less, but this includes
# one byte for the length of the name and one byte for the trailing dot
# that's used to indicate absolute names in DNS.
if splitted_url.hostname is None or len(splitted_url.hostname) > 253:
raise ValidationError(self.message, code=self.code, params={"value": value})
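# Example (illustrative): URLValidator()("https://example.com/path") passes;
# URLValidator(schemes=["ftp"])("https://example.com") fails the scheme check;
# "http://[::1]:8000/" exercises the IPv6 netloc verification above.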
integer_validator = RegexValidator(
_lazy_re_compile(r"^-?\d+\Z"),
message=_("Enter a valid integer."),
code="invalid",
)
def validate_integer(value):
return integer_validator(value)
@deconstructible
class EmailValidator:
message = _("Enter a valid email address.")
code = "invalid"
hostname_re = DomainNameValidator.hostname_re
domain_re = DomainNameValidator.domain_re
tld_no_fqdn_re = DomainNameValidator.tld_no_fqdn_re
user_regex = _lazy_re_compile(
# dot-atom
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z"
# quoted-string
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])'
r'*"\Z)',
re.IGNORECASE,
)
domain_regex = _lazy_re_compile(
r"^" + hostname_re + domain_re + tld_no_fqdn_re + r"\Z",
re.IGNORECASE,
)
literal_regex = _lazy_re_compile(
# literal form, ipv4 or ipv6 address (SMTP 4.1.3)
r"\[([A-F0-9:.]+)\]\Z",
re.IGNORECASE,
)
domain_allowlist = ["localhost"]
def __init__(self, message=None, code=None, allowlist=None):
if message is not None:
self.message = message
if code is not None:
self.code = code
if allowlist is not None:
self.domain_allowlist = allowlist
def __call__(self, value):
# The maximum length of an email is 320 characters per RFC 3696
# section 3.
if not value or "@" not in value or len(value) > 320:
raise ValidationError(self.message, code=self.code, params={"value": value})
user_part, domain_part = value.rsplit("@", 1)
if not self.user_regex.match(user_part):
raise ValidationError(self.message, code=self.code, params={"value": value})
if domain_part not in self.domain_allowlist and not self.validate_domain_part(
domain_part
):
raise ValidationError(self.message, code=self.code, params={"value": value})
def validate_domain_part(self, domain_part):
if self.domain_regex.match(domain_part):
return True
literal_match = self.literal_regex.match(domain_part)
if literal_match:
ip_address = literal_match[1]
try:
validate_ipv46_address(ip_address)
return True
except ValidationError:
pass
return False
def __eq__(self, other):
return (
isinstance(other, EmailValidator)
and (set(self.domain_allowlist) == set(other.domain_allowlist))
and (self.message == other.message)
and (self.code == other.code)
)
validate_email = EmailValidator()
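# Example (illustrative): validate_email("user@localhost") passes via the
# domain allowlist, "user@[127.0.0.1]" passes the literal/IP branch, and
# "user@-bad-" raises because neither domain form matches.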
slug_re = _lazy_re_compile(r"^[-a-zA-Z0-9_]+\Z")
validate_slug = RegexValidator(
slug_re,
# Translators: "letters" means latin letters: a-z and A-Z.
_("Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."),
"invalid",
)
slug_unicode_re = _lazy_re_compile(r"^[-\w]+\Z")
validate_unicode_slug = RegexValidator(
slug_unicode_re,
_(
"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
"hyphens."
),
"invalid",
)
def validate_ipv4_address(value):
try:
ipaddress.IPv4Address(value)
except ValueError:
raise ValidationError(
_("Enter a valid %(protocol)s address."),
code="invalid",
params={"protocol": _("IPv4"), "value": value},
)
def validate_ipv6_address(value):
if not is_valid_ipv6_address(value):
raise ValidationError(
_("Enter a valid %(protocol)s address."),
code="invalid",
params={"protocol": _("IPv6"), "value": value},
)
def validate_ipv46_address(value):
try:
validate_ipv4_address(value)
except ValidationError:
try:
validate_ipv6_address(value)
except ValidationError:
raise ValidationError(
_("Enter a valid %(protocol)s address."),
code="invalid",
params={"protocol": _("IPv4 or IPv6"), "value": value},
)
ip_address_validator_map = {
"both": [validate_ipv46_address],
"ipv4": [validate_ipv4_address],
"ipv6": [validate_ipv6_address],
}
def ip_address_validators(protocol, unpack_ipv4):
"""
Depending on the given parameters, return the appropriate validators for
the GenericIPAddressField.
"""
if protocol != "both" and unpack_ipv4:
raise ValueError(
"You can only use `unpack_ipv4` if `protocol` is set to 'both'"
)
try:
return ip_address_validator_map[protocol.lower()]
except KeyError:
raise ValueError(
"The protocol '%s' is unknown. Supported: %s"
% (protocol, list(ip_address_validator_map))
)
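# Example (illustrative): ip_address_validators("ipv4", False) returns
# [validate_ipv4_address], while ip_address_validators("ipv6", True) raises
# ValueError because unpack_ipv4 requires protocol="both".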
def int_list_validator(sep=",", message=None, code="invalid", allow_negative=False):
regexp = _lazy_re_compile(
r"^%(neg)s\d+(?:%(sep)s%(neg)s\d+)*\Z"
% {
"neg": "(-)?" if allow_negative else "",
"sep": re.escape(sep),
}
)
return RegexValidator(regexp, message=message, code=code)
validate_comma_separated_integer_list = int_list_validator(
message=_("Enter only digits separated by commas."),
)
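# Example (illustrative): validate_comma_separated_integer_list("1,2,3")
# passes silently, while "1,,2" and "1,a" raise ValidationError; pass
# allow_negative=True to int_list_validator() to accept values like "-1,2".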
@deconstructible
class BaseValidator:
message = _("Ensure this value is %(limit_value)s (it is %(show_value)s).")
code = "limit_value"
def __init__(self, limit_value, message=None):
self.limit_value = limit_value
if message:
self.message = message
def __call__(self, value):
cleaned = self.clean(value)
limit_value = (
self.limit_value() if callable(self.limit_value) else self.limit_value
)
params = {"limit_value": limit_value, "show_value": cleaned, "value": value}
if self.compare(cleaned, limit_value):
raise ValidationError(self.message, code=self.code, params=params)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return (
self.limit_value == other.limit_value
and self.message == other.message
and self.code == other.code
)
def compare(self, a, b):
return a is not b
def clean(self, x):
return x
@deconstructible
class MaxValueValidator(BaseValidator):
message = _("Ensure this value is less than or equal to %(limit_value)s.")
code = "max_value"
def compare(self, a, b):
return a > b
@deconstructible
class MinValueValidator(BaseValidator):
message = _("Ensure this value is greater than or equal to %(limit_value)s.")
code = "min_value"
def compare(self, a, b):
return a < b
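# Example (illustrative): MaxValueValidator(10)(11) raises with code
# "max_value" and MinValueValidator(0)(-1) raises with code "min_value";
# boundary values pass in both cases because compare() is a strict inequality.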
@deconstructible
class StepValueValidator(BaseValidator):
message = _("Ensure this value is a multiple of step size %(limit_value)s.")
code = "step_size"
def __init__(self, limit_value, message=None, offset=None):
super().__init__(limit_value, message)
if offset is not None:
self.message = _(
"Ensure this value is a multiple of step size %(limit_value)s, "
"starting from %(offset)s, e.g. %(offset)s, %(valid_value1)s, "
"%(valid_value2)s, and so on."
)
self.offset = offset
def __call__(self, value):
if self.offset is None:
super().__call__(value)
else:
cleaned = self.clean(value)
limit_value = (
self.limit_value() if callable(self.limit_value) else self.limit_value
)
if self.compare(cleaned, limit_value):
offset = cleaned.__class__(self.offset)
params = {
"limit_value": limit_value,
"offset": offset,
"valid_value1": offset + limit_value,
"valid_value2": offset + 2 * limit_value,
}
raise ValidationError(self.message, code=self.code, params=params)
def compare(self, a, b):
offset = 0 if self.offset is None else self.offset
return not math.isclose(math.remainder(a - offset, b), 0, abs_tol=1e-9)
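# Example (illustrative): StepValueValidator(3)(10) raises because
# math.remainder(10, 3) == 1.0, while 9 passes; with offset=1 the accepted
# values become 1, 4, 7, ... since remainder(value - 1, 3) must be ~0.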
@deconstructible
class MinLengthValidator(BaseValidator):
message = ngettext_lazy(
"Ensure this value has at least %(limit_value)d character (it has "
"%(show_value)d).",
"Ensure this value has at least %(limit_value)d characters (it has "
"%(show_value)d).",
"limit_value",
)
code = "min_length"
def compare(self, a, b):
return a < b
def clean(self, x):
return len(x)
@deconstructible
class MaxLengthValidator(BaseValidator):
message = ngettext_lazy(
"Ensure this value has at most %(limit_value)d character (it has "
"%(show_value)d).",
"Ensure this value has at most %(limit_value)d characters (it has "
"%(show_value)d).",
"limit_value",
)
code = "max_length"
def compare(self, a, b):
return a > b
def clean(self, x):
return len(x)
@deconstructible
class DecimalValidator:
"""
Validate that the input does not exceed the maximum number of digits
expected, otherwise raise ValidationError.
"""
messages = {
"invalid": _("Enter a number."),
"max_digits": ngettext_lazy(
"Ensure that there are no more than %(max)s digit in total.",
"Ensure that there are no more than %(max)s digits in total.",
"max",
),
"max_decimal_places": ngettext_lazy(
"Ensure that there are no more than %(max)s decimal place.",
"Ensure that there are no more than %(max)s decimal places.",
"max",
),
"max_whole_digits": ngettext_lazy(
"Ensure that there are no more than %(max)s digit before the decimal "
"point.",
"Ensure that there are no more than %(max)s digits before the decimal "
"point.",
"max",
),
}
def __init__(self, max_digits, decimal_places):
self.max_digits = max_digits
self.decimal_places = decimal_places
def __call__(self, value):
digit_tuple, exponent = value.as_tuple()[1:]
if exponent in {"F", "n", "N"}:
raise ValidationError(
self.messages["invalid"], code="invalid", params={"value": value}
)
if exponent >= 0:
digits = len(digit_tuple)
if digit_tuple != (0,):
# A positive exponent adds that many trailing zeros.
digits += exponent
decimals = 0
else:
# If the absolute value of the negative exponent is larger than the
# number of digits, then it's the same as the number of digits,
# because it'll consume all of the digits in digit_tuple and then
# add abs(exponent) - len(digit_tuple) leading zeros after the
# decimal point.
if abs(exponent) > len(digit_tuple):
digits = decimals = abs(exponent)
else:
digits = len(digit_tuple)
decimals = abs(exponent)
whole_digits = digits - decimals
if self.max_digits is not None and digits > self.max_digits:
raise ValidationError(
self.messages["max_digits"],
code="max_digits",
params={"max": self.max_digits, "value": value},
)
if self.decimal_places is not None and decimals > self.decimal_places:
raise ValidationError(
self.messages["max_decimal_places"],
code="max_decimal_places",
params={"max": self.decimal_places, "value": value},
)
if (
self.max_digits is not None
and self.decimal_places is not None
and whole_digits > (self.max_digits - self.decimal_places)
):
raise ValidationError(
self.messages["max_whole_digits"],
code="max_whole_digits",
params={"max": (self.max_digits - self.decimal_places), "value": value},
)
def __eq__(self, other):
return (
isinstance(other, self.__class__)
and self.max_digits == other.max_digits
and self.decimal_places == other.decimal_places
)
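# Worked example (illustrative): Decimal("123.45").as_tuple() gives
# digits=(1, 2, 3, 4, 5) and exponent=-2, so digits=5, decimals=2, and
# whole_digits=3, which DecimalValidator(5, 2) accepts; Decimal("0.004")
# gives digits=(4,), exponent=-3, hence digits = decimals = 3, whole_digits=0.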
@deconstructible
class FileExtensionValidator:
message = _(
"File extension “%(extension)s” is not allowed. "
"Allowed extensions are: %(allowed_extensions)s."
)
code = "invalid_extension"
def __init__(self, allowed_extensions=None, message=None, code=None):
if allowed_extensions is not None:
allowed_extensions = [
allowed_extension.lower() for allowed_extension in allowed_extensions
]
self.allowed_extensions = allowed_extensions
if message is not None:
self.message = message
if code is not None:
self.code = code
def __call__(self, value):
extension = Path(value.name).suffix[1:].lower()
if (
self.allowed_extensions is not None
and extension not in self.allowed_extensions
):
raise ValidationError(
self.message,
code=self.code,
params={
"extension": extension,
"allowed_extensions": ", ".join(self.allowed_extensions),
"value": value,
},
)
def __eq__(self, other):
return (
isinstance(other, self.__class__)
and set(self.allowed_extensions or [])
== set(other.allowed_extensions or [])
and self.message == other.message
and self.code == other.code
)
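# Example (illustrative): FileExtensionValidator(["pdf"]) accepts a value
# whose .name is "doc.PDF" (extensions are lower-cased on both sides) and
# rejects "doc.txt" with code "invalid_extension".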
def get_available_image_extensions():
try:
from PIL import Image
except ImportError:
return []
else:
Image.init()
return [ext.lower()[1:] for ext in Image.EXTENSION]
def validate_image_file_extension(value):
return FileExtensionValidator(allowed_extensions=get_available_image_extensions())(
value
)
@deconstructible
class ProhibitNullCharactersValidator:
"""Validate that the string doesn't contain the null character."""
message = _("Null characters are not allowed.")
code = "null_characters_not_allowed"
def __init__(self, message=None, code=None):
if message is not None:
self.message = message
if code is not None:
self.code = code
def __call__(self, value):
if "\x00" in str(value):
raise ValidationError(self.message, code=self.code, params={"value": value})
def __eq__(self, other):
return (
isinstance(other, self.__class__)
and self.message == other.message
and self.code == other.code
)
|
import os
from unittest import mock
from django.contrib.auth import validators
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import (
CommonPasswordValidator,
MinimumLengthValidator,
NumericPasswordValidator,
UserAttributeSimilarityValidator,
get_default_password_validators,
get_password_validators,
password_changed,
password_validators_help_text_html,
password_validators_help_texts,
validate_password,
)
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.db import models
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.utils import isolate_apps
from django.utils.html import conditional_escape
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
"OPTIONS": {
"min_length": 12,
},
},
]
)
class PasswordValidationTest(SimpleTestCase):
def test_get_default_password_validators(self):
validators = get_default_password_validators()
self.assertEqual(len(validators), 2)
self.assertEqual(validators[0].__class__.__name__, "CommonPasswordValidator")
self.assertEqual(validators[1].__class__.__name__, "MinimumLengthValidator")
self.assertEqual(validators[1].min_length, 12)
def test_get_password_validators_custom(self):
validator_config = [
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}
]
validators = get_password_validators(validator_config)
self.assertEqual(len(validators), 1)
self.assertEqual(validators[0].__class__.__name__, "CommonPasswordValidator")
self.assertEqual(get_password_validators([]), [])
def test_get_password_validators_custom_invalid(self):
validator_config = [{"NAME": "json.tool"}]
msg = (
"The module in NAME could not be imported: json.tool. "
"Check your AUTH_PASSWORD_VALIDATORS setting."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
get_password_validators(validator_config)
def test_validate_password(self):
self.assertIsNone(validate_password("sufficiently-long"))
msg_too_short = (
"This password is too short. It must contain at least 12 characters."
)
with self.assertRaises(ValidationError) as cm:
validate_password("django4242")
self.assertEqual(cm.exception.messages, [msg_too_short])
self.assertEqual(cm.exception.error_list[0].code, "password_too_short")
with self.assertRaises(ValidationError) as cm:
validate_password("password")
self.assertEqual(
cm.exception.messages, ["This password is too common.", msg_too_short]
)
self.assertEqual(cm.exception.error_list[0].code, "password_too_common")
self.assertIsNone(validate_password("password", password_validators=[]))
def test_password_changed(self):
self.assertIsNone(password_changed("password"))
def test_password_changed_with_custom_validator(self):
class Validator:
def password_changed(self, password, user):
self.password = password
self.user = user
user = object()
validator = Validator()
password_changed("password", user=user, password_validators=(validator,))
self.assertIs(validator.user, user)
self.assertEqual(validator.password, "password")
def test_password_validators_help_texts(self):
help_texts = password_validators_help_texts()
self.assertEqual(len(help_texts), 2)
self.assertIn("12 characters", help_texts[1])
self.assertEqual(password_validators_help_texts(password_validators=[]), [])
def test_password_validators_help_text_html(self):
help_text = password_validators_help_text_html()
self.assertEqual(help_text.count("<li>"), 2)
self.assertIn("12 characters", help_text)
def test_password_validators_help_text_html_escaping(self):
class AmpersandValidator:
def get_help_text(self):
return "Must contain &"
help_text = password_validators_help_text_html([AmpersandValidator()])
self.assertEqual(help_text, "<ul><li>Must contain &</li></ul>")
# help_text is marked safe and therefore unchanged by
# conditional_escape().
self.assertEqual(help_text, conditional_escape(help_text))
@override_settings(AUTH_PASSWORD_VALIDATORS=[])
def test_empty_password_validator_help_text_html(self):
self.assertEqual(password_validators_help_text_html(), "")
class MinimumLengthValidatorTest(SimpleTestCase):
def test_validate(self):
expected_error = (
"This password is too short. It must contain at least %d characters."
)
self.assertIsNone(MinimumLengthValidator().validate("12345678"))
self.assertIsNone(MinimumLengthValidator(min_length=3).validate("123"))
with self.assertRaises(ValidationError) as cm:
MinimumLengthValidator().validate("1234567")
self.assertEqual(cm.exception.messages, [expected_error % 8])
error = cm.exception.error_list[0]
self.assertEqual(error.code, "password_too_short")
self.assertEqual(error.params, {"min_length": 8})
with self.assertRaises(ValidationError) as cm:
MinimumLengthValidator(min_length=3).validate("12")
self.assertEqual(cm.exception.messages, [expected_error % 3])
error = cm.exception.error_list[0]
self.assertEqual(error.code, "password_too_short")
self.assertEqual(error.params, {"min_length": 3})
def test_help_text(self):
self.assertEqual(
MinimumLengthValidator().get_help_text(),
"Your password must contain at least 8 characters.",
)
@mock.patch("django.contrib.auth.password_validation.ngettext")
def test_l10n(self, mock_ngettext):
with self.subTest("get_error_message"):
MinimumLengthValidator().get_error_message()
mock_ngettext.assert_called_with(
"This password is too short. It must contain at least %d character.",
"This password is too short. It must contain at least %d characters.",
8,
)
        mock_ngettext.reset_mock()
with self.subTest("get_help_text"):
MinimumLengthValidator().get_help_text()
            mock_ngettext.assert_called_with(
                "Your password must contain at least %(min_length)d character.",
                "Your password must contain at least %(min_length)d characters.",
                8,
            )
def test_custom_error(self):
class CustomMinimumLengthValidator(MinimumLengthValidator):
def get_error_message(self):
return "Your password must be %d characters long" % self.min_length
expected_error = "Your password must be %d characters long"
with self.assertRaisesMessage(ValidationError, expected_error % 8) as cm:
CustomMinimumLengthValidator().validate("1234567")
self.assertEqual(cm.exception.error_list[0].code, "password_too_short")
with self.assertRaisesMessage(ValidationError, expected_error % 3) as cm:
CustomMinimumLengthValidator(min_length=3).validate("12")
class UserAttributeSimilarityValidatorTest(TestCase):
def test_validate(self):
user = User.objects.create_user(
username="testclient",
password="password",
email="[email protected]",
first_name="Test",
last_name="Client",
)
expected_error = "The password is too similar to the %s."
self.assertIsNone(UserAttributeSimilarityValidator().validate("testclient"))
with self.assertRaises(ValidationError) as cm:
UserAttributeSimilarityValidator().validate("testclient", user=user)
self.assertEqual(cm.exception.messages, [expected_error % "username"])
self.assertEqual(cm.exception.error_list[0].code, "password_too_similar")
msg = expected_error % "email address"
with self.assertRaisesMessage(ValidationError, msg):
UserAttributeSimilarityValidator().validate("example.com", user=user)
msg = expected_error % "first name"
with self.assertRaisesMessage(ValidationError, msg):
UserAttributeSimilarityValidator(
user_attributes=["first_name"],
max_similarity=0.3,
).validate("testclient", user=user)
# max_similarity=1 doesn't allow passwords that are identical to the
# attribute's value.
msg = expected_error % "first name"
with self.assertRaisesMessage(ValidationError, msg):
UserAttributeSimilarityValidator(
user_attributes=["first_name"],
max_similarity=1,
).validate(user.first_name, user=user)
# Very low max_similarity is rejected.
msg = "max_similarity must be at least 0.1"
with self.assertRaisesMessage(ValueError, msg):
UserAttributeSimilarityValidator(max_similarity=0.09)
# Passes validation.
self.assertIsNone(
UserAttributeSimilarityValidator(user_attributes=["first_name"]).validate(
"testclient", user=user
)
)
@isolate_apps("auth_tests")
def test_validate_property(self):
        class TestUser(models.Model):
            @property
            def username(self):
                return "foobar"
msg = "The password is too similar to the username."
with self.assertRaisesMessage(ValidationError, msg):
UserAttributeSimilarityValidator().validate("foobar", user=TestUser())
def test_help_text(self):
self.assertEqual(
UserAttributeSimilarityValidator().get_help_text(),
"Your password can’t be too similar to your other personal information.",
)
def test_custom_error(self):
class CustomUserAttributeSimilarityValidator(UserAttributeSimilarityValidator):
def get_error_message(self):
return "The password is too close to the %(verbose_name)s."
user = User.objects.create_user(
username="testclient",
password="password",
email="[email protected]",
first_name="Test",
last_name="Client",
)
expected_error = "The password is too close to the %s."
with self.assertRaisesMessage(ValidationError, expected_error % "username"):
CustomUserAttributeSimilarityValidator().validate("testclient", user=user)
def test_custom_error_verbose_name_not_used(self):
class CustomUserAttributeSimilarityValidator(UserAttributeSimilarityValidator):
def get_error_message(self):
return "The password is too close to a user attribute."
user = User.objects.create_user(
username="testclient",
password="password",
email="[email protected]",
first_name="Test",
last_name="Client",
)
expected_error = "The password is too close to a user attribute."
with self.assertRaisesMessage(ValidationError, expected_error):
CustomUserAttributeSimilarityValidator().validate("testclient", user=user)
class CommonPasswordValidatorTest(SimpleTestCase):
def test_validate(self):
expected_error = "This password is too common."
self.assertIsNone(CommonPasswordValidator().validate("a-safe-password"))
with self.assertRaisesMessage(ValidationError, expected_error):
CommonPasswordValidator().validate("godzilla")
def test_common_hexed_codes(self):
expected_error = "This password is too common."
common_hexed_passwords = ["asdfjkl:", "ठ:"]
for password in common_hexed_passwords:
with self.subTest(password=password):
with self.assertRaisesMessage(ValidationError, expected_error):
CommonPasswordValidator().validate(password)
def test_validate_custom_list(self):
path = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "common-passwords-custom.txt"
)
validator = CommonPasswordValidator(password_list_path=path)
expected_error = "This password is too common."
self.assertIsNone(validator.validate("a-safe-password"))
with self.assertRaises(ValidationError) as cm:
validator.validate("from-my-custom-list")
self.assertEqual(cm.exception.messages, [expected_error])
self.assertEqual(cm.exception.error_list[0].code, "password_too_common")
def test_validate_django_supplied_file(self):
validator = CommonPasswordValidator()
for password in validator.passwords:
self.assertEqual(password, password.lower())
def test_help_text(self):
self.assertEqual(
CommonPasswordValidator().get_help_text(),
"Your password can’t be a commonly used password.",
)
def test_custom_error(self):
class CustomCommonPasswordValidator(CommonPasswordValidator):
def get_error_message(self):
return "This password has been used too much."
expected_error = "This password has been used too much."
with self.assertRaisesMessage(ValidationError, expected_error):
CustomCommonPasswordValidator().validate("godzilla")
class NumericPasswordValidatorTest(SimpleTestCase):
def test_validate(self):
expected_error = "This password is entirely numeric."
self.assertIsNone(NumericPasswordValidator().validate("a-safe-password"))
with self.assertRaises(ValidationError) as cm:
NumericPasswordValidator().validate("42424242")
self.assertEqual(cm.exception.messages, [expected_error])
self.assertEqual(cm.exception.error_list[0].code, "password_entirely_numeric")
def test_help_text(self):
self.assertEqual(
NumericPasswordValidator().get_help_text(),
"Your password can’t be entirely numeric.",
)
def test_custom_error(self):
class CustomNumericPasswordValidator(NumericPasswordValidator):
def get_error_message(self):
return "This password is all digits."
expected_error = "This password is all digits."
with self.assertRaisesMessage(ValidationError, expected_error):
CustomNumericPasswordValidator().validate("42424242")
class UsernameValidatorsTests(SimpleTestCase):
def test_unicode_validator(self):
valid_usernames = ["joe", "René", "ᴮᴵᴳᴮᴵᴿᴰ", "أحمد"]
invalid_usernames = [
"o'connell",
"عبد ال",
"zerowidth\u200bspace",
"nonbreaking\u00a0space",
"en\u2013dash",
"trailingnewline\u000a",
]
v = validators.UnicodeUsernameValidator()
for valid in valid_usernames:
with self.subTest(valid=valid):
v(valid)
for invalid in invalid_usernames:
with self.subTest(invalid=invalid):
with self.assertRaises(ValidationError):
v(invalid)
def test_ascii_validator(self):
valid_usernames = ["glenn", "GLEnN", "jean-marc"]
invalid_usernames = [
"o'connell",
"Éric",
"jean marc",
"أحمد",
"trailingnewline\n",
]
v = validators.ASCIIUsernameValidator()
for valid in valid_usernames:
with self.subTest(valid=valid):
v(valid)
for invalid in invalid_usernames:
with self.subTest(invalid=invalid):
with self.assertRaises(ValidationError):
v(invalid)
|
./temp_repos/django/django/core/validators.py
|
./temp_repos/django/tests/auth_tests/test_validators.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'RegexValidator'.
Context:
- Class Name: RegexValidator
- Dependencies to Mock: flags, inverse_match, message, allowed_extensions, decimal_places, schemes, limit_value, allowlist, code, regex, offset, max_digits
- Key Imports: ipaddress, math, django.core.exceptions, django.utils.deconstruct, pathlib, django.utils.http, django.utils.ipv6, django.utils.translation, django.utils.regex_helper, urllib.parse
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
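A minimal sketch of the kind of test requested, assuming the standard django.core.validators.RegexValidator API (regex, message, code, inverse_match); most of the attributes listed under "Dependencies to Mock" belong to other validators in the same module, so only the regex-related ones are exercised here:

import unittest
from unittest import mock

from django.core.exceptions import ValidationError
from django.core.validators import RegexValidator


class RegexValidatorSketchTests(unittest.TestCase):
    def test_matching_value_passes(self):
        # Arrange
        validator = RegexValidator(regex=r"^[a-z]+$", message="letters only")
        # Act / Assert: a matching value raises nothing and returns None.
        self.assertIsNone(validator("abc"))

    def test_non_matching_value_raises(self):
        # Arrange
        validator = RegexValidator(regex=r"^[a-z]+$", message="letters only")
        # Act
        with self.assertRaises(ValidationError) as cm:
            validator("ABC123")
        # Assert: the default error code is "invalid".
        self.assertEqual(cm.exception.code, "invalid")

    def test_inverse_match_rejects_matching_value(self):
        # Arrange: inverse_match=True inverts the acceptance logic.
        validator = RegexValidator(
            regex=r"^forbidden$", message="nope", inverse_match=True
        )
        # Act / Assert
        with self.assertRaises(ValidationError):
            validator("forbidden")
        self.assertIsNone(validator("allowed"))

    def test_compiled_regex_is_consulted(self):
        # Arrange: swap the compiled pattern for a mock to isolate the
        # validator's control flow from the re module.
        validator = RegexValidator(regex=r"^[a-z]+$", message="letters only")
        with mock.patch.object(validator, "regex") as mock_regex:
            mock_regex.search.return_value = True
            # Act
            validator("anything")
            # Assert
            mock_regex.search.assert_called_once_with("anything")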
|
RegexValidator
|
python
|
def special(request):
return {"path": request.special_path}
|
from django.contrib.auth import authenticate
from django.contrib.auth.context_processors import PermLookupDict, PermWrapper
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from django.test import SimpleTestCase, TestCase, override_settings
from .settings import AUTH_MIDDLEWARE, AUTH_TEMPLATES
class MockUser:
def __repr__(self):
return "MockUser()"
def has_module_perms(self, perm):
return perm == "mockapp"
def has_perm(self, perm, obj=None):
return perm == "mockapp.someperm"
class PermWrapperTests(SimpleTestCase):
"""
Test some details of the PermWrapper implementation.
"""
class EQLimiterObject:
"""
This object makes sure __eq__ will not be called endlessly.
"""
def __init__(self):
self.eq_calls = 0
def __eq__(self, other):
if self.eq_calls > 0:
return True
self.eq_calls += 1
return False
def test_repr(self):
perms = PermWrapper(MockUser())
self.assertEqual(repr(perms), "PermWrapper(MockUser())")
def test_permwrapper_in(self):
"""
'something' in PermWrapper works as expected.
"""
perms = PermWrapper(MockUser())
# Works for modules and full permissions.
self.assertIn("mockapp", perms)
self.assertNotIn("nonexistent", perms)
self.assertIn("mockapp.someperm", perms)
self.assertNotIn("mockapp.nonexistent", perms)
def test_permlookupdict_in(self):
"""
No endless loops if accessed with 'in' - refs #18979.
"""
pldict = PermLookupDict(MockUser(), "mockapp")
with self.assertRaises(TypeError):
self.EQLimiterObject() in pldict
def test_iter(self):
with self.assertRaisesMessage(TypeError, "PermWrapper is not iterable."):
iter(PermWrapper(MockUser()))
@override_settings(ROOT_URLCONF="auth_tests.urls", TEMPLATES=AUTH_TEMPLATES)
class AuthContextProcessorTests(TestCase):
"""
Tests for the ``django.contrib.auth.context_processors.auth`` processor
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
@override_settings(MIDDLEWARE=AUTH_MIDDLEWARE)
def test_session_not_accessed(self):
"""
The session is not accessed simply by including
the auth context processor
"""
response = self.client.get("/auth_processor_no_attr_access/")
self.assertContains(response, "Session not accessed")
@override_settings(MIDDLEWARE=AUTH_MIDDLEWARE)
def test_session_is_accessed(self):
"""
The session is accessed if the auth context processor
is used and relevant attributes accessed.
"""
response = self.client.get("/auth_processor_attr_access/")
self.assertContains(response, "Session accessed")
def test_perms_attrs(self):
u = User.objects.create_user(username="normal", password="secret")
u.user_permissions.add(
Permission.objects.get(
content_type=ContentType.objects.get_for_model(Permission),
codename="add_permission",
)
)
self.client.force_login(u)
response = self.client.get("/auth_processor_perms/")
self.assertContains(response, "Has auth permissions")
self.assertContains(response, "Has auth.add_permission permissions")
self.assertNotContains(response, "nonexistent")
def test_perm_in_perms_attrs(self):
u = User.objects.create_user(username="normal", password="secret")
u.user_permissions.add(
Permission.objects.get(
content_type=ContentType.objects.get_for_model(Permission),
codename="add_permission",
)
)
self.client.login(username="normal", password="secret")
response = self.client.get("/auth_processor_perm_in_perms/")
self.assertContains(response, "Has auth permissions")
self.assertContains(response, "Has auth.add_permission permissions")
self.assertNotContains(response, "nonexistent")
def test_message_attrs(self):
self.client.force_login(self.superuser)
response = self.client.get("/auth_processor_messages/")
self.assertContains(response, "Message 1")
def test_user_attrs(self):
"""
The lazy objects returned behave just like the wrapped objects.
"""
# These are 'functional' level tests for common use cases. Direct
# testing of the implementation (SimpleLazyObject) is in the 'utils'
# tests.
self.client.login(username="super", password="secret")
user = authenticate(username="super", password="secret")
response = self.client.get("/auth_processor_user/")
self.assertContains(response, "unicode: super")
self.assertContains(response, "id: %d" % self.superuser.pk)
self.assertContains(response, "username: super")
# bug #12037 is tested by the {% url %} in the template:
self.assertContains(response, "url: /userpage/super/")
        # A Q() comparing a user with another Q() (in an AND or OR fashion).
Q(user=response.context["user"]) & Q(someflag=True)
        # Tests for user equality. This is hard because User defines
        # equality in a non-duck-typing way; see bug #12060.
self.assertEqual(response.context["user"], user)
self.assertEqual(user, response.context["user"])
|
./temp_repos/django/tests/test_client_regress/context_processors.py
|
./temp_repos/django/tests/auth_tests/test_context_processors.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports:
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
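With the target class unresolved ('Unknown') and no dependencies detected, only an illustrative sketch can be offered; the one below targets PermWrapper from the context-processor suite above, substituting a MagicMock for the user object:

import unittest
from unittest import mock

from django.contrib.auth.context_processors import PermWrapper


class PermWrapperSketchTests(unittest.TestCase):
    def test_module_permission_success(self):
        # Arrange: a user whose has_module_perms() grants access.
        user = mock.MagicMock()
        user.has_module_perms.return_value = True
        perms = PermWrapper(user)
        # Act / Assert: undotted names are routed to has_module_perms().
        self.assertIn("someapp", perms)
        user.has_module_perms.assert_called_once_with("someapp")

    def test_object_permission_failure(self):
        # Arrange: a user who denies every permission check.
        user = mock.MagicMock()
        user.has_perm.return_value = False
        perms = PermWrapper(user)
        # Act / Assert: dotted names are routed to has_perm().
        self.assertNotIn("someapp.someperm", perms)
        user.has_perm.assert_called_once_with("someapp.someperm")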
|
Unknown
|
python
|
"""
Reverse lookups
This demonstrates the reverse lookup features of the database API.
"""
from django.db import models
class User(models.Model):
name = models.CharField(max_length=200)
class Poll(models.Model):
question = models.CharField(max_length=200)
creator = models.ForeignKey(User, models.CASCADE)
class Choice(models.Model):
name = models.CharField(max_length=100)
poll = models.ForeignKey(Poll, models.CASCADE, related_name="poll_choice")
related_poll = models.ForeignKey(
Poll, models.CASCADE, related_name="related_choice"
)
|
from unittest import mock
from django.conf.global_settings import PASSWORD_HASHERS
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.hashers import get_hasher
from django.contrib.auth.models import (
AnonymousUser,
Group,
Permission,
User,
UserManager,
)
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.db import connection, migrations
from django.db.migrations.state import ModelState, ProjectState
from django.db.models.signals import post_save
from django.test import SimpleTestCase, TestCase, TransactionTestCase, override_settings
from .models import CustomEmailField, IntegerUsernameUser
class NaturalKeysTestCase(TestCase):
def test_user_natural_key(self):
staff_user = User.objects.create_user(username="staff")
self.assertEqual(User.objects.get_by_natural_key("staff"), staff_user)
self.assertEqual(staff_user.natural_key(), ("staff",))
async def test_auser_natural_key(self):
staff_user = await User.objects.acreate_user(username="staff")
self.assertEqual(await User.objects.aget_by_natural_key("staff"), staff_user)
self.assertEqual(staff_user.natural_key(), ("staff",))
def test_group_natural_key(self):
users_group = Group.objects.create(name="users")
self.assertEqual(Group.objects.get_by_natural_key("users"), users_group)
async def test_agroup_natural_key(self):
users_group = await Group.objects.acreate(name="users")
self.assertEqual(await Group.objects.aget_by_natural_key("users"), users_group)
class LoadDataWithoutNaturalKeysTestCase(TestCase):
fixtures = ["regular.json"]
def test_user_is_created_and_added_to_group(self):
user = User.objects.get(username="my_username")
group = Group.objects.get(name="my_group")
self.assertEqual(group, user.groups.get())
class LoadDataWithNaturalKeysTestCase(TestCase):
fixtures = ["natural.json"]
def test_user_is_created_and_added_to_group(self):
user = User.objects.get(username="my_username")
group = Group.objects.get(name="my_group")
self.assertEqual(group, user.groups.get())
class LoadDataWithNaturalKeysAndMultipleDatabasesTestCase(TestCase):
databases = {"default", "other"}
def test_load_data_with_user_permissions(self):
# Create test contenttypes for both databases
default_objects = [
ContentType.objects.db_manager("default").create(
model="examplemodela",
app_label="app_a",
),
ContentType.objects.db_manager("default").create(
model="examplemodelb",
app_label="app_b",
),
]
other_objects = [
ContentType.objects.db_manager("other").create(
model="examplemodelb",
app_label="app_b",
),
ContentType.objects.db_manager("other").create(
model="examplemodela",
app_label="app_a",
),
]
# Now we create the test UserPermission
Permission.objects.db_manager("default").create(
name="Can delete example model b",
codename="delete_examplemodelb",
content_type=default_objects[1],
)
Permission.objects.db_manager("other").create(
name="Can delete example model b",
codename="delete_examplemodelb",
content_type=other_objects[0],
)
perm_default = Permission.objects.get_by_natural_key(
"delete_examplemodelb",
"app_b",
"examplemodelb",
)
perm_other = Permission.objects.db_manager("other").get_by_natural_key(
"delete_examplemodelb",
"app_b",
"examplemodelb",
)
self.assertEqual(perm_default.content_type_id, default_objects[1].id)
self.assertEqual(perm_other.content_type_id, other_objects[0].id)
class UserManagerTestCase(TransactionTestCase):
available_apps = [
"auth_tests",
"django.contrib.auth",
"django.contrib.contenttypes",
]
def test_create_user(self):
email_lowercase = "[email protected]"
user = User.objects.create_user("user", email_lowercase)
self.assertEqual(user.email, email_lowercase)
self.assertEqual(user.username, "user")
self.assertFalse(user.has_usable_password())
def test_create_user_email_domain_normalize_rfc3696(self):
# According to RFC 3696 Section 3 the "@" symbol can be part of the
# local part of an email address.
returned = UserManager.normalize_email(r"Abc\@[email protected]")
self.assertEqual(returned, r"Abc\@[email protected]")
def test_create_user_email_domain_normalize(self):
returned = UserManager.normalize_email("[email protected]")
self.assertEqual(returned, "[email protected]")
def test_create_user_email_domain_normalize_with_whitespace(self):
returned = UserManager.normalize_email(r"email\ [email protected]")
self.assertEqual(returned, r"email\ [email protected]")
def test_empty_username(self):
with self.assertRaisesMessage(ValueError, "The given username must be set"):
User.objects.create_user(username="")
def test_create_user_is_staff(self):
email = "[email protected]"
user = User.objects.create_user("user", email, is_staff=True)
self.assertEqual(user.email, email)
self.assertEqual(user.username, "user")
self.assertTrue(user.is_staff)
def test_create_super_user_raises_error_on_false_is_superuser(self):
with self.assertRaisesMessage(
ValueError, "Superuser must have is_superuser=True."
):
User.objects.create_superuser(
username="test",
email="[email protected]",
password="test",
is_superuser=False,
)
async def test_acreate_super_user_raises_error_on_false_is_superuser(self):
with self.assertRaisesMessage(
ValueError, "Superuser must have is_superuser=True."
):
await User.objects.acreate_superuser(
username="test",
email="[email protected]",
password="test",
is_superuser=False,
)
def test_create_superuser_raises_error_on_false_is_staff(self):
with self.assertRaisesMessage(ValueError, "Superuser must have is_staff=True."):
User.objects.create_superuser(
username="test",
email="[email protected]",
password="test",
is_staff=False,
)
async def test_acreate_superuser_raises_error_on_false_is_staff(self):
with self.assertRaisesMessage(ValueError, "Superuser must have is_staff=True."):
await User.objects.acreate_superuser(
username="test",
email="[email protected]",
password="test",
is_staff=False,
)
def test_runpython_manager_methods(self):
def forwards(apps, schema_editor):
UserModel = apps.get_model("auth", "User")
user = UserModel.objects.create_user("user1", password="secure")
self.assertIsInstance(user, UserModel)
operation = migrations.RunPython(forwards, migrations.RunPython.noop)
project_state = ProjectState()
project_state.add_model(ModelState.from_model(User))
project_state.add_model(ModelState.from_model(Group))
project_state.add_model(ModelState.from_model(Permission))
project_state.add_model(ModelState.from_model(ContentType))
new_state = project_state.clone()
with connection.schema_editor() as editor:
operation.state_forwards("test_manager_methods", new_state)
operation.database_forwards(
"test_manager_methods",
editor,
project_state,
new_state,
)
user = User.objects.get(username="user1")
self.assertTrue(user.check_password("secure"))
class AbstractBaseUserTests(SimpleTestCase):
def test_has_usable_password(self):
"""
Passwords are usable even if they don't correspond to a hasher in
settings.PASSWORD_HASHERS.
"""
self.assertIs(User(password="some-gibbberish").has_usable_password(), True)
def test_normalize_username(self):
self.assertEqual(IntegerUsernameUser().normalize_username(123), 123)
def test_clean_normalize_username(self):
# The normalization happens in AbstractBaseUser.clean()
ohm_username = "iamtheΩ" # U+2126 OHM SIGN
for model in ("auth.User", "auth_tests.CustomUser"):
with self.subTest(model=model), self.settings(AUTH_USER_MODEL=model):
User = get_user_model()
user = User(**{User.USERNAME_FIELD: ohm_username, "password": "foo"})
user.clean()
username = user.get_username()
self.assertNotEqual(username, ohm_username)
self.assertEqual(
username, "iamtheΩ"
) # U+03A9 GREEK CAPITAL LETTER OMEGA
def test_default_email(self):
self.assertEqual(AbstractBaseUser.get_email_field_name(), "email")
def test_custom_email(self):
user = CustomEmailField()
self.assertEqual(user.get_email_field_name(), "email_address")
class AbstractUserTestCase(TestCase):
def test_email_user(self):
# valid send_mail parameters
kwargs = {
"fail_silently": False,
"auth_user": None,
"auth_password": None,
"connection": None,
"html_message": None,
}
user = User(email="[email protected]")
user.email_user(
subject="Subject here",
message="This is a message",
from_email="[email protected]",
**kwargs,
)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0]
self.assertEqual(message.subject, "Subject here")
self.assertEqual(message.body, "This is a message")
self.assertEqual(message.from_email, "[email protected]")
self.assertEqual(message.to, [user.email])
def test_last_login_default(self):
user1 = User.objects.create(username="user1")
self.assertIsNone(user1.last_login)
user2 = User.objects.create_user(username="user2")
self.assertIsNone(user2.last_login)
def test_user_clean_normalize_email(self):
user = User(username="user", password="foo", email="[email protected]")
user.clean()
self.assertEqual(user.email, "[email protected]")
def test_user_double_save(self):
"""
Calling user.save() twice should trigger password_changed() once.
"""
user = User.objects.create_user(username="user", password="foo")
user.set_password("bar")
with mock.patch(
"django.contrib.auth.password_validation.password_changed"
) as pw_changed:
user.save()
self.assertEqual(pw_changed.call_count, 1)
user.save()
self.assertEqual(pw_changed.call_count, 1)
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
def test_check_password_upgrade(self):
"""
password_changed() shouldn't be called if User.check_password()
triggers a hash iteration upgrade.
"""
user = User.objects.create_user(username="user", password="foo")
initial_password = user.password
self.assertTrue(user.check_password("foo"))
hasher = get_hasher("default")
self.assertEqual("pbkdf2_sha256", hasher.algorithm)
old_iterations = hasher.iterations
try:
# Upgrade the password iterations
hasher.iterations = old_iterations + 1
with mock.patch(
"django.contrib.auth.password_validation.password_changed"
) as pw_changed:
user.check_password("foo")
self.assertEqual(pw_changed.call_count, 0)
self.assertNotEqual(initial_password, user.password)
finally:
hasher.iterations = old_iterations
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
async def test_acheck_password_upgrade(self):
user = await User.objects.acreate_user(username="user", password="foo")
initial_password = user.password
self.assertIs(await user.acheck_password("foo"), True)
hasher = get_hasher("default")
self.assertEqual("pbkdf2_sha256", hasher.algorithm)
old_iterations = hasher.iterations
try:
# Upgrade the password iterations.
hasher.iterations = old_iterations + 1
with mock.patch(
"django.contrib.auth.password_validation.password_changed"
) as pw_changed:
self.assertIs(await user.acheck_password("foo"), True)
self.assertEqual(pw_changed.call_count, 0)
self.assertNotEqual(initial_password, user.password)
finally:
hasher.iterations = old_iterations
class CustomModelBackend(ModelBackend):
def with_perm(
self, perm, is_active=True, include_superusers=True, backend=None, obj=None
):
if obj is not None and obj.username == "charliebrown":
return User.objects.filter(pk=obj.pk)
return User.objects.filter(username__startswith="charlie")
class UserWithPermTestCase(TestCase):
@classmethod
def setUpTestData(cls):
content_type = ContentType.objects.get_for_model(Group)
cls.permission = Permission.objects.create(
name="test",
content_type=content_type,
codename="test",
)
# User with permission.
cls.user1 = User.objects.create_user("user 1", "[email protected]")
cls.user1.user_permissions.add(cls.permission)
# User with group permission.
group1 = Group.objects.create(name="group 1")
group1.permissions.add(cls.permission)
group2 = Group.objects.create(name="group 2")
group2.permissions.add(cls.permission)
cls.user2 = User.objects.create_user("user 2", "[email protected]")
cls.user2.groups.add(group1, group2)
# Users without permissions.
cls.user_charlie = User.objects.create_user("charlie", "[email protected]")
cls.user_charlie_b = User.objects.create_user(
"charliebrown", "[email protected]"
)
# Superuser.
cls.superuser = User.objects.create_superuser(
"superuser",
"[email protected]",
"superpassword",
)
# Inactive user with permission.
cls.inactive_user = User.objects.create_user(
"inactive_user",
"[email protected]",
is_active=False,
)
cls.inactive_user.user_permissions.add(cls.permission)
def test_invalid_permission_name(self):
msg = "Permission name should be in the form app_label.permission_codename."
for perm in ("nodots", "too.many.dots", "...", ""):
with self.subTest(perm), self.assertRaisesMessage(ValueError, msg):
User.objects.with_perm(perm)
def test_invalid_permission_type(self):
msg = "The `perm` argument must be a string or a permission instance."
for perm in (b"auth.test", object(), None):
with self.subTest(perm), self.assertRaisesMessage(TypeError, msg):
User.objects.with_perm(perm)
def test_invalid_backend_type(self):
msg = "backend must be a dotted import path string (got %r)."
for backend in (b"auth_tests.CustomModelBackend", object()):
with self.subTest(backend):
with self.assertRaisesMessage(TypeError, msg % backend):
User.objects.with_perm("auth.test", backend=backend)
def test_basic(self):
active_users = [self.user1, self.user2]
tests = [
({}, [*active_users, self.superuser]),
({"obj": self.user1}, []),
# Only inactive users.
({"is_active": False}, [self.inactive_user]),
# All users.
({"is_active": None}, [*active_users, self.superuser, self.inactive_user]),
# Exclude superusers.
({"include_superusers": False}, active_users),
(
{"include_superusers": False, "is_active": False},
[self.inactive_user],
),
(
{"include_superusers": False, "is_active": None},
[*active_users, self.inactive_user],
),
]
for kwargs, expected_users in tests:
for perm in ("auth.test", self.permission):
with self.subTest(perm=perm, **kwargs):
self.assertCountEqual(
User.objects.with_perm(perm, **kwargs),
expected_users,
)
@override_settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.BaseBackend"]
)
def test_backend_without_with_perm(self):
self.assertSequenceEqual(User.objects.with_perm("auth.test"), [])
def test_nonexistent_permission(self):
self.assertSequenceEqual(User.objects.with_perm("auth.perm"), [self.superuser])
def test_nonexistent_backend(self):
with self.assertRaises(ImportError):
User.objects.with_perm(
"auth.test",
backend="invalid.backend.CustomModelBackend",
)
def test_invalid_backend_submodule(self):
with self.assertRaises(ImportError):
User.objects.with_perm(
"auth.test",
backend="json.tool",
)
@override_settings(
AUTHENTICATION_BACKENDS=["auth_tests.test_models.CustomModelBackend"]
)
def test_custom_backend(self):
for perm in ("auth.test", self.permission):
with self.subTest(perm):
self.assertCountEqual(
User.objects.with_perm(perm),
[self.user_charlie, self.user_charlie_b],
)
@override_settings(
AUTHENTICATION_BACKENDS=["auth_tests.test_models.CustomModelBackend"]
)
def test_custom_backend_pass_obj(self):
for perm in ("auth.test", self.permission):
with self.subTest(perm):
self.assertSequenceEqual(
User.objects.with_perm(perm, obj=self.user_charlie_b),
[self.user_charlie_b],
)
@override_settings(
AUTHENTICATION_BACKENDS=[
"auth_tests.test_models.CustomModelBackend",
"django.contrib.auth.backends.ModelBackend",
]
)
def test_multiple_backends(self):
msg = (
"You have multiple authentication backends configured and "
"therefore must provide the `backend` argument."
)
with self.assertRaisesMessage(ValueError, msg):
User.objects.with_perm("auth.test")
backend = "auth_tests.test_models.CustomModelBackend"
self.assertCountEqual(
User.objects.with_perm("auth.test", backend=backend),
[self.user_charlie, self.user_charlie_b],
)
class IsActiveTestCase(TestCase):
"""
Tests the behavior of the guaranteed is_active attribute
"""
def test_builtin_user_isactive(self):
user = User.objects.create(username="foo", email="[email protected]")
# is_active is true by default
self.assertIs(user.is_active, True)
user.is_active = False
user.save()
user_fetched = User.objects.get(pk=user.pk)
# the is_active flag is saved
self.assertFalse(user_fetched.is_active)
@override_settings(AUTH_USER_MODEL="auth_tests.IsActiveTestUser1")
def test_is_active_field_default(self):
"""
        Tests that the default value for is_active is provided.
"""
UserModel = get_user_model()
user = UserModel(username="foo")
self.assertIs(user.is_active, True)
# you can set the attribute - but it will not save
user.is_active = False
# there should be no problem saving - but the attribute is not saved
user.save()
user_fetched = UserModel._default_manager.get(pk=user.pk)
# the attribute is always true for newly retrieved instance
self.assertIs(user_fetched.is_active, True)
class TestCreateSuperUserSignals(TestCase):
"""
Simple test case for ticket #20541
"""
def post_save_listener(self, *args, **kwargs):
self.signals_count += 1
def setUp(self):
self.signals_count = 0
post_save.connect(self.post_save_listener, sender=User)
self.addCleanup(post_save.disconnect, self.post_save_listener, sender=User)
def test_create_user(self):
User.objects.create_user("JohnDoe")
self.assertEqual(self.signals_count, 1)
def test_create_superuser(self):
User.objects.create_superuser("JohnDoe", "[email protected]", "1")
self.assertEqual(self.signals_count, 1)
class AnonymousUserTests(SimpleTestCase):
no_repr_msg = "Django doesn't provide a DB representation for AnonymousUser."
def setUp(self):
self.user = AnonymousUser()
def test_properties(self):
self.assertIsNone(self.user.pk)
self.assertEqual(self.user.username, "")
self.assertEqual(self.user.get_username(), "")
self.assertIs(self.user.is_anonymous, True)
self.assertIs(self.user.is_authenticated, False)
self.assertIs(self.user.is_staff, False)
self.assertIs(self.user.is_active, False)
self.assertIs(self.user.is_superuser, False)
self.assertEqual(self.user.groups.count(), 0)
self.assertEqual(self.user.user_permissions.count(), 0)
self.assertEqual(self.user.get_user_permissions(), set())
self.assertEqual(self.user.get_group_permissions(), set())
async def test_properties_async_versions(self):
self.assertEqual(await self.user.groups.acount(), 0)
self.assertEqual(await self.user.user_permissions.acount(), 0)
self.assertEqual(await self.user.aget_user_permissions(), set())
self.assertEqual(await self.user.aget_group_permissions(), set())
def test_str(self):
self.assertEqual(str(self.user), "AnonymousUser")
def test_eq(self):
self.assertEqual(self.user, AnonymousUser())
self.assertNotEqual(self.user, User("super", "[email protected]", "super"))
def test_hash(self):
self.assertEqual(hash(self.user), 1)
def test_int(self):
msg = (
"Cannot cast AnonymousUser to int. Are you trying to use it in "
"place of User?"
)
with self.assertRaisesMessage(TypeError, msg):
int(self.user)
def test_delete(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.delete()
def test_save(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.save()
def test_set_password(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.set_password("password")
def test_check_password(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.check_password("password")
class GroupTests(SimpleTestCase):
def test_str(self):
g = Group(name="Users")
self.assertEqual(str(g), "Users")
class PermissionTests(TestCase):
def test_str(self):
p = Permission.objects.get(codename="view_customemailfield")
self.assertEqual(
str(p), "Auth_Tests | custom email field | Can view custom email field"
)
|
./temp_repos/django/tests/reverse_lookup/models.py
|
./temp_repos/django/tests/auth_tests/test_models.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'User'.
Context:
- Class Name: User
- Dependencies to Mock: None detected
- Key Imports: django.db
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
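A minimal sketch for the reverse_lookup User model shown above, assuming a configured Django test environment with that app installed (the import path is an assumption); field-metadata checks keep the database out of the loop, and a patched save() supplies the failure scenario:

import unittest
from unittest import mock

from reverse_lookup.models import User  # assumed import path for the test app


class UserModelSketchTests(unittest.TestCase):
    def test_name_field_max_length(self):
        # Arrange / Act: inspect field metadata without touching the DB.
        field = User._meta.get_field("name")
        # Assert
        self.assertEqual(field.max_length, 200)

    def test_unsaved_instance(self):
        # Arrange / Act
        user = User(name="alice")
        # Assert: no primary key is assigned until save() runs.
        self.assertIsNone(user.pk)
        self.assertEqual(user.name, "alice")

    def test_save_failure_propagates(self):
        # Arrange: simulate a database outage by patching save().
        user = User(name="alice")
        with mock.patch.object(User, "save", side_effect=RuntimeError("db down")):
            # Act / Assert
            with self.assertRaises(RuntimeError):
                user.save()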
|
User
|
python
|
from django.http import HttpResponse
def empty_view(request, *args, **kwargs):
return HttpResponse()
|
import datetime
import itertools
import re
from importlib import import_module
from unittest import mock
from urllib.parse import quote, urljoin
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import BACKEND_SESSION_KEY, REDIRECT_FIELD_NAME, SESSION_KEY
from django.contrib.auth.forms import (
AuthenticationForm,
PasswordChangeForm,
SetPasswordForm,
)
from django.contrib.auth.models import Permission, User
from django.contrib.auth.views import (
INTERNAL_RESET_SESSION_TOKEN,
LoginView,
RedirectURLMixin,
logout_then_login,
redirect_to_login,
)
from django.contrib.contenttypes.models import ContentType
from django.contrib.messages import Message
from django.contrib.messages.test import MessagesTestMixin
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from django.http import HttpRequest, HttpResponse
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import Client, TestCase, modify_settings, override_settings
from django.test.client import RedirectCycleError
from django.urls import NoReverseMatch, reverse, reverse_lazy
from django.utils.http import urlsafe_base64_encode
from .client import PasswordResetConfirmClient
from .models import CustomUser, CustomUserCompositePrimaryKey, UUIDUser
from .settings import AUTH_TEMPLATES
class RedirectURLMixinTests(TestCase):
@override_settings(ROOT_URLCONF="auth_tests.urls")
def test_get_default_redirect_url_next_page(self):
class RedirectURLView(RedirectURLMixin):
next_page = "/custom/"
self.assertEqual(RedirectURLView().get_default_redirect_url(), "/custom/")
def test_get_default_redirect_url_no_next_page(self):
msg = "No URL to redirect to. Provide a next_page."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
RedirectURLMixin().get_default_redirect_url()
@override_settings(
LANGUAGES=[("en", "English")],
LANGUAGE_CODE="en",
TEMPLATES=AUTH_TEMPLATES,
ROOT_URLCONF="auth_tests.urls",
)
class AuthViewsTestCase(TestCase):
"""
Helper base class for the test classes that follow.
"""
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(
username="testclient", password="password", email="[email protected]"
)
cls.u3 = User.objects.create_user(
username="staff", password="password", email="[email protected]"
)
def login(self, username="testclient", password="password", url="/login/"):
response = self.client.post(
url,
{
"username": username,
"password": password,
},
)
self.assertIn(SESSION_KEY, self.client.session)
return response
def logout(self):
response = self.client.post("/admin/logout/")
self.assertEqual(response.status_code, 200)
self.assertNotIn(SESSION_KEY, self.client.session)
def assertFormError(self, response, error):
"""Assert that error is found in response.context['form'] errors"""
form_errors = list(itertools.chain(*response.context["form"].errors.values()))
self.assertIn(str(error), form_errors)
@override_settings(ROOT_URLCONF="django.contrib.auth.urls")
class AuthViewNamedURLTests(AuthViewsTestCase):
def test_named_urls(self):
"Named URLs should be reversible"
expected_named_urls = [
("login", [], {}),
("logout", [], {}),
("password_change", [], {}),
("password_change_done", [], {}),
("password_reset", [], {}),
("password_reset_done", [], {}),
(
"password_reset_confirm",
[],
{
"uidb64": "aaaaaaa",
"token": "1111-aaaaa",
},
),
("password_reset_complete", [], {}),
]
for name, args, kwargs in expected_named_urls:
with self.subTest(name=name):
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail(
"Reversal of url named '%s' failed with NoReverseMatch" % name
)
class PasswordResetTest(AuthViewsTestCase):
def setUp(self):
self.client = PasswordResetConfirmClient()
def test_email_not_found(self):
"""If the provided email is not registered, don't raise any error but
also don't send any email."""
response = self.client.get("/password_reset/")
self.assertEqual(response.status_code, 200)
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("http://", mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
        # An optional multipart text/html email has been added. Make sure the
        # original, default functionality is 100% the same.
self.assertFalse(mail.outbox[0].message().is_multipart())
def test_extra_email_context(self):
"""
extra_email_context should be available in the email template context.
"""
response = self.client.post(
"/password_reset_extra_email_context/",
{"email": "[email protected]"},
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)
self.assertIn("http://custom.example.com/reset/", mail.outbox[0].body)
def test_html_mail_template(self):
"""
A multipart email with text/plain and text/html is sent
if the html_email_template parameter is passed to the view
"""
response = self.client.post(
"/password_reset/html_email_template/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0].message()
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), "text/plain")
self.assertEqual(message.get_payload(1).get_content_type(), "text/html")
self.assertNotIn("<html>", message.get_payload(0).get_payload())
self.assertIn("<html>", message.get_payload(1).get_payload())
def test_email_found_custom_from(self):
"""
Email is sent if a valid email address is provided for password reset
when a custom from_email is provided.
"""
response = self.client.post(
"/password_reset_from_email/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("[email protected]", mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
"Poisoned HTTP_HOST headers can't be used for reset emails"
# This attack is based on the way browsers handle URLs. The colon
# should be used to separate the port, but if the URL contains an @,
# the colon is interpreted as part of a username for login purposes,
# making 'evil.com' the request domain. Since HTTP_HOST is used to
# produce a meaningful reset URL, we need to be certain that the
# HTTP_HOST header isn't poisoned. This is done as a check when
# get_host() is invoked, but we check here as a practical consequence.
with self.assertLogs("django.security.DisallowedHost", "ERROR"):
response = self.client.post(
"/password_reset/",
{"email": "[email protected]"},
headers={"host": "www.example:[email protected]"},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
"""
Poisoned HTTP_HOST headers can't be used for reset emails on admin
views
"""
with self.assertLogs("django.security.DisallowedHost", "ERROR"):
response = self.client.post(
"/admin_password_reset/",
{"email": "[email protected]"},
headers={"host": "www.example:[email protected]"},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
def _test_confirm_start(self):
# Start by creating the email
self.client.post("/password_reset/", {"email": "[email protected]"})
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch[0], urlmatch[1]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
# Let's munge the token in the path, but keep the same length,
# in case the URLconf will reject a different length.
path = path[:-5] + ("0" * 4) + path[-1]
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_user(self):
# A nonexistent user returns a 200 response, not a 404.
response = self.client.get("/reset/123456/1-1/")
self.assertContains(response, "The password reset link was invalid")
def test_confirm_overflow_user(self):
# A base36 user id that overflows int returns a 200 response.
response = self.client.get("/reset/zzzzzzzzzzzzz/1-1/")
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0" * 4) + path[-1]
self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": " anewpassword",
},
)
# Check the password has not been changed
u = User.objects.get(email="[email protected]")
        self.assertFalse(u.check_password("anewpassword"))
def test_confirm_invalid_hash(self):
"""A POST with an invalid token is rejected."""
u = User.objects.get(email="[email protected]")
original_password = u.password
url, path = self._test_confirm_start()
path_parts = path.split("-")
        path_parts[-1] = "0" * 20 + "/"
path = "-".join(path_parts)
response = self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": "anewpassword",
},
)
self.assertIs(response.context["validlink"], False)
u.refresh_from_db()
self.assertEqual(original_password, u.password) # password hasn't changed
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
# Check the password has been changed
u = User.objects.get(email="[email protected]")
self.assertTrue(u.check_password("anewpassword"))
# The reset token is deleted from the session.
self.assertNotIn(INTERNAL_RESET_SESSION_TOKEN, self.client.session)
# Check we can't use the link again
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "x"}
)
self.assertFormError(
response, SetPasswordForm.error_messages["password_mismatch"]
)
def test_reset_redirect_default(self):
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertRedirects(
response, "/password_reset/done/", fetch_redirect_response=False
)
def test_reset_custom_redirect(self):
response = self.client.post(
"/password_reset/custom_redirect/", {"email": "[email protected]"}
)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_reset_custom_redirect_named(self):
response = self.client.post(
"/password_reset/custom_redirect/named/",
{"email": "[email protected]"},
)
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
def test_confirm_redirect_default(self):
url, path = self._test_confirm_start()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
def test_confirm_redirect_custom(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_confirm_redirect_custom_named(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/named/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
def test_confirm_custom_reset_url_token(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/token/")
self.client.reset_url_token = "set-passwordcustom"
response = self.client.post(
path,
{"new_password1": "anewpassword", "new_password2": "anewpassword"},
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
def test_confirm_login_post_reset(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/post_reset_login/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.ModelBackend",
"django.contrib.auth.backends.AllowAllUsersModelBackend",
]
)
def test_confirm_login_post_reset_custom_backend(self):
# This backend is specified in the URL pattern.
backend = "django.contrib.auth.backends.AllowAllUsersModelBackend"
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/post_reset_login_custom_backend/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
self.assertEqual(self.client.session[BACKEND_SESSION_KEY], backend)
def test_confirm_login_post_reset_already_logged_in(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/post_reset_login/")
self.login()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
def test_confirm_display_user_from_form(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# The password_reset_confirm() view passes the user object to the
        # ``SetPasswordForm``, even on GET requests (#16919). For this test,
        # ``{{ form.user }}`` is rendered in the template
# registration/password_reset_confirm.html.
username = User.objects.get(email="[email protected]").username
self.assertContains(response, "Hello, %s." % username)
# However, the view should NOT pass any user object on a form if the
# password reset link was invalid.
response = self.client.get("/reset/zzzzzzzzzzzzz/1-1/")
self.assertContains(response, "Hello, .")
def test_confirm_link_redirects_to_set_password_page(self):
url, path = self._test_confirm_start()
# Don't use PasswordResetConfirmClient (self.client) here which
# automatically fetches the redirect page.
client = Client()
response = client.get(path)
token = response.resolver_match.kwargs["token"]
uuidb64 = response.resolver_match.kwargs["uidb64"]
self.assertRedirects(response, "/reset/%s/set-password/" % uuidb64)
self.assertEqual(client.session["_password_reset_token"], token)
def test_confirm_custom_reset_url_token_link_redirects_to_set_password_page(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/token/")
client = Client()
response = client.get(path)
token = response.resolver_match.kwargs["token"]
uuidb64 = response.resolver_match.kwargs["uidb64"]
self.assertRedirects(
response, "/reset/custom/token/%s/set-passwordcustom/" % uuidb64
)
self.assertEqual(client.session["_password_reset_token"], token)
def test_invalid_link_if_going_directly_to_the_final_reset_password_url(self):
url, path = self._test_confirm_start()
_, uuidb64, _ = path.strip("/").split("/")
response = Client().get("/reset/%s/set-password/" % uuidb64)
self.assertContains(response, "The password reset link was invalid")
def test_missing_kwargs(self):
msg = "The URL path must contain 'uidb64' and 'token' parameters."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/reset/missing_parameters/")
@modify_settings(
MIDDLEWARE={"append": "django.contrib.auth.middleware.LoginRequiredMiddleware"}
)
def test_access_under_login_required_middleware(self):
reset_urls = [
reverse("password_reset"),
reverse("password_reset_done"),
reverse("password_reset_confirm", kwargs={"uidb64": "abc", "token": "def"}),
reverse("password_reset_complete"),
]
for url in reset_urls:
with self.subTest(url=url):
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertRedirects(
response, "/password_reset/done/", fetch_redirect_response=False
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
class CustomUserPasswordResetTest(AuthViewsTestCase):
user_email = "[email protected]"
@classmethod
def setUpTestData(cls):
cls.u1 = CustomUser.custom_objects.create(
email="[email protected]",
date_of_birth=datetime.date(1976, 11, 8),
)
cls.u1.set_password("password")
cls.u1.save()
def setUp(self):
self.client = PasswordResetConfirmClient()
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post("/password_reset/", {"email": self.user_email})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch[0], urlmatch[1]
def test_confirm_valid_custom_user(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
# then submit a new password
response = self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": "anewpassword",
},
)
self.assertRedirects(response, "/reset/done/")
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserCompositePrimaryKey")
class CustomUserCompositePrimaryKeyPasswordResetTest(CustomUserPasswordResetTest):
@classmethod
def setUpTestData(cls):
cls.u1 = CustomUserCompositePrimaryKey.custom_objects.create(
email="[email protected]",
date_of_birth=datetime.date(1976, 11, 8),
)
cls.u1.set_password("password")
cls.u1.save()
@override_settings(AUTH_USER_MODEL="auth_tests.UUIDUser")
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):
def _test_confirm_start(self):
# instead of fixture
UUIDUser.objects.create_user(
email=self.user_email,
username="foo",
password="foo",
)
return super()._test_confirm_start()
def test_confirm_invalid_uuid(self):
"""A uidb64 that decodes to a non-UUID doesn't crash."""
_, path = self._test_confirm_start()
invalid_uidb64 = urlsafe_base64_encode(b"INVALID_UUID")
first, _uuidb64_, second = path.strip("/").split("/")
response = self.client.get(
"/" + "/".join((first, invalid_uidb64, second)) + "/"
)
self.assertContains(response, "The password reset link was invalid")
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self):
response = self.client.post(
"/login/",
{
"username": "testclient",
"password": "password",
},
)
self.assertFormError(
response,
AuthenticationForm.error_messages["invalid_login"]
% {"username": User._meta.get_field("username").verbose_name},
)
def logout(self):
self.client.post("/logout/")
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "donuts",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertFormError(
response, PasswordChangeForm.error_messages["password_incorrect"]
)
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "donuts",
},
)
self.assertFormError(
response, SetPasswordForm.error_messages["password_mismatch"]
)
def test_password_change_succeeds(self):
self.login()
self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.fail_login()
self.login(password="password1")
def test_password_change_done_succeeds(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response, "/password_change/done/", fetch_redirect_response=False
)
@override_settings(LOGIN_URL="/login/")
def test_password_change_done_fails(self):
response = self.client.get("/password_change/done/")
self.assertRedirects(
response,
"/login/?next=/password_change/done/",
fetch_redirect_response=False,
)
def test_password_change_redirect_default(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response, "/password_change/done/", fetch_redirect_response=False
)
def test_password_change_redirect_custom(self):
self.login()
response = self.client.post(
"/password_change/custom/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_password_change_redirect_custom_named(self):
self.login()
response = self.client.post(
"/password_change/custom/named/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
@modify_settings(
MIDDLEWARE={"append": "django.contrib.auth.middleware.LoginRequiredMiddleware"}
)
def test_access_under_login_required_middleware(self):
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response,
settings.LOGIN_URL + "?next=/password_change/",
fetch_redirect_response=False,
)
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response, "/password_change/done/", fetch_redirect_response=False
)
class SessionAuthenticationTests(AuthViewsTestCase):
def test_user_password_change_updates_session(self):
"""
#21649 - Ensure contrib.auth.views.password_change updates the user's
session auth hash after a password change so the session isn't logged
out.
"""
self.login()
original_session_key = self.client.session.session_key
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
# if the hash isn't updated, retrieving the redirection page will fail.
self.assertRedirects(response, "/password_change/done/")
# The session key is rotated.
self.assertNotEqual(original_session_key, self.client.session.session_key)
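# Hedged sketch of the mechanism asserted above: PasswordChangeView calls
# update_session_auth_hash() after saving the form. This function is
# illustrative only, not Django's actual view code.
def _password_change_view_sketch(request, form):
    from django.contrib.auth import update_session_auth_hash

    user = form.save()
    # Rotating the session auth hash (and session key) avoids logging the
    # user out of their current session.
    update_session_auth_hash(request, user)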
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse("login"))
self.assertEqual(response.status_code, 200)
if apps.is_installed("django.contrib.sites"):
Site = apps.get_model("sites.Site")
site = Site.objects.get_current()
self.assertEqual(response.context["site"], site)
self.assertEqual(response.context["site_name"], site.name)
else:
self.assertIsInstance(response.context["site"], RequestSite)
self.assertIsInstance(response.context["form"], AuthenticationForm)
def test_security_check(self):
login_url = reverse("login")
# These URLs should not pass the security check.
bad_urls = (
"http://example.com",
"http:///example.com",
"https://example.com",
"ftp://example.com",
"///example.com",
"//example.com",
'javascript:alert("XSS")',
)
for bad_url in bad_urls:
with self.subTest(bad_url=bad_url):
nasty_url = "%(url)s?%(next)s=%(bad_url)s" % {
"url": login_url,
"next": REDIRECT_FIELD_NAME,
"bad_url": quote(bad_url),
}
response = self.client.post(
nasty_url,
{
"username": "testclient",
"password": "password",
},
)
self.assertEqual(response.status_code, 302)
self.assertNotIn(
bad_url, response.url, "%s should be blocked" % bad_url
)
# These URLs should pass the security check.
good_urls = (
"/view/?param=http://example.com",
"/view/?param=https://example.com",
"/view?param=ftp://example.com",
"view/?param=//example.com",
"https://testserver/",
"HTTPS://testserver/",
"//testserver/",
"/url%20with%20spaces/",
)
for good_url in good_urls:
with self.subTest(good_url=good_url):
safe_url = "%(url)s?%(next)s=%(good_url)s" % {
"url": login_url,
"next": REDIRECT_FIELD_NAME,
"good_url": quote(good_url),
}
response = self.client.post(
safe_url,
{
"username": "testclient",
"password": "password",
},
)
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
def test_security_check_https(self):
login_url = reverse("login")
non_https_next_url = "http://testserver/path"
not_secured_url = "%(url)s?%(next)s=%(next_url)s" % {
"url": login_url,
"next": REDIRECT_FIELD_NAME,
"next_url": quote(non_https_next_url),
}
post_data = {
"username": "testclient",
"password": "password",
}
response = self.client.post(not_secured_url, post_data, secure=True)
self.assertEqual(response.status_code, 302)
self.assertNotEqual(response.url, non_https_next_url)
self.assertEqual(response.url, settings.LOGIN_REDIRECT_URL)
def test_login_form_contains_request(self):
# The custom authentication form for this login requires a request to
# initialize it.
response = self.client.post(
"/custom_request_auth_login/",
{
"username": "testclient",
"password": "password",
},
)
# The login was successful.
self.assertRedirects(
response, settings.LOGIN_REDIRECT_URL, fetch_redirect_response=False
)
def test_login_csrf_rotate(self):
"""
Makes sure that a login rotates the currently-used CSRF token.
"""
def get_response(request):
return HttpResponse()
# Do a GET to establish a CSRF token
# The test client isn't used here as it's a test for middleware.
req = HttpRequest()
CsrfViewMiddleware(get_response).process_view(req, LoginView.as_view(), (), {})
# get_token() triggers CSRF token inclusion in the response
get_token(req)
resp = CsrfViewMiddleware(LoginView.as_view())(req)
csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
token1 = csrf_cookie.coded_value
# Prepare the POST request
req = HttpRequest()
req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
req.method = "POST"
req.POST = {
"username": "testclient",
"password": "password",
"csrfmiddlewaretoken": token1,
}
# Use POST request to log in
SessionMiddleware(get_response).process_request(req)
CsrfViewMiddleware(get_response).process_view(req, LoginView.as_view(), (), {})
req.META["SERVER_NAME"] = (
"testserver" # Required to have redirect work in login view
)
req.META["SERVER_PORT"] = 80
resp = CsrfViewMiddleware(LoginView.as_view())(req)
csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
token2 = csrf_cookie.coded_value
# Check the CSRF token switched
self.assertNotEqual(token1, token2)
def test_session_key_flushed_on_login(self):
"""
To avoid reusing another user's session, ensure a new, empty session is
created if the existing session corresponds to a different
authenticated user.
"""
self.login()
original_session_key = self.client.session.session_key
self.login(username="staff")
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_session_key_flushed_on_login_after_password_change(self):
"""
As above, but same user logging in after a password change.
"""
self.login()
original_session_key = self.client.session.session_key
# If no password change, session key should not be flushed.
self.login()
self.assertEqual(original_session_key, self.client.session.session_key)
user = User.objects.get(username="testclient")
user.set_password("foobar")
user.save()
self.login(password="foobar")
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_login_session_without_hash_session_key(self):
"""
        A session without django.contrib.auth.HASH_SESSION_KEY should allow
        logging in without raising an exception.
"""
user = User.objects.get(username="testclient")
engine = import_module(settings.SESSION_ENGINE)
session = engine.SessionStore()
session[SESSION_KEY] = user.id
session.save()
original_session_key = session.session_key
self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key
self.login()
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_login_get_default_redirect_url(self):
response = self.login(url="/login/get_default_redirect_url/")
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_login_next_page(self):
response = self.login(url="/login/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
def test_login_named_next_page_named(self):
response = self.login(url="/login/next_page/named/")
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
@override_settings(LOGIN_REDIRECT_URL="/custom/")
def test_login_next_page_overrides_login_redirect_url_setting(self):
response = self.login(url="/login/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
def test_login_redirect_url_overrides_next_page(self):
response = self.login(url="/login/next_page/?next=/test/")
self.assertRedirects(response, "/test/", fetch_redirect_response=False)
def test_login_redirect_url_overrides_get_default_redirect_url(self):
response = self.login(url="/login/get_default_redirect_url/?next=/test/")
self.assertRedirects(response, "/test/", fetch_redirect_response=False)
@modify_settings(
MIDDLEWARE={"append": "django.contrib.auth.middleware.LoginRequiredMiddleware"}
)
def test_access_under_login_required_middleware(self):
response = self.client.get(reverse("login"))
self.assertEqual(response.status_code, 200)
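# Hedged sketch: the rotation asserted in test_login_csrf_rotate above comes
# from django.contrib.auth.login() calling rotate_token() on the request.
def _rotate_token_sketch(request):
    from django.middleware.csrf import rotate_token

    rotate_token(request)  # invalidates the CSRF token issued before login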
class LoginURLSettings(AuthViewsTestCase):
"""Tests for settings.LOGIN_URL."""
def assertLoginURLEquals(self, url):
response = self.client.get("/login_required/")
self.assertRedirects(response, url, fetch_redirect_response=False)
@override_settings(LOGIN_URL="/login/")
def test_standard_login_url(self):
self.assertLoginURLEquals("/login/?next=/login_required/")
@override_settings(LOGIN_URL="login")
def test_named_login_url(self):
self.assertLoginURLEquals("/login/?next=/login_required/")
@override_settings(LOGIN_URL="http://remote.example.com/login")
def test_remote_login_url(self):
quoted_next = quote("http://testserver/login_required/")
expected = "http://remote.example.com/login?next=%s" % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL="https:///login/")
def test_https_login_url(self):
quoted_next = quote("http://testserver/login_required/")
expected = "https:///login/?next=%s" % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL="/login/?pretty=1")
def test_login_url_with_querystring(self):
self.assertLoginURLEquals("/login/?pretty=1&next=/login_required/")
@override_settings(LOGIN_URL="http://remote.example.com/login/?next=/default/")
def test_remote_login_url_with_next_querystring(self):
quoted_next = quote("http://testserver/login_required/")
expected = "http://remote.example.com/login/?next=%s" % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL=reverse_lazy("login"))
def test_lazy_login_url(self):
self.assertLoginURLEquals("/login/?next=/login_required/")
class LoginRedirectUrlTest(AuthViewsTestCase):
"""Tests for settings.LOGIN_REDIRECT_URL."""
def assertLoginRedirectURLEqual(self, url):
response = self.login()
self.assertRedirects(response, url, fetch_redirect_response=False)
def test_default(self):
self.assertLoginRedirectURLEqual("/accounts/profile/")
@override_settings(LOGIN_REDIRECT_URL="/custom/")
def test_custom(self):
self.assertLoginRedirectURLEqual("/custom/")
@override_settings(LOGIN_REDIRECT_URL="password_reset")
def test_named(self):
self.assertLoginRedirectURLEqual("/password_reset/")
@override_settings(LOGIN_REDIRECT_URL="http://remote.example.com/welcome/")
def test_remote(self):
self.assertLoginRedirectURLEqual("http://remote.example.com/welcome/")
class RedirectToLoginTests(AuthViewsTestCase):
"""Tests for the redirect_to_login view"""
@override_settings(LOGIN_URL=reverse_lazy("login"))
def test_redirect_to_login_with_lazy(self):
login_redirect_response = redirect_to_login(next="/else/where/")
expected = "/login/?next=/else/where/"
self.assertEqual(expected, login_redirect_response.url)
@override_settings(LOGIN_URL=reverse_lazy("login"))
def test_redirect_to_login_with_lazy_and_unicode(self):
login_redirect_response = redirect_to_login(next="/else/where/झ/")
expected = "/login/?next=/else/where/%E0%A4%9D/"
self.assertEqual(expected, login_redirect_response.url)
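# Hedged sketch: redirect_to_login() percent-encodes the "next" value, which
# is why the unicode test above expects %E0%A4%9D for the UTF-8 bytes of "झ".
def _quoted_next_sketch():
    from urllib.parse import quote

    return "/login/?next=" + quote("/else/where/झ/")  # slashes stay unescaped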
class LogoutThenLoginTests(AuthViewsTestCase):
"""Tests for the logout_then_login view"""
def confirm_logged_out(self):
self.assertNotIn(SESSION_KEY, self.client.session)
@override_settings(LOGIN_URL="/login/")
def test_default_logout_then_login(self):
self.login()
req = HttpRequest()
req.method = "POST"
csrf_token = get_token(req)
req.COOKIES[settings.CSRF_COOKIE_NAME] = csrf_token
req.POST = {"csrfmiddlewaretoken": csrf_token}
req.META["SERVER_NAME"] = "testserver"
req.META["SERVER_PORT"] = 80
req.session = self.client.session
response = logout_then_login(req)
self.confirm_logged_out()
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
def test_logout_then_login_with_custom_login(self):
self.login()
req = HttpRequest()
req.method = "POST"
csrf_token = get_token(req)
req.COOKIES[settings.CSRF_COOKIE_NAME] = csrf_token
req.POST = {"csrfmiddlewaretoken": csrf_token}
req.META["SERVER_NAME"] = "testserver"
req.META["SERVER_PORT"] = 80
req.session = self.client.session
response = logout_then_login(req, login_url="/custom/")
self.confirm_logged_out()
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
@override_settings(LOGIN_URL="/login/")
def test_default_logout_then_login_get(self):
self.login()
req = HttpRequest()
req.method = "GET"
req.META["SERVER_NAME"] = "testserver"
req.META["SERVER_PORT"] = 80
req.session = self.client.session
response = logout_then_login(req)
self.assertEqual(response.status_code, 405)
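# Hedged note as code: the 405 above reflects LogoutView accepting only POST
# (and OPTIONS) now that GET logout is gone; the exact attribute value is an
# assumption for illustration.
def _logout_methods_sketch():
    from django.contrib.auth.views import LogoutView

    return LogoutView.http_method_names  # assumed: ["post", "options"]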
class LoginRedirectAuthenticatedUser(AuthViewsTestCase):
dont_redirect_url = "/login/redirect_authenticated_user_default/"
do_redirect_url = "/login/redirect_authenticated_user/"
def test_default(self):
"""Stay on the login page by default."""
self.login()
response = self.client.get(self.dont_redirect_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["next"], "")
def test_guest(self):
"""If not logged in, stay on the same page."""
response = self.client.get(self.do_redirect_url)
self.assertEqual(response.status_code, 200)
def test_redirect(self):
"""If logged in, go to default redirected URL."""
self.login()
response = self.client.get(self.do_redirect_url)
self.assertRedirects(
response, "/accounts/profile/", fetch_redirect_response=False
)
@override_settings(LOGIN_REDIRECT_URL="/custom/")
def test_redirect_url(self):
"""If logged in, go to custom redirected URL."""
self.login()
response = self.client.get(self.do_redirect_url)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_redirect_param(self):
"""If next is specified as a GET parameter, go there."""
self.login()
url = self.do_redirect_url + "?next=/custom_next/"
response = self.client.get(url)
self.assertRedirects(response, "/custom_next/", fetch_redirect_response=False)
def test_redirect_loop(self):
"""
Detect a redirect loop if LOGIN_REDIRECT_URL is not correctly set,
with and without custom parameters.
"""
self.login()
msg = (
"Redirection loop for authenticated user detected. Check that "
"your LOGIN_REDIRECT_URL doesn't point to a login page."
)
with self.settings(LOGIN_REDIRECT_URL=self.do_redirect_url):
with self.assertRaisesMessage(ValueError, msg):
self.client.get(self.do_redirect_url)
url = self.do_redirect_url + "?bla=2"
with self.assertRaisesMessage(ValueError, msg):
self.client.get(url)
def test_permission_required_not_logged_in(self):
# Not logged in ...
with self.settings(LOGIN_URL=self.do_redirect_url):
# redirected to login.
response = self.client.get("/permission_required_redirect/", follow=True)
self.assertEqual(response.status_code, 200)
# exception raised.
response = self.client.get("/permission_required_exception/", follow=True)
self.assertEqual(response.status_code, 403)
# redirected to login.
response = self.client.get(
"/login_and_permission_required_exception/", follow=True
)
self.assertEqual(response.status_code, 200)
def test_permission_required_logged_in(self):
self.login()
# Already logged in...
with self.settings(LOGIN_URL=self.do_redirect_url):
# redirect loop encountered.
with self.assertRaisesMessage(
RedirectCycleError, "Redirect loop detected."
):
self.client.get("/permission_required_redirect/", follow=True)
# exception raised.
response = self.client.get("/permission_required_exception/", follow=True)
self.assertEqual(response.status_code, 403)
# exception raised.
response = self.client.get(
"/login_and_permission_required_exception/", follow=True
)
self.assertEqual(response.status_code, 403)
class LoginSuccessURLAllowedHostsTest(AuthViewsTestCase):
def test_success_url_allowed_hosts_same_host(self):
response = self.client.post(
"/login/allowed_hosts/",
{
"username": "testclient",
"password": "password",
"next": "https://testserver/home",
},
)
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(
response, "https://testserver/home", fetch_redirect_response=False
)
def test_success_url_allowed_hosts_safe_host(self):
response = self.client.post(
"/login/allowed_hosts/",
{
"username": "testclient",
"password": "password",
"next": "https://otherserver/home",
},
)
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(
response, "https://otherserver/home", fetch_redirect_response=False
)
def test_success_url_allowed_hosts_unsafe_host(self):
response = self.client.post(
"/login/allowed_hosts/",
{
"username": "testclient",
"password": "password",
"next": "https://evil/home",
},
)
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(
response, "/accounts/profile/", fetch_redirect_response=False
)
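# Hedged sketch: the allowed-hosts decisions above funnel through Django's
# url_has_allowed_host_and_scheme() helper, roughly like this.
def _is_safe_next_sketch(next_url):
    from django.utils.http import url_has_allowed_host_and_scheme

    return url_has_allowed_host_and_scheme(
        next_url, allowed_hosts={"testserver", "otherserver"}
    )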
class LogoutTest(AuthViewsTestCase):
def confirm_logged_out(self):
self.assertNotIn(SESSION_KEY, self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.post("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
def test_logout_with_post(self):
self.login()
response = self.client.post("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.post("/logout/")
self.assertIn("site", response.context)
def test_logout_doesnt_cache(self):
"""
The logout() view should send "no-cache" headers for reasons described
in #25490.
"""
response = self.client.post("/logout/")
self.assertIn("no-store", response.headers["Cache-Control"])
def test_logout_with_overridden_redirect_url(self):
# Bug 11223
self.login()
response = self.client.post("/logout/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
response = self.client.post("/logout/next_page/?next=/login/")
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.post("/logout/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.post("/logout/?next=/login/")
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.post("/logout/custom_query/?follow=/somewhere/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_named_redirect(self):
"Logout resolves names or URLs passed as next_page."
self.login()
response = self.client.post("/logout/next_page/named/")
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_success_url_allowed_hosts_same_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://testserver/")
self.assertRedirects(
response, "https://testserver/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_success_url_allowed_hosts_safe_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://otherserver/")
self.assertRedirects(
response, "https://otherserver/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_success_url_allowed_hosts_unsafe_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://evil/")
self.assertRedirects(
response, "/logout/allowed_hosts/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_security_check(self):
logout_url = reverse("logout")
# These URLs should not pass the security check.
bad_urls = (
"http://example.com",
"http:///example.com",
"https://example.com",
"ftp://example.com",
"///example.com",
"//example.com",
'javascript:alert("XSS")',
)
for bad_url in bad_urls:
with self.subTest(bad_url=bad_url):
nasty_url = "%(url)s?%(next)s=%(bad_url)s" % {
"url": logout_url,
"next": REDIRECT_FIELD_NAME,
"bad_url": quote(bad_url),
}
self.login()
response = self.client.post(nasty_url)
self.assertEqual(response.status_code, 302)
self.assertNotIn(
bad_url, response.url, "%s should be blocked" % bad_url
)
self.confirm_logged_out()
# These URLs should pass the security check.
good_urls = (
"/view/?param=http://example.com",
"/view/?param=https://example.com",
"/view?param=ftp://example.com",
"view/?param=//example.com",
"https://testserver/",
"HTTPS://testserver/",
"//testserver/",
"/url%20with%20spaces/",
)
for good_url in good_urls:
with self.subTest(good_url=good_url):
safe_url = "%(url)s?%(next)s=%(good_url)s" % {
"url": logout_url,
"next": REDIRECT_FIELD_NAME,
"good_url": quote(good_url),
}
self.login()
response = self.client.post(safe_url)
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
self.confirm_logged_out()
def test_security_check_https(self):
logout_url = reverse("logout")
non_https_next_url = "http://testserver/"
url = "%(url)s?%(next)s=%(next_url)s" % {
"url": logout_url,
"next": REDIRECT_FIELD_NAME,
"next_url": quote(non_https_next_url),
}
self.login()
response = self.client.post(url, secure=True)
self.assertRedirects(response, logout_url, fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_preserve_language(self):
"""Language is preserved after logout."""
self.login()
self.client.post("/setlang/", {"language": "pl"})
self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "pl")
self.client.post("/logout/")
self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "pl")
@override_settings(LOGOUT_REDIRECT_URL="/custom/")
def test_logout_redirect_url_setting(self):
self.login()
response = self.client.post("/logout/")
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
@override_settings(LOGOUT_REDIRECT_URL="/custom/")
def test_logout_redirect_url_setting_allowed_hosts_unsafe_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://evil/")
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
@override_settings(LOGOUT_REDIRECT_URL="logout")
def test_logout_redirect_url_named_setting(self):
self.login()
response = self.client.post("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
@modify_settings(
MIDDLEWARE={"append": "django.contrib.auth.middleware.LoginRequiredMiddleware"}
)
def test_access_under_login_required_middleware(self):
response = self.client.post("/logout/")
self.assertRedirects(
response,
settings.LOGIN_URL + "?next=/logout/",
fetch_redirect_response=False,
)
self.login()
response = self.client.post("/logout/")
self.assertEqual(response.status_code, 200)
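# Hedged sketch: under LoginRequiredMiddleware (exercised above), individual
# views can opt out with the login_not_required decorator (Django 5.1+); the
# view below is hypothetical.
def _public_view_sketch():
    from django.contrib.auth.decorators import login_not_required
    from django.http import HttpResponse

    @login_not_required
    def healthcheck(request):
        return HttpResponse("ok")

    return healthcheck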
def get_perm(Model, perm):
ct = ContentType.objects.get_for_model(Model)
return Permission.objects.get(content_type=ct, codename=perm)
# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649)
@override_settings(
ROOT_URLCONF="auth_tests.urls_admin",
PASSWORD_HASHERS=["django.contrib.auth.hashers.MD5PasswordHasher"],
)
class ChangelistTests(MessagesTestMixin, AuthViewsTestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
# Make me a superuser before logging in.
User.objects.filter(username="testclient").update(
is_staff=True, is_superuser=True
)
def setUp(self):
self.login()
# Get the latest last_login value.
self.admin = User.objects.get(pk=self.u1.pk)
def get_user_data(self, user):
return {
"username": user.username,
"password": user.password,
"email": user.email,
"is_active": user.is_active,
"is_staff": user.is_staff,
"is_superuser": user.is_superuser,
"last_login_0": user.last_login.strftime("%Y-%m-%d"),
"last_login_1": user.last_login.strftime("%H:%M:%S"),
"initial-last_login_0": user.last_login.strftime("%Y-%m-%d"),
"initial-last_login_1": user.last_login.strftime("%H:%M:%S"),
"date_joined_0": user.date_joined.strftime("%Y-%m-%d"),
"date_joined_1": user.date_joined.strftime("%H:%M:%S"),
"initial-date_joined_0": user.date_joined.strftime("%Y-%m-%d"),
"initial-date_joined_1": user.date_joined.strftime("%H:%M:%S"),
"first_name": user.first_name,
"last_name": user.last_name,
}
# #20078 - users shouldn't be allowed to guess password hashes via
# repeated password__startswith queries.
def test_changelist_disallows_password_lookups(self):
# A lookup that tries to filter on password isn't OK
with self.assertLogs("django.security.DisallowedModelAdminLookup", "ERROR"):
response = self.client.get(
reverse("auth_test_admin:auth_user_changelist")
+ "?password__startswith=sha1$"
)
self.assertEqual(response.status_code, 400)
def test_user_change_email(self):
data = self.get_user_data(self.admin)
data["email"] = "new_" + data["email"]
response = self.client.post(
reverse("auth_test_admin:auth_user_change", args=(self.admin.pk,)), data
)
self.assertRedirects(response, reverse("auth_test_admin:auth_user_changelist"))
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "Changed Email address.")
def test_user_not_change(self):
response = self.client.post(
reverse("auth_test_admin:auth_user_change", args=(self.admin.pk,)),
self.get_user_data(self.admin),
)
self.assertRedirects(response, reverse("auth_test_admin:auth_user_changelist"))
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "No fields changed.")
def test_user_with_usable_password_change_password(self):
user_change_url = reverse(
"auth_test_admin:auth_user_change", args=(self.admin.pk,)
)
password_change_url = reverse(
"auth_test_admin:auth_user_password_change", args=(self.admin.pk,)
)
response = self.client.get(user_change_url)
# Test the link inside password field help_text.
rel_link = re.search(
r'<a role="button" class="button" href="([^"]*)">Reset password</a>',
response.text,
)[1]
self.assertEqual(urljoin(user_change_url, rel_link), password_change_url)
response = self.client.get(password_change_url)
# Test the form title with original (usable) password
self.assertContains(
response, f"<h1>Change password: {self.admin.username}</h1>"
)
# Breadcrumb.
self.assertContains(
response,
f'{self.admin.username}</a></li>\n<li aria-current="page">'
"Change password</li>",
)
# Usable password field.
self.assertContains(
response,
'<fieldset class="flex-container">'
"<legend>Password-based authentication:</legend>",
)
# Submit buttons
self.assertContains(response, '<input type="submit" name="set-password"')
self.assertContains(response, '<input type="submit" name="unset-password"')
# Password change.
response = self.client.post(
password_change_url,
{
"password1": "password1",
"password2": "password1",
},
)
self.assertRedirects(response, user_change_url)
self.assertMessages(
response, [Message(level=25, message="Password changed successfully.")]
)
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "Changed password.")
self.logout()
self.login(password="password1")
# Disable password-based authentication without proper submit button.
response = self.client.post(
password_change_url,
{
"password1": "password1",
"password2": "password1",
"usable_password": "false",
},
)
self.assertRedirects(response, password_change_url)
self.assertMessages(
response,
[
Message(
level=40,
message="Conflicting form data submitted. Please try again.",
)
],
)
# No password change yet.
self.login(password="password1")
# Disable password-based authentication with proper submit button.
response = self.client.post(
password_change_url,
{
"password1": "password1",
"password2": "password1",
"usable_password": "false",
"unset-password": 1,
},
)
self.assertRedirects(response, user_change_url)
self.assertMessages(
response,
[Message(level=25, message="Password-based authentication was disabled.")],
)
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "Changed password.")
self.logout()
# Password-based authentication was disabled.
with self.assertRaises(AssertionError):
self.login(password="password1")
self.admin.refresh_from_db()
self.assertIs(self.admin.has_usable_password(), False)
def test_user_with_unusable_password_change_password(self):
# Test for title with unusable password with a test user
test_user = User.objects.get(email="[email protected]")
test_user.set_unusable_password()
test_user.save()
user_change_url = reverse(
"auth_test_admin:auth_user_change", args=(test_user.pk,)
)
password_change_url = reverse(
"auth_test_admin:auth_user_password_change", args=(test_user.pk,)
)
response = self.client.get(user_change_url)
# Test the link inside password field help_text.
rel_link = re.search(
r'<a role="button" class="button" href="([^"]*)">Set password</a>',
response.text,
)[1]
self.assertEqual(urljoin(user_change_url, rel_link), password_change_url)
response = self.client.get(password_change_url)
        # Test the form title with an unusable password
self.assertContains(response, f"<h1>Set password: {test_user.username}</h1>")
# Breadcrumb.
self.assertContains(
response,
f'{test_user.username}</a></li>\n<li aria-current="page">'
"Set password</li>",
)
# Submit buttons
self.assertContains(response, '<input type="submit" name="set-password"')
self.assertNotContains(response, '<input type="submit" name="unset-password"')
response = self.client.post(
password_change_url,
{
"password1": "password1",
"password2": "password1",
},
)
self.assertRedirects(response, user_change_url)
self.assertMessages(
response, [Message(level=25, message="Password changed successfully.")]
)
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "Changed password.")
def test_user_change_different_user_password(self):
u = User.objects.get(email="[email protected]")
response = self.client.post(
reverse("auth_test_admin:auth_user_password_change", args=(u.pk,)),
{
"password1": "password1",
"password2": "password1",
},
)
self.assertRedirects(
response, reverse("auth_test_admin:auth_user_change", args=(u.pk,))
)
row = LogEntry.objects.latest("id")
self.assertEqual(row.user_id, self.admin.pk)
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.get_change_message(), "Changed password.")
def test_password_change_bad_url(self):
response = self.client.get(
reverse("auth_test_admin:auth_user_password_change", args=("foobar",))
)
self.assertEqual(response.status_code, 404)
@mock.patch("django.contrib.auth.admin.UserAdmin.has_change_permission")
def test_user_change_password_passes_user_to_has_change_permission(
self, has_change_permission
):
url = reverse(
"auth_test_admin:auth_user_password_change", args=(self.admin.pk,)
)
self.client.post(url, {"password1": "password1", "password2": "password1"})
(_request, user), _kwargs = has_change_permission.call_args
self.assertEqual(user.pk, self.admin.pk)
def test_view_user_password_is_readonly(self):
u = User.objects.get(username="testclient")
u.is_superuser = False
u.save()
original_password = u.password
u.user_permissions.add(get_perm(User, "view_user"))
response = self.client.get(
reverse("auth_test_admin:auth_user_change", args=(u.pk,)),
)
algo, salt, hash_string = u.password.split("$")
self.assertContains(response, '<div class="readonly">testclient</div>')
# The password value is hashed.
self.assertContains(
response,
"<strong>algorithm</strong>: <bdi>%s</bdi>\n\n"
"<strong>salt</strong>: <bdi>%s********************</bdi>\n\n"
"<strong>hash</strong>: <bdi>%s**************************</bdi>\n\n"
% (
algo,
salt[:2],
hash_string[:6],
),
html=True,
)
self.assertNotContains(
response,
'<a role="button" class="button" href="../password/">Reset password</a>',
)
# Value in POST data is ignored.
data = self.get_user_data(u)
data["password"] = "shouldnotchange"
change_url = reverse("auth_test_admin:auth_user_change", args=(u.pk,))
response = self.client.post(change_url, data)
self.assertEqual(response.status_code, 403)
u.refresh_from_db()
self.assertEqual(u.password, original_password)
@override_settings(
AUTH_USER_MODEL="auth_tests.UUIDUser",
ROOT_URLCONF="auth_tests.urls_custom_user_admin",
)
class UUIDUserTests(TestCase):
def test_admin_password_change(self):
u = UUIDUser.objects.create_superuser(
username="uuid", email="[email protected]", password="test"
)
self.assertTrue(self.client.login(username="uuid", password="test"))
user_change_url = reverse(
"custom_user_admin:auth_tests_uuiduser_change", args=(u.pk,)
)
response = self.client.get(user_change_url)
self.assertEqual(response.status_code, 200)
password_change_url = reverse(
"custom_user_admin:auth_user_password_change", args=(u.pk,)
)
response = self.client.get(password_change_url)
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="uuiduser_form">')
# A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
with connection.constraint_checks_disabled():
response = self.client.post(
password_change_url,
{
"password1": "password1",
"password2": "password1",
},
)
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest("id")
self.assertEqual(row.user_id, 1) # hardcoded in CustomUserAdmin.log_change()
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.get_change_message(), "Changed password.")
# The LogEntry.user column isn't altered to a UUID type so it's set to
# an integer manually in CustomUserAdmin to avoid an error. To avoid a
# constraint error, delete the entry before constraints are checked
# after the test.
row.delete()
|
./temp_repos/django/tests/urlpatterns/views.py
|
./temp_repos/django/tests/auth_tests/test_views.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.http
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Unknown
|
python
|
"""
Creates the default Site object.
"""
from django.apps import apps as global_apps
from django.conf import settings
from django.core.management.color import no_style
from django.db import DEFAULT_DB_ALIAS, connections, router
def create_default_site(
app_config,
verbosity=2,
interactive=True,
using=DEFAULT_DB_ALIAS,
apps=global_apps,
**kwargs,
):
try:
Site = apps.get_model("sites", "Site")
except LookupError:
return
if not router.allow_migrate_model(using, Site):
return
if not Site.objects.using(using).exists():
# The default settings set SITE_ID = 1, and some tests in Django's test
# suite rely on this value. However, if database sequences are reused
# (e.g. in the test suite after flush/syncdb), it isn't guaranteed that
# the next id will be 1, so we coerce it. See #15573 and #16353. This
# can also crop up outside of tests - see #15346.
if verbosity >= 2:
print("Creating example.com Site object")
Site(
pk=getattr(settings, "SITE_ID", 1), domain="example.com", name="example.com"
).save(using=using)
# We set an explicit pk instead of relying on auto-incrementation,
# so we need to reset the database sequence. See #17415.
sequence_sql = connections[using].ops.sequence_reset_sql(no_style(), [Site])
if sequence_sql:
if verbosity >= 2:
print("Resetting sequence")
with connections[using].cursor() as cursor:
for command in sequence_sql:
cursor.execute(command)
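# Hedged sketch: django.contrib.sites wires the handler above to the
# post_migrate signal from SitesConfig.ready(), roughly like this.
def _connect_signal_sketch(app_config):
    from django.db.models.signals import post_migrate

    post_migrate.connect(create_default_site, sender=app_config)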
|
import builtins
import getpass
import os
import sys
from datetime import date
from io import StringIO
from unittest import mock
from django.apps import apps
from django.conf import settings
from django.contrib.auth import get_permission_codename, management
from django.contrib.auth.management import (
RenamePermission,
create_permissions,
get_default_username,
)
from django.contrib.auth.management.commands import changepassword, createsuperuser
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.management import call_command
from django.core.management.base import CommandError
from django.db import migrations, models
from django.test import TestCase, override_settings
from django.test.testcases import TransactionTestCase
from django.utils.translation import gettext_lazy as _
from .models import (
CustomUser,
CustomUserNonUniqueUsername,
CustomUserWithFK,
CustomUserWithM2M,
CustomUserWithUniqueConstraint,
Email,
Organization,
UserProxy,
)
MOCK_INPUT_KEY_TO_PROMPTS = {
# @mock_inputs dict key: [expected prompt messages],
"bypass": ["Bypass password validation and create user anyway? [y/N]: "],
"email": ["Email address: "],
"date_of_birth": ["Date of birth: "],
"first_name": ["First name: "],
"username": [
"Username: ",
lambda: "Username (leave blank to use '%s'): " % get_default_username(),
],
}
def mock_inputs(inputs):
"""
Decorator to temporarily replace input/getpass to allow interactive
createsuperuser.
"""
def inner(test_func):
def wrapper(*args):
class mock_getpass:
@staticmethod
def getpass(prompt=b"Password: ", stream=None):
if callable(inputs["password"]):
return inputs["password"]()
return inputs["password"]
def mock_input(prompt):
assert "__proxy__" not in prompt
response = None
for key, val in inputs.items():
if val == "KeyboardInterrupt":
raise KeyboardInterrupt
# get() fallback because sometimes 'key' is the actual
# prompt rather than a shortcut name.
prompt_msgs = MOCK_INPUT_KEY_TO_PROMPTS.get(key, key)
if isinstance(prompt_msgs, list):
prompt_msgs = [
msg() if callable(msg) else msg for msg in prompt_msgs
]
if prompt in prompt_msgs:
if callable(val):
response = val()
else:
response = val
break
if response is None:
raise ValueError("Mock input for %r not found." % prompt)
return response
old_getpass = createsuperuser.getpass
old_input = builtins.input
createsuperuser.getpass = mock_getpass
builtins.input = mock_input
try:
test_func(*args)
finally:
createsuperuser.getpass = old_getpass
builtins.input = old_input
return wrapper
return inner
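# Hedged usage sketch for the decorator above: dict keys are either shortcut
# names from MOCK_INPUT_KEY_TO_PROMPTS or literal prompt strings.
#
#     @mock_inputs({"password": "secret", "username": "alice", "email": ""})
#     def test_something(self):
#         call_command("createsuperuser", interactive=True, stdin=MockTTY())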
class MockTTY:
"""
A fake stdin object that pretends to be a TTY to be used in conjunction
with mock_inputs.
"""
def isatty(self):
return True
class MockInputTests(TestCase):
@mock_inputs({"username": "alice"})
def test_input_not_found(self):
with self.assertRaisesMessage(
ValueError, "Mock input for 'Email address: ' not found."
):
call_command("createsuperuser", stdin=MockTTY())
class GetDefaultUsernameTestCase(TestCase):
databases = {"default", "other"}
def setUp(self):
self.old_get_system_username = management.get_system_username
def tearDown(self):
management.get_system_username = self.old_get_system_username
def test_actual_implementation(self):
self.assertIsInstance(management.get_system_username(), str)
def test_getuser_raises_exception(self):
# TODO: Drop ImportError and KeyError when dropping support for PY312.
for exc in (ImportError, KeyError, OSError):
with self.subTest(exc=str(exc)):
with mock.patch("getpass.getuser", side_effect=exc):
self.assertEqual(management.get_system_username(), "")
def test_simple(self):
management.get_system_username = lambda: "joe"
self.assertEqual(management.get_default_username(), "joe")
def test_existing(self):
User.objects.create(username="joe")
management.get_system_username = lambda: "joe"
self.assertEqual(management.get_default_username(), "")
self.assertEqual(management.get_default_username(check_db=False), "joe")
def test_i18n(self):
# 'Julia' with accented 'u':
management.get_system_username = lambda: "J\xfalia"
self.assertEqual(management.get_default_username(), "julia")
def test_with_database(self):
User.objects.create(username="joe")
management.get_system_username = lambda: "joe"
self.assertEqual(management.get_default_username(), "")
self.assertEqual(management.get_default_username(database="other"), "joe")
User.objects.using("other").create(username="joe")
self.assertEqual(management.get_default_username(database="other"), "")
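# Hedged sketch of the normalization behind test_i18n above: decompose with
# NFKD, drop non-ASCII bytes, and lowercase (approximates what
# get_default_username() does).
def _ascii_username_sketch(name="J\xfalia"):
    import unicodedata

    cleaned = unicodedata.normalize("NFKD", name).encode("ascii", "ignore")
    return cleaned.decode("ascii").lower()  # -> "julia"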
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
)
class ChangepasswordManagementCommandTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create_user(username="joe", password="qwerty")
def setUp(self):
self.stdout = StringIO()
self.addCleanup(self.stdout.close)
self.stderr = StringIO()
self.addCleanup(self.stderr.close)
@mock.patch.object(getpass, "getpass", return_value="password")
def test_get_pass(self, mock_get_pass):
call_command("changepassword", username="joe", stdout=self.stdout)
self.assertIs(User.objects.get(username="joe").check_password("password"), True)
@mock.patch.object(getpass, "getpass", return_value="")
def test_get_pass_no_input(self, mock_get_pass):
with self.assertRaisesMessage(CommandError, "aborted"):
call_command("changepassword", username="joe", stdout=self.stdout)
@mock.patch.object(changepassword.Command, "_get_pass", return_value="new_password")
def test_system_username(self, mock_get_pass):
"""The system username is used if --username isn't provided."""
username = getpass.getuser()
User.objects.create_user(username=username, password="qwerty")
call_command("changepassword", stdout=self.stdout)
self.assertIs(
User.objects.get(username=username).check_password("new_password"), True
)
def test_nonexistent_username(self):
with self.assertRaisesMessage(CommandError, "user 'test' does not exist"):
call_command("changepassword", username="test", stdout=self.stdout)
@mock.patch.object(changepassword.Command, "_get_pass", return_value="not qwerty")
def test_that_changepassword_command_changes_joes_password(self, mock_get_pass):
"""
Executing the changepassword management command should change joe's
password
"""
self.assertTrue(self.user.check_password("qwerty"))
call_command("changepassword", username="joe", stdout=self.stdout)
command_output = self.stdout.getvalue().strip()
self.assertEqual(
command_output,
"Changing password for user 'joe'\n"
"Password changed successfully for user 'joe'",
)
self.assertTrue(User.objects.get(username="joe").check_password("not qwerty"))
@mock.patch.object(
changepassword.Command, "_get_pass", side_effect=lambda *args: str(args)
)
def test_that_max_tries_exits_1(self, mock_get_pass):
"""
A CommandError should be thrown by handle() if the user enters in
mismatched passwords three times.
"""
msg = "Aborting password change for user 'joe' after 3 attempts"
with self.assertRaisesMessage(CommandError, msg):
call_command(
"changepassword", username="joe", stdout=self.stdout, stderr=self.stderr
)
@mock.patch.object(changepassword.Command, "_get_pass", return_value="1234567890")
def test_password_validation(self, mock_get_pass):
"""
A CommandError should be raised if the user enters in passwords which
fail validation three times.
"""
abort_msg = "Aborting password change for user 'joe' after 3 attempts"
with self.assertRaisesMessage(CommandError, abort_msg):
call_command(
"changepassword", username="joe", stdout=self.stdout, stderr=self.stderr
)
self.assertIn("This password is entirely numeric.", self.stderr.getvalue())
@mock.patch.object(changepassword.Command, "_get_pass", return_value="not qwerty")
def test_that_changepassword_command_works_with_nonascii_output(
self, mock_get_pass
):
"""
#21627 -- Executing the changepassword management command should allow
non-ASCII characters from the User object representation.
"""
# 'Julia' with accented 'u':
User.objects.create_user(username="J\xfalia", password="qwerty")
call_command("changepassword", username="J\xfalia", stdout=self.stdout)
class MultiDBChangepasswordManagementCommandTestCase(TestCase):
databases = {"default", "other"}
@mock.patch.object(changepassword.Command, "_get_pass", return_value="not qwerty")
def test_that_changepassword_command_with_database_option_uses_given_db(
self, mock_get_pass
):
"""
changepassword --database should operate on the specified DB.
"""
user = User.objects.db_manager("other").create_user(
username="joe", password="qwerty"
)
self.assertTrue(user.check_password("qwerty"))
out = StringIO()
call_command("changepassword", username="joe", database="other", stdout=out)
command_output = out.getvalue().strip()
self.assertEqual(
command_output,
"Changing password for user 'joe'\n"
"Password changed successfully for user 'joe'",
)
self.assertTrue(
User.objects.using("other").get(username="joe").check_password("not qwerty")
)
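# Hedged sketch: in --noinput mode, createsuperuser also reads
# DJANGO_SUPERUSER_<FIELD> environment variables (exercised further below);
# the password value here is illustrative only.
def _env_superuser_sketch():
    import os

    os.environ.setdefault("DJANGO_SUPERUSER_PASSWORD", "example-password")
    # call_command("createsuperuser", interactive=False,
    #              username="joe", email="[email protected]")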
@override_settings(
SILENCED_SYSTEM_CHECKS=["fields.W342"], # ForeignKey(unique=True)
AUTH_PASSWORD_VALIDATORS=[
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}
],
)
class CreatesuperuserManagementCommandTestCase(TestCase):
def test_no_email_argument(self):
new_io = StringIO()
with self.assertRaisesMessage(
CommandError, "You must use --email with --noinput."
):
call_command(
"createsuperuser", interactive=False, username="joe", stdout=new_io
)
def test_basic_usage(self):
"Check the operation of the createsuperuser management command"
# We can use the management command to create a superuser
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
username="joe",
email="[email protected]",
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = User.objects.get(username="joe")
self.assertEqual(u.email, "[email protected]")
# created password should be unusable
self.assertFalse(u.has_usable_password())
def test_validate_username(self):
msg = (
"Enter a valid username. This value may contain only letters, numbers, "
"and @/./+/-/_ characters."
)
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username="🤠",
email="[email protected]",
)
def test_non_ascii_verbose_name(self):
@mock_inputs(
{
"password": "nopasswd",
"Uživatel (leave blank to use '%s'): "
% get_default_username(): "foo", # username (cz)
"email": "[email protected]",
}
)
def test(self):
username_field = User._meta.get_field("username")
old_verbose_name = username_field.verbose_name
username_field.verbose_name = _("u\u017eivatel")
new_io = StringIO()
try:
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stdin=MockTTY(),
)
finally:
username_field.verbose_name = old_verbose_name
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
test(self)
def test_verbosity_zero(self):
# We can suppress output on the management command
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
username="joe2",
email="[email protected]",
verbosity=0,
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "")
u = User.objects.get(username="joe2")
self.assertEqual(u.email, "[email protected]")
self.assertFalse(u.has_usable_password())
def test_email_in_username(self):
call_command(
"createsuperuser",
interactive=False,
username="[email protected]",
email="[email protected]",
verbosity=0,
)
u = User._default_manager.get(username="[email protected]")
self.assertEqual(u.email, "[email protected]")
self.assertFalse(u.has_usable_password())
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
def test_swappable_user(self):
"A superuser can be created when a custom user model is in use"
# We can use the management command to create a superuser
# We skip validation because the temporary substitution of the
# swappable User model messes with validation.
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
email="[email protected]",
date_of_birth="1976-04-01",
first_name="Joe",
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUser._default_manager.get(email="[email protected]")
self.assertEqual(u.date_of_birth, date(1976, 4, 1))
# created password should be unusable
self.assertFalse(u.has_usable_password())
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
def test_swappable_user_missing_required_field(self):
"""
A Custom superuser won't be created when a required field isn't
provided
"""
# We can use the management command to create a superuser
# We skip validation because the temporary substitution of the
# swappable User model messes with validation.
new_io = StringIO()
with self.assertRaisesMessage(
CommandError, "You must use --email with --noinput."
):
call_command(
"createsuperuser",
interactive=False,
stdout=new_io,
stderr=new_io,
)
self.assertEqual(CustomUser._default_manager.count(), 0)
@override_settings(
AUTH_USER_MODEL="auth_tests.CustomUserNonUniqueUsername",
AUTHENTICATION_BACKENDS=["my.custom.backend"],
)
def test_swappable_user_username_non_unique(self):
@mock_inputs(
{
"username": "joe",
"password": "nopasswd",
}
)
def createsuperuser():
new_io = StringIO()
call_command(
"createsuperuser",
interactive=True,
email="[email protected]",
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
for i in range(2):
createsuperuser()
users = CustomUserNonUniqueUsername.objects.filter(username="joe")
self.assertEqual(users.count(), 2)
def test_skip_if_not_in_TTY(self):
"""
If the command is not called from a TTY, it should be skipped and a
message should be displayed (#7423).
"""
class FakeStdin:
"""A fake stdin object that has isatty() return False."""
def isatty(self):
return False
out = StringIO()
call_command(
"createsuperuser",
stdin=FakeStdin(),
stdout=out,
interactive=True,
)
self.assertEqual(User._default_manager.count(), 0)
self.assertIn("Superuser creation skipped", out.getvalue())
def test_passing_stdin(self):
"""
You can pass a stdin object as an option and it should be
available on self.stdin.
If no such option is passed, it defaults to sys.stdin.
"""
sentinel = object()
command = createsuperuser.Command()
call_command(
command,
stdin=sentinel,
interactive=False,
verbosity=0,
username="janet",
email="[email protected]",
)
self.assertIs(command.stdin, sentinel)
command = createsuperuser.Command()
call_command(
command,
interactive=False,
verbosity=0,
username="joe",
email="[email protected]",
)
self.assertIs(command.stdin, sys.stdin)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_fields_with_fk(self):
new_io = StringIO()
group = Group.objects.create(name="mygroup")
email = Email.objects.create(email="[email protected]")
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=email.email,
group=group.pk,
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUserWithFK._default_manager.get(email=email)
self.assertEqual(u.username, email)
self.assertEqual(u.group, group)
non_existent_email = "[email protected]"
msg = "email instance with email %r is not a valid choice." % non_existent_email
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=non_existent_email,
stdout=new_io,
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_fields_with_fk_interactive(self):
new_io = StringIO()
group = Group.objects.create(name="mygroup")
email = Email.objects.create(email="[email protected]")
@mock_inputs(
{
"password": "nopasswd",
"Username (Email.id): ": email.pk,
"Email (Email.email): ": email.email,
"Group (Group.id): ": group.pk,
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUserWithFK._default_manager.get(email=email)
self.assertEqual(u.username, email)
self.assertEqual(u.group, group)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_fields_with_fk_via_option_interactive(self):
new_io = StringIO()
group = Group.objects.create(name="mygroup")
email = Email.objects.create(email="[email protected]")
@mock_inputs({"password": "nopasswd"})
def test(self):
call_command(
"createsuperuser",
interactive=True,
username=email.pk,
email=email.email,
group=group.pk,
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUserWithFK._default_manager.get(email=email)
self.assertEqual(u.username, email)
self.assertEqual(u.group, group)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_validate_fk(self):
email = Email.objects.create(email="[email protected]")
Group.objects.all().delete()
nonexistent_group_id = 1
msg = f"group instance with id {nonexistent_group_id} is not a valid choice."
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=email.email,
group=nonexistent_group_id,
verbosity=0,
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_validate_fk_environment_variable(self):
email = Email.objects.create(email="[email protected]")
Group.objects.all().delete()
nonexistent_group_id = 1
msg = f"group instance with id {nonexistent_group_id} is not a valid choice."
with mock.patch.dict(
os.environ,
{"DJANGO_SUPERUSER_GROUP": str(nonexistent_group_id)},
):
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=email.email,
verbosity=0,
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_validate_fk_via_option_interactive(self):
email = Email.objects.create(email="[email protected]")
Group.objects.all().delete()
nonexistent_group_id = 1
msg = f"group instance with id {nonexistent_group_id} is not a valid choice."
@mock_inputs(
{
"password": "nopasswd",
"Username (Email.id): ": email.pk,
"Email (Email.email): ": email.email,
}
)
def test(self):
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
group=nonexistent_group_id,
stdin=MockTTY(),
verbosity=0,
)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2m")
def test_fields_with_m2m(self):
new_io = StringIO()
org_id_1 = Organization.objects.create(name="Organization 1").pk
org_id_2 = Organization.objects.create(name="Organization 2").pk
call_command(
"createsuperuser",
interactive=False,
username="joe",
orgs=[org_id_1, org_id_2],
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
user = CustomUserWithM2M._default_manager.get(username="joe")
self.assertEqual(user.orgs.count(), 2)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2M")
def test_fields_with_m2m_interactive(self):
new_io = StringIO()
org_id_1 = Organization.objects.create(name="Organization 1").pk
org_id_2 = Organization.objects.create(name="Organization 2").pk
@mock_inputs(
{
"password": "nopasswd",
"Username: ": "joe",
"Orgs (Organization.id): ": "%s, %s" % (org_id_1, org_id_2),
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
user = CustomUserWithM2M._default_manager.get(username="joe")
self.assertEqual(user.orgs.count(), 2)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2M")
def test_fields_with_m2m_interactive_blank(self):
new_io = StringIO()
org_id = Organization.objects.create(name="Organization").pk
entered_orgs = [str(org_id), " "]
def return_orgs():
return entered_orgs.pop()
@mock_inputs(
{
"password": "nopasswd",
"Username: ": "joe",
"Orgs (Organization.id): ": return_orgs,
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stderr=new_io,
stdin=MockTTY(),
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: This field cannot be blank.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2MThrough")
def test_fields_with_m2m_and_through(self):
msg = (
"Required field 'orgs' specifies a many-to-many relation through "
"model, which is not supported."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("createsuperuser")
def test_default_username(self):
"""createsuperuser uses a default username when one isn't provided."""
# Get the default username before creating a user.
default_username = get_default_username()
new_io = StringIO()
entered_passwords = ["password", "password"]
def return_passwords():
return entered_passwords.pop(0)
@mock_inputs({"password": return_passwords, "username": "", "email": ""})
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(), "Superuser created successfully."
)
self.assertTrue(User.objects.filter(username=default_username).exists())
test(self)
def test_password_validation(self):
"""
Creation should fail if the password fails validation.
"""
new_io = StringIO()
entered_passwords = ["1234567890", "1234567890", "password", "password"]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"username": "joe1234567890",
"email": "",
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"This password is entirely numeric.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
]
)
def test_validate_password_against_username(self):
new_io = StringIO()
username = "supremelycomplex"
entered_passwords = [
username,
username,
"superduperunguessablepassword",
"superduperunguessablepassword",
]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"username": username,
"email": "",
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"The password is too similar to the username.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(
AUTH_USER_MODEL="auth_tests.CustomUser",
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
],
)
def test_validate_password_against_required_fields(self):
new_io = StringIO()
first_name = "josephine"
entered_passwords = [
first_name,
first_name,
"superduperunguessablepassword",
"superduperunguessablepassword",
]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"username": "whatever",
"first_name": first_name,
"date_of_birth": "1970-01-01",
"email": "[email protected]",
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"The password is too similar to the first name.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(
AUTH_USER_MODEL="auth_tests.CustomUser",
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
],
)
def test_validate_password_against_required_fields_via_option(self):
new_io = StringIO()
first_name = "josephine"
entered_passwords = [
first_name,
first_name,
"superduperunguessablepassword",
"superduperunguessablepassword",
]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
first_name=first_name,
date_of_birth="1970-01-01",
email="[email protected]",
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"The password is too similar to the first name.\n"
"Superuser created successfully.",
)
test(self)
def test_blank_username(self):
"""Creation fails if --username is blank."""
new_io = StringIO()
with self.assertRaisesMessage(CommandError, "Username cannot be blank."):
call_command(
"createsuperuser",
username="",
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
def test_blank_username_non_interactive(self):
new_io = StringIO()
with self.assertRaisesMessage(CommandError, "Username cannot be blank."):
call_command(
"createsuperuser",
username="",
interactive=False,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
def test_blank_email_allowed_non_interactive(self):
new_io = StringIO()
call_command(
"createsuperuser",
email="",
username="joe",
interactive=False,
stdout=new_io,
stderr=new_io,
)
self.assertEqual(new_io.getvalue().strip(), "Superuser created successfully.")
u = User.objects.get(username="joe")
self.assertEqual(u.email, "")
@mock.patch.dict(os.environ, {"DJANGO_SUPERUSER_EMAIL": ""})
def test_blank_email_allowed_non_interactive_environment_variable(self):
new_io = StringIO()
call_command(
"createsuperuser",
username="joe",
interactive=False,
stdout=new_io,
stderr=new_io,
)
self.assertEqual(new_io.getvalue().strip(), "Superuser created successfully.")
u = User.objects.get(username="joe")
self.assertEqual(u.email, "")
def test_password_validation_bypass(self):
"""
Password validation can be bypassed by entering 'y' at the prompt.
"""
new_io = StringIO()
@mock_inputs(
{
"password": "1234567890",
"username": "joe1234567890",
"email": "",
"bypass": "y",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"This password is entirely numeric.\n"
"Superuser created successfully.",
)
test(self)
def test_invalid_username(self):
"""Creation fails if the username fails validation."""
user_field = User._meta.get_field(User.USERNAME_FIELD)
new_io = StringIO()
entered_passwords = ["password", "password"]
# Enter an invalid (too long) username first and then a valid one.
invalid_username = ("x" * user_field.max_length) + "y"
entered_usernames = [invalid_username, "janet"]
def return_passwords():
return entered_passwords.pop(0)
def return_usernames():
return entered_usernames.pop(0)
@mock_inputs(
{"password": return_passwords, "username": return_usernames, "email": ""}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: Ensure this value has at most %s characters (it has %s).\n"
"Superuser created successfully."
% (user_field.max_length, len(invalid_username)),
)
test(self)
@mock_inputs({"username": "KeyboardInterrupt"})
def test_keyboard_interrupt(self):
new_io = StringIO()
with self.assertRaises(SystemExit):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(new_io.getvalue(), "\nOperation cancelled.\n")
def test_existing_username(self):
"""Creation fails if the username already exists."""
user = User.objects.create(username="janet")
new_io = StringIO()
entered_passwords = ["password", "password"]
# Enter the existing username first and then a new one.
entered_usernames = [user.username, "joe"]
def return_passwords():
return entered_passwords.pop(0)
def return_usernames():
return entered_usernames.pop(0)
@mock_inputs(
{"password": return_passwords, "username": return_usernames, "email": ""}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: That username is already taken.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithUniqueConstraint")
def test_existing_username_meta_unique_constraint(self):
"""
Creation fails if the username already exists and a custom user model
has UniqueConstraint.
"""
user = CustomUserWithUniqueConstraint.objects.create(username="janet")
new_io = StringIO()
entered_passwords = ["password", "password"]
# Enter the existing username first and then a new one.
entered_usernames = [user.username, "joe"]
def return_passwords():
return entered_passwords.pop(0)
def return_usernames():
return entered_usernames.pop(0)
@mock_inputs({"password": return_passwords, "username": return_usernames})
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: That username is already taken.\n"
"Superuser created successfully.",
)
test(self)
def test_existing_username_non_interactive(self):
"""Creation fails if the username already exists."""
User.objects.create(username="janet")
new_io = StringIO()
with self.assertRaisesMessage(
CommandError, "Error: That username is already taken."
):
call_command(
"createsuperuser",
username="janet",
email="",
interactive=False,
stdout=new_io,
)
def test_existing_username_provided_via_option_and_interactive(self):
"""call_command() gets username='janet' and interactive=True."""
new_io = StringIO()
entered_passwords = ["password", "password"]
User.objects.create(username="janet")
def return_passwords():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": return_passwords,
"username": "janet1",
"email": "[email protected]",
}
)
def test(self):
call_command(
"createsuperuser",
username="janet",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
msg = (
"Error: That username is already taken.\n"
"Superuser created successfully."
)
self.assertEqual(new_io.getvalue().strip(), msg)
test(self)
def test_validation_mismatched_passwords(self):
"""
Creation should fail if the user enters mismatched passwords.
"""
new_io = StringIO()
# The first two passwords do not match, but the second two do match and
# are valid.
entered_passwords = ["password", "not password", "password2", "password2"]
def mismatched_passwords_then_matched():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": mismatched_passwords_then_matched,
"username": "joe1234567890",
"email": "",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: Your passwords didn't match.\n"
"Superuser created successfully.",
)
test(self)
def test_validation_blank_password_entered(self):
"""
Creation should fail if the user enters blank passwords.
"""
new_io = StringIO()
# The first two passwords are empty strings, but the second two are
# valid.
entered_passwords = ["", "", "password2", "password2"]
def blank_passwords_then_valid():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": blank_passwords_then_valid,
"username": "joe1234567890",
"email": "",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: Blank passwords aren't allowed.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.NoPasswordUser")
def test_usermodel_without_password(self):
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
username="username",
)
self.assertEqual(new_io.getvalue().strip(), "Superuser created successfully.")
@override_settings(AUTH_USER_MODEL="auth_tests.NoPasswordUser")
def test_usermodel_without_password_interactive(self):
new_io = StringIO()
@mock_inputs({"username": "username"})
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(), "Superuser created successfully."
)
test(self)
@mock.patch.dict(
os.environ,
{
"DJANGO_SUPERUSER_PASSWORD": "test_password",
"DJANGO_SUPERUSER_USERNAME": "test_superuser",
"DJANGO_SUPERUSER_EMAIL": "[email protected]",
"DJANGO_SUPERUSER_FIRST_NAME": "ignored_first_name",
},
)
def test_environment_variable_non_interactive(self):
call_command("createsuperuser", interactive=False, verbosity=0)
user = User.objects.get(username="test_superuser")
self.assertEqual(user.email, "[email protected]")
self.assertTrue(user.check_password("test_password"))
# Environment variables are ignored for non-required fields.
self.assertEqual(user.first_name, "")
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2m")
def test_environment_variable_m2m_non_interactive(self):
new_io = StringIO()
org_id_1 = Organization.objects.create(name="Organization 1").pk
org_id_2 = Organization.objects.create(name="Organization 2").pk
with mock.patch.dict(
os.environ,
{
"DJANGO_SUPERUSER_ORGS": f"{org_id_1},{org_id_2}",
},
):
call_command(
"createsuperuser",
interactive=False,
username="joe",
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
user = CustomUserWithM2M._default_manager.get(username="joe")
self.assertEqual(user.orgs.count(), 2)
@mock.patch.dict(
os.environ,
{
"DJANGO_SUPERUSER_USERNAME": "test_superuser",
"DJANGO_SUPERUSER_EMAIL": "[email protected]",
},
)
def test_ignore_environment_variable_non_interactive(self):
# Environment variables are ignored in non-interactive mode if they are
# provided via command line arguments.
call_command(
"createsuperuser",
interactive=False,
username="cmd_superuser",
email="[email protected]",
verbosity=0,
)
user = User.objects.get(username="cmd_superuser")
self.assertEqual(user.email, "[email protected]")
self.assertFalse(user.has_usable_password())
@mock.patch.dict(
os.environ,
{
"DJANGO_SUPERUSER_PASSWORD": "test_password",
"DJANGO_SUPERUSER_USERNAME": "test_superuser",
"DJANGO_SUPERUSER_EMAIL": "[email protected]",
},
)
def test_ignore_environment_variable_interactive(self):
# Environment variables are ignored in interactive mode.
@mock_inputs({"password": "cmd_password"})
def test(self):
call_command(
"createsuperuser",
interactive=True,
username="cmd_superuser",
email="[email protected]",
stdin=MockTTY(),
verbosity=0,
)
user = User.objects.get(username="cmd_superuser")
self.assertEqual(user.email, "[email protected]")
self.assertTrue(user.check_password("cmd_password"))
test(self)
class MultiDBCreatesuperuserTestCase(TestCase):
databases = {"default", "other"}
def test_createsuperuser_command_with_database_option(self):
"""
createsuperuser --database should operate on the specified DB.
"""
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
username="joe",
email="[email protected]",
database="other",
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
user = User.objects.using("other").get(username="joe")
self.assertEqual(user.email, "[email protected]")
def test_createsuperuser_command_suggested_username_with_database_option(self):
default_username = get_default_username(database="other")
qs = User.objects.using("other")
@mock_inputs({"password": "nopasswd", "username": "", "email": ""})
def test_other_create_with_suggested_username(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
verbosity=0,
database="other",
)
self.assertIs(qs.filter(username=default_username).exists(), True)
test_other_create_with_suggested_username(self)
@mock_inputs({"password": "nopasswd", "Username: ": "other", "email": ""})
def test_other_no_suggestion(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
verbosity=0,
database="other",
)
self.assertIs(qs.filter(username="other").exists(), True)
test_other_no_suggestion(self)
class CreatePermissionsTests(TestCase):
def setUp(self):
self._original_permissions = Permission._meta.permissions[:]
self._original_default_permissions = Permission._meta.default_permissions
self.app_config = apps.get_app_config("auth")
def tearDown(self):
Permission._meta.permissions = self._original_permissions
Permission._meta.default_permissions = self._original_default_permissions
ContentType.objects.clear_cache()
def test_default_permissions(self):
permission_content_type = ContentType.objects.get_by_natural_key(
"auth", "permission"
)
Permission._meta.permissions = [
("my_custom_permission", "Some permission"),
]
create_permissions(self.app_config, verbosity=0)
# view/add/change/delete permission by default + custom permission
self.assertEqual(
Permission.objects.filter(
content_type=permission_content_type,
).count(),
5,
)
Permission.objects.filter(content_type=permission_content_type).delete()
Permission._meta.default_permissions = []
create_permissions(self.app_config, verbosity=0)
# custom permission only since default permissions is empty
self.assertEqual(
Permission.objects.filter(
content_type=permission_content_type,
).count(),
1,
)
def test_unavailable_models(self):
"""
#24075 - Permissions shouldn't be created or deleted if the ContentType
or Permission models aren't available.
"""
state = migrations.state.ProjectState()
# Unavailable contenttypes.ContentType
with self.assertNumQueries(0):
create_permissions(self.app_config, verbosity=0, apps=state.apps)
# Unavailable auth.Permission
state = migrations.state.ProjectState(real_apps={"contenttypes"})
with self.assertNumQueries(0):
create_permissions(self.app_config, verbosity=0, apps=state.apps)
def test_create_permissions_checks_contenttypes_created(self):
"""
`post_migrate` handler ordering isn't guaranteed. Simulate a case
where create_permissions() is called before create_contenttypes().
"""
# Warm the manager cache.
ContentType.objects.get_for_model(Group)
# Apply a deletion as if e.g. a database 'flush' had been executed.
ContentType.objects.filter(app_label="auth", model="group").delete()
# This fails with a foreign key constraint without the fix.
create_permissions(apps.get_app_config("auth"), interactive=False, verbosity=0)
def test_permission_with_proxy_content_type_created(self):
"""
A proxy model's permissions use its own content type rather than the
content type of the concrete model.
"""
opts = UserProxy._meta
codename = get_permission_codename("add", opts)
self.assertTrue(
Permission.objects.filter(
content_type__model=opts.model_name,
content_type__app_label=opts.app_label,
codename=codename,
).exists()
)
@override_settings(
MIGRATION_MODULES=dict(
settings.MIGRATION_MODULES,
auth_tests="auth_tests.operations_migrations",
),
)
class PermissionRenameOperationsTests(TransactionTestCase):
available_apps = [
"django.contrib.contenttypes",
"django.contrib.auth",
"auth_tests",
]
databases = {"default", "other"}
def setUp(self):
app_config = apps.get_app_config("auth_tests")
models.signals.post_migrate.connect(
self.assertOperationsInjected, sender=app_config
)
self.addCleanup(
models.signals.post_migrate.disconnect,
self.assertOperationsInjected,
sender=app_config,
)
def assertOperationsInjected(self, plan, **kwargs):
for migration, _backward in plan:
operations = iter(migration.operations)
for operation in operations:
if isinstance(operation, migrations.RenameModel):
next_operation = next(operations)
self.assertIsInstance(next_operation, RenamePermission)
self.assertEqual(next_operation.app_label, migration.app_label)
self.assertEqual(next_operation.old_model, operation.old_name)
self.assertEqual(next_operation.new_model, operation.new_name)
def test_permission_rename(self):
ct = ContentType.objects.create(app_label="auth_tests", model="oldmodel")
actions = ["add", "change", "delete", "view"]
for action in actions:
Permission.objects.create(
codename=f"{action}_oldmodel",
name=f"Can {action} old model",
content_type=ct,
)
call_command("migrate", "auth_tests", verbosity=0)
for action in actions:
self.assertFalse(
Permission.objects.filter(codename=f"{action}_oldmodel").exists()
)
self.assertTrue(
Permission.objects.filter(codename=f"{action}_newmodel").exists()
)
call_command(
"migrate",
"auth_tests",
"zero",
database="default",
interactive=False,
verbosity=0,
)
for action in actions:
self.assertTrue(
Permission.objects.filter(codename=f"{action}_oldmodel").exists()
)
self.assertFalse(
Permission.objects.filter(codename=f"{action}_newmodel").exists()
)
def test_permission_rename_other_db(self):
ct = ContentType.objects.using("default").create(
app_label="auth_tests", model="oldmodel"
)
permission = Permission.objects.using("default").create(
codename="add_oldmodel",
name="Can add old model",
content_type=ct,
)
# RenamePermission respects the database.
call_command("migrate", "auth_tests", verbosity=0, database="other")
permission.refresh_from_db()
self.assertEqual(permission.codename, "add_oldmodel")
self.assertFalse(
Permission.objects.using("other").filter(codename="add_oldmodel").exists()
)
self.assertTrue(
Permission.objects.using("other").filter(codename="add_newmodel").exists()
)
@mock.patch(
"django.db.router.allow_migrate_model",
return_value=False,
)
def test_rename_skipped_if_router_disallows(self, _):
ct = ContentType.objects.create(app_label="auth_tests", model="oldmodel")
Permission.objects.create(
codename="change_oldmodel",
name="Can change old model",
content_type=ct,
)
# The rename operation should not be applied when the router disallows it.
app_config = apps.get_app_config("auth_tests")
models.signals.post_migrate.disconnect(
self.assertOperationsInjected, sender=app_config
)
call_command(
"migrate",
"auth_tests",
database="default",
interactive=False,
verbosity=0,
)
self.assertTrue(Permission.objects.filter(codename="change_oldmodel").exists())
self.assertFalse(Permission.objects.filter(codename="change_newmodel").exists())
call_command(
"migrate",
"auth_tests",
"zero",
database="default",
interactive=False,
verbosity=0,
)
def test_rename_backward_does_nothing_if_no_permissions(self):
Permission.objects.filter(content_type__app_label="auth_tests").delete()
call_command(
"migrate",
"auth_tests",
"zero",
database="default",
interactive=False,
verbosity=0,
)
self.assertFalse(
Permission.objects.filter(
codename__in=["change_oldmodel", "change_newmodel"]
).exists()
)
def test_rename_permission_conflict(self):
ct = ContentType.objects.create(app_label="auth_tests", model="oldmodel")
Permission.objects.create(
codename="change_newmodel",
name="Can change new model",
content_type=ct,
)
Permission.objects.create(
codename="change_oldmodel",
name="Can change old model",
content_type=ct,
)
call_command(
"migrate",
"auth_tests",
database="default",
interactive=False,
verbosity=0,
)
self.assertTrue(
Permission.objects.filter(
codename="change_oldmodel",
name="Can change old model",
).exists()
)
self.assertEqual(
Permission.objects.filter(
codename="change_newmodel",
name="Can change new model",
).count(),
1,
)
call_command(
"migrate",
"auth_tests",
"zero",
database="default",
interactive=False,
verbosity=0,
)
class DefaultDBRouter:
"""Route all writes to default."""
def db_for_write(self, model, **hints):
return "default"
@override_settings(DATABASE_ROUTERS=[DefaultDBRouter()])
class CreatePermissionsMultipleDatabasesTests(TestCase):
databases = {"default", "other"}
def test_set_permissions_fk_to_using_parameter(self):
Permission.objects.using("other").delete()
with self.assertNumQueries(4, using="other") as captured_queries:
create_permissions(apps.get_app_config("auth"), verbosity=0, using="other")
self.assertIn("INSERT INTO", captured_queries[-1]["sql"].upper())
self.assertGreater(Permission.objects.using("other").count(), 0)
|
./temp_repos/django/django/contrib/sites/management.py
|
./temp_repos/django/tests/auth_tests/test_management.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.db, django.conf, django.apps, django.core.management.color
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern, as in the sketch below.
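For illustration only, a minimal sketch that satisfies these requirements; the `Service` class and its `registry` collaborator are hypothetical stand-ins, since the real class under test could not be resolved:
import unittest
from unittest import mock
class Service:
    """Hypothetical stand-in for the class under test."""
    def load(self, registry):
        return registry.get_app_config("auth").label
class ServiceTests(unittest.TestCase):
    def test_load_success(self):
        # Arrange: mock the external registry dependency.
        registry = mock.MagicMock()
        registry.get_app_config.return_value = mock.MagicMock(label="auth")
        # Act
        result = Service().load(registry)
        # Assert
        self.assertEqual(result, "auth")
        registry.get_app_config.assert_called_once_with("auth")
    def test_load_failure(self):
        # Arrange: the registry raises LookupError for unknown app labels.
        registry = mock.MagicMock()
        registry.get_app_config.side_effect = LookupError("No such app")
        # Act / Assert: the error propagates to the caller.
        with self.assertRaises(LookupError):
            Service().load(registry)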
|
Unknown
|
python
|
import base64
import binascii
import functools
import hashlib
import importlib
import math
import warnings
from asgiref.sync import sync_to_async
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.dispatch import receiver
from django.utils.crypto import (
RANDOM_STRING_CHARS,
constant_time_compare,
get_random_string,
pbkdf2,
)
from django.utils.encoding import force_bytes, force_str
from django.utils.module_loading import import_string
from django.utils.translation import gettext_noop as _
UNUSABLE_PASSWORD_PREFIX = "!" # This will never be a valid encoded hash
UNUSABLE_PASSWORD_SUFFIX_LENGTH = (
40 # number of random chars to add after UNUSABLE_PASSWORD_PREFIX
)
def is_password_usable(encoded):
"""
Return True if this password wasn't generated by
User.set_unusable_password(), i.e. make_password(None).
"""
return encoded is None or not encoded.startswith(UNUSABLE_PASSWORD_PREFIX)
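# For example, the "!" + 40-character strings produced by make_password(None)
# are unusable, while any other string (even gibberish no hasher can parse)
# counts as usable; see test_is_password_usable in the tests below.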
def verify_password(password, encoded, preferred="default"):
"""
Return two booleans. The first is whether the raw password matches the
three-part encoded digest, and the second is whether the stored hash
should be regenerated.
"""
fake_runtime = password is None or not is_password_usable(encoded)
preferred = get_hasher(preferred)
try:
hasher = identify_hasher(encoded)
except ValueError:
# encoded is gibberish or uses a hasher that's no longer installed.
fake_runtime = True
if fake_runtime:
# Run the default password hasher once to reduce the timing difference
# between an existing user with an unusable password and a nonexistent
# user or missing hasher (similar to #20760).
make_password(get_random_string(UNUSABLE_PASSWORD_SUFFIX_LENGTH))
return False, False
hasher_changed = hasher.algorithm != preferred.algorithm
must_update = hasher_changed or preferred.must_update(encoded)
is_correct = hasher.verify(password, encoded)
# If the hasher didn't change (we don't protect against enumeration if it
# does) and the password should get updated, try to close the timing gap
# between the work factor of the current encoded password and the default
# work factor.
if not is_correct and not hasher_changed and must_update:
hasher.harden_runtime(password, encoded)
return is_correct, must_update
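# Illustrative usage (a sketch; values depend on settings and the stored hash):
#   is_correct, must_update = verify_password("s3cret", encoded)
# is_correct reports whether the raw password matched; must_update reports
# whether the stored hash should be re-encoded, e.g. because the preferred
# hasher or its work factor changed since the hash was created.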
def check_password(password, encoded, setter=None, preferred="default"):
"""
Return a boolean indicating whether the raw password matches the
three-part encoded digest.
If setter is specified, it'll be called when you need to regenerate the
password.
"""
is_correct, must_update = verify_password(password, encoded, preferred=preferred)
if setter and is_correct and must_update:
setter(password)
return is_correct
async def acheck_password(password, encoded, setter=None, preferred="default"):
"""See check_password()."""
is_correct, must_update = await sync_to_async(
verify_password,
thread_sensitive=False,
)(password, encoded, preferred=preferred)
if setter and is_correct and must_update:
await setter(password)
return is_correct
def make_password(password, salt=None, hasher="default"):
"""
Turn a plain-text password into a hash for database storage.
Same as encode() but generates a new random salt. If password is None,
return a concatenation of UNUSABLE_PASSWORD_PREFIX and a random string,
which disallows logins. The additional random string reduces the chance
of gaining access to staff or superuser accounts. See ticket #20079 for
more info.
"""
if password is None:
return UNUSABLE_PASSWORD_PREFIX + get_random_string(
UNUSABLE_PASSWORD_SUFFIX_LENGTH
)
if not isinstance(password, (bytes, str)):
raise TypeError(
"Password must be a string or bytes, got %s." % type(password).__qualname__
)
hasher = get_hasher(hasher)
salt = salt or hasher.salt()
return hasher.encode(password, salt)
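# Illustrative results (salts are random, so exact hashes differ per call):
#   make_password("s3cret")  -> "pbkdf2_sha256$<iterations>$<salt>$<b64 hash>"
#   make_password(None)      -> "!" followed by 40 random characters, which
#                               check_password() always rejects.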
@functools.lru_cache
def get_hashers():
hashers = []
for hasher_path in settings.PASSWORD_HASHERS:
hasher_cls = import_string(hasher_path)
hasher = hasher_cls()
if not getattr(hasher, "algorithm"):
raise ImproperlyConfigured(
"hasher doesn't specify an algorithm name: %s" % hasher_path
)
hashers.append(hasher)
return hashers
@functools.lru_cache
def get_hashers_by_algorithm():
return {hasher.algorithm: hasher for hasher in get_hashers()}
@receiver(setting_changed)
def reset_hashers(*, setting, **kwargs):
if setting == "PASSWORD_HASHERS":
get_hashers.cache_clear()
get_hashers_by_algorithm.cache_clear()
def get_hasher(algorithm="default"):
"""
Return an instance of a loaded password hasher.
If algorithm is 'default', return the default hasher. Lazily import hashers
specified in the project's settings file if needed.
"""
if hasattr(algorithm, "algorithm"):
return algorithm
elif algorithm == "default":
return get_hashers()[0]
else:
hashers = get_hashers_by_algorithm()
try:
return hashers[algorithm]
except KeyError:
raise ValueError(
"Unknown password hashing algorithm '%s'. "
"Did you specify it in the PASSWORD_HASHERS "
"setting?" % algorithm
)
def identify_hasher(encoded):
"""
Return an instance of a loaded password hasher.
Identify hasher algorithm by examining encoded hash, and call
get_hasher() to return hasher. Raise ValueError if
algorithm cannot be identified, or if hasher is not loaded.
"""
# Ancient versions of Django created plain MD5 passwords and accepted
# MD5 passwords with an empty salt.
if (len(encoded) == 32 and "$" not in encoded) or (
len(encoded) == 37 and encoded.startswith("md5$$")
):
algorithm = "unsalted_md5"
# Ancient versions of Django accepted SHA1 passwords with an empty salt.
elif len(encoded) == 46 and encoded.startswith("sha1$$"):
algorithm = "unsalted_sha1"
else:
algorithm = encoded.split("$", 1)[0]
return get_hasher(algorithm)
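# For example, "pbkdf2_sha256$1500000$salt$hash" is identified by its
# "pbkdf2_sha256" prefix, while a bare 32-character hex string containing
# no "$" falls back to the legacy "unsalted_md5" hasher.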
def mask_hash(hash, show=6, char="*"):
"""
Return the given hash with only the first ``show`` characters visible. The
rest are masked with ``char`` for security reasons.
"""
masked = hash[:show]
masked += char * len(hash[show:])
return masked
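# Worked example: mask_hash("abcdef123456") keeps the first six characters
# and masks the remaining six -> "abcdef******".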
def must_update_salt(salt, expected_entropy):
# Each character in the salt provides log_2(len(alphabet)) bits of entropy.
return len(salt) * math.log2(len(RANDOM_STRING_CHARS)) < expected_entropy
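# Worked example: RANDOM_STRING_CHARS contains 62 symbols, so each character
# contributes log2(62) ~= 5.95 bits. A 12-character salt holds ~71.5 bits
# (< 128, so it must be updated), while the 22-character salts produced by
# BasePasswordHasher.salt() hold ~131 bits.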
class BasePasswordHasher:
"""
Abstract base class for password hashers
When creating your own hasher, you need to override algorithm,
verify(), encode() and safe_summary().
PasswordHasher objects are immutable.
"""
algorithm = None
library = None
salt_entropy = 128
def _load_library(self):
if self.library is not None:
if isinstance(self.library, (tuple, list)):
name, mod_path = self.library
else:
mod_path = self.library
try:
module = importlib.import_module(mod_path)
except ImportError as e:
raise ValueError(
"Couldn't load %r algorithm library: %s"
% (self.__class__.__name__, e)
)
return module
raise ValueError(
"Hasher %r doesn't specify a library attribute" % self.__class__.__name__
)
def salt(self):
"""
Generate a cryptographically secure nonce salt in ASCII with an entropy
of at least `salt_entropy` bits.
"""
# Each character in the salt provides
# log_2(len(alphabet)) bits of entropy.
char_count = math.ceil(self.salt_entropy / math.log2(len(RANDOM_STRING_CHARS)))
return get_random_string(char_count, allowed_chars=RANDOM_STRING_CHARS)
def verify(self, password, encoded):
"""Check if the given password is correct."""
raise NotImplementedError(
"subclasses of BasePasswordHasher must provide a verify() method"
)
def _check_encode_args(self, password, salt):
if password is None:
raise TypeError("password must be provided.")
if not salt or "$" in force_str(salt): # salt can be str or bytes.
raise ValueError("salt must be provided and cannot contain $.")
def encode(self, password, salt):
"""
Create an encoded database value.
The result is normally formatted as "algorithm$salt$hash" and
must be fewer than 128 characters.
"""
raise NotImplementedError(
"subclasses of BasePasswordHasher must provide an encode() method"
)
def decode(self, encoded):
"""
Return a decoded database value.
The result is a dictionary and should contain `algorithm`, `hash`, and
`salt`. Extra keys can be algorithm specific like `iterations` or
`work_factor`.
"""
raise NotImplementedError(
"subclasses of BasePasswordHasher must provide a decode() method."
)
def safe_summary(self, encoded):
"""
Return a summary of safe values.
The result is a dictionary and will be used where the password field
must be displayed to construct a safe representation of the password.
"""
raise NotImplementedError(
"subclasses of BasePasswordHasher must provide a safe_summary() method"
)
def must_update(self, encoded):
return False
def harden_runtime(self, password, encoded):
"""
Bridge the runtime gap between the work factor supplied in `encoded`
and the work factor suggested by this hasher.
Taking PBKDF2 as an example, if `encoded` contains 20000 iterations and
`self.iterations` is 30000, this method should run password through
another 10000 iterations of PBKDF2. Similar approaches should exist
for any hasher that has a work factor. If not, this method should be
defined as a no-op to silence the warning.
"""
warnings.warn(
"subclasses of BasePasswordHasher should provide a harden_runtime() method"
)
class PBKDF2PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the PBKDF2 algorithm (recommended)
Configured to use PBKDF2 + HMAC + SHA256.
The result is a 64-byte binary string. Iterations may be changed
safely, but you must rename the algorithm if you change SHA256.
"""
algorithm = "pbkdf2_sha256"
iterations = 1_500_000
digest = hashlib.sha256
def encode(self, password, salt, iterations=None):
self._check_encode_args(password, salt)
iterations = iterations or self.iterations
password = force_str(password)
salt = force_str(salt)
hash = pbkdf2(password, salt, iterations, digest=self.digest)
hash = base64.b64encode(hash).decode("ascii").strip()
return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)
def decode(self, encoded):
algorithm, iterations, salt, hash = encoded.split("$", 3)
assert algorithm == self.algorithm
return {
"algorithm": algorithm,
"hash": hash,
"iterations": int(iterations),
"salt": salt,
}
def verify(self, password, encoded):
decoded = self.decode(encoded)
encoded_2 = self.encode(password, decoded["salt"], decoded["iterations"])
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("iterations"): decoded["iterations"],
_("salt"): mask_hash(decoded["salt"]),
_("hash"): mask_hash(decoded["hash"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
update_salt = must_update_salt(decoded["salt"], self.salt_entropy)
return (decoded["iterations"] != self.iterations) or update_salt
def harden_runtime(self, password, encoded):
decoded = self.decode(encoded)
extra_iterations = self.iterations - decoded["iterations"]
if extra_iterations > 0:
self.encode(password, decoded["salt"], extra_iterations)
class PBKDF2SHA1PasswordHasher(PBKDF2PasswordHasher):
"""
Alternate PBKDF2 hasher which uses SHA1, the default PRF
recommended by PKCS #5. This is compatible with other
implementations of PBKDF2, such as openssl's
PKCS5_PBKDF2_HMAC_SHA1().
"""
algorithm = "pbkdf2_sha1"
digest = hashlib.sha1
class Argon2PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the argon2 algorithm.
This is the winner of the Password Hashing Competition 2013-2015
(https://password-hashing.net). It requires the argon2-cffi library which
depends on native C code and might cause portability issues.
"""
algorithm = "argon2"
library = "argon2"
time_cost = 2
memory_cost = 102400
parallelism = 8
def encode(self, password, salt):
argon2 = self._load_library()
params = self.params()
data = argon2.low_level.hash_secret(
force_bytes(password),
force_bytes(salt),
time_cost=params.time_cost,
memory_cost=params.memory_cost,
parallelism=params.parallelism,
hash_len=params.hash_len,
type=params.type,
)
return self.algorithm + data.decode("ascii")
def decode(self, encoded):
argon2 = self._load_library()
algorithm, rest = encoded.split("$", 1)
assert algorithm == self.algorithm
params = argon2.extract_parameters("$" + rest)
variety, *_, b64salt, hash = rest.split("$")
# Add padding.
b64salt += "=" * (-len(b64salt) % 4)
salt = base64.b64decode(b64salt).decode("latin1")
return {
"algorithm": algorithm,
"hash": hash,
"memory_cost": params.memory_cost,
"parallelism": params.parallelism,
"salt": salt,
"time_cost": params.time_cost,
"variety": variety,
"version": params.version,
"params": params,
}
def verify(self, password, encoded):
argon2 = self._load_library()
algorithm, rest = encoded.split("$", 1)
assert algorithm == self.algorithm
try:
return argon2.PasswordHasher().verify("$" + rest, password)
except argon2.exceptions.VerificationError:
return False
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("variety"): decoded["variety"],
_("version"): decoded["version"],
_("memory cost"): decoded["memory_cost"],
_("time cost"): decoded["time_cost"],
_("parallelism"): decoded["parallelism"],
_("salt"): mask_hash(decoded["salt"]),
_("hash"): mask_hash(decoded["hash"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
current_params = decoded["params"]
new_params = self.params()
# Set salt_len to the salt_len of the current parameters because salt
# is explicitly passed to argon2.
new_params.salt_len = current_params.salt_len
update_salt = must_update_salt(decoded["salt"], self.salt_entropy)
return (current_params != new_params) or update_salt
def harden_runtime(self, password, encoded):
# The runtime for Argon2 is too complicated to implement a sensible
# hardening algorithm.
pass
def params(self):
argon2 = self._load_library()
# salt_len is a noop, because we provide our own salt.
return argon2.Parameters(
type=argon2.low_level.Type.ID,
version=argon2.low_level.ARGON2_VERSION,
salt_len=argon2.DEFAULT_RANDOM_SALT_LENGTH,
hash_len=argon2.DEFAULT_HASH_LENGTH,
time_cost=self.time_cost,
memory_cost=self.memory_cost,
parallelism=self.parallelism,
)
class BCryptSHA256PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the bcrypt algorithm (recommended)
This is considered by many to be the most secure algorithm but you
must first install the bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
"""
algorithm = "bcrypt_sha256"
digest = hashlib.sha256
library = ("bcrypt", "bcrypt")
rounds = 12
def salt(self):
bcrypt = self._load_library()
return bcrypt.gensalt(self.rounds)
def encode(self, password, salt):
bcrypt = self._load_library()
password = force_bytes(password)
salt = force_bytes(salt)
# Hash the password prior to using bcrypt to prevent password
# truncation as described in #20138.
if self.digest is not None:
# Hex-encode the digest with binascii.hexlify() so bcrypt receives
# plain ASCII bytes that contain no NUL characters.
password = binascii.hexlify(self.digest(password).digest())
data = bcrypt.hashpw(password, salt)
return "%s$%s" % (self.algorithm, data.decode("ascii"))
def decode(self, encoded):
algorithm, empty, algostr, work_factor, data = encoded.split("$", 4)
assert algorithm == self.algorithm
return {
"algorithm": algorithm,
"algostr": algostr,
"checksum": data[22:],
"salt": data[:22],
"work_factor": int(work_factor),
}
def verify(self, password, encoded):
algorithm, data = encoded.split("$", 1)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, data.encode("ascii"))
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("work factor"): decoded["work_factor"],
_("salt"): mask_hash(decoded["salt"]),
_("checksum"): mask_hash(decoded["checksum"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
return decoded["work_factor"] != self.rounds
def harden_runtime(self, password, encoded):
_, data = encoded.split("$", 1)
salt = data[:29] # Length of the salt in bcrypt.
rounds = data.split("$")[2]
# work factor is logarithmic, adding one doubles the load.
diff = 2 ** (self.rounds - int(rounds)) - 1
while diff > 0:
self.encode(password, salt.encode("ascii"))
diff -= 1
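# Worked example: if the stored hash used 4 rounds and self.rounds is 6,
# diff = 2**(6 - 4) - 1 = 3, so encode() runs three more times; together with
# the verification that already ran, this approximates the cost of 6 rounds.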
class BCryptPasswordHasher(BCryptSHA256PasswordHasher):
"""
Secure password hashing using the bcrypt algorithm
This is considered by many to be the most secure algorithm but you
must first install the bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
This hasher does not first hash the password, which means it is subject
to bcrypt's 72-byte password truncation. Most use cases should prefer
the BCryptSHA256PasswordHasher.
"""
algorithm = "bcrypt"
digest = None
class ScryptPasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the Scrypt algorithm.
"""
algorithm = "scrypt"
block_size = 8
maxmem = 0
parallelism = 5
work_factor = 2**14
def encode(self, password, salt, n=None, r=None, p=None):
self._check_encode_args(password, salt)
n = n or self.work_factor
r = r or self.block_size
p = p or self.parallelism
hash_ = hashlib.scrypt(
password=force_bytes(password),
salt=force_bytes(salt),
n=n,
r=r,
p=p,
maxmem=self.maxmem,
dklen=64,
)
hash_ = base64.b64encode(hash_).decode("ascii").strip()
return "%s$%d$%s$%d$%d$%s" % (self.algorithm, n, force_str(salt), r, p, hash_)
def decode(self, encoded):
algorithm, work_factor, salt, block_size, parallelism, hash_ = encoded.split(
"$", 6
)
assert algorithm == self.algorithm
return {
"algorithm": algorithm,
"work_factor": int(work_factor),
"salt": salt,
"block_size": int(block_size),
"parallelism": int(parallelism),
"hash": hash_,
}
def verify(self, password, encoded):
decoded = self.decode(encoded)
encoded_2 = self.encode(
password,
decoded["salt"],
decoded["work_factor"],
decoded["block_size"],
decoded["parallelism"],
)
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("work factor"): decoded["work_factor"],
_("block size"): decoded["block_size"],
_("parallelism"): decoded["parallelism"],
_("salt"): mask_hash(decoded["salt"]),
_("hash"): mask_hash(decoded["hash"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
return (
decoded["work_factor"] != self.work_factor
or decoded["block_size"] != self.block_size
or decoded["parallelism"] != self.parallelism
)
def harden_runtime(self, password, encoded):
# The runtime for Scrypt is too complicated to implement a sensible
# hardening algorithm.
pass
class MD5PasswordHasher(BasePasswordHasher):
"""
The Salted MD5 password hashing algorithm (not recommended)
"""
algorithm = "md5"
def encode(self, password, salt):
self._check_encode_args(password, salt)
password = force_str(password)
salt = force_str(salt)
hash = hashlib.md5((salt + password).encode()).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
def decode(self, encoded):
algorithm, salt, hash = encoded.split("$", 2)
assert algorithm == self.algorithm
return {
"algorithm": algorithm,
"hash": hash,
"salt": salt,
}
def verify(self, password, encoded):
decoded = self.decode(encoded)
encoded_2 = self.encode(password, decoded["salt"])
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("salt"): mask_hash(decoded["salt"], show=2),
_("hash"): mask_hash(decoded["hash"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
return must_update_salt(decoded["salt"], self.salt_entropy)
def harden_runtime(self, password, encoded):
pass
|
from contextlib import contextmanager
from unittest import mock, skipUnless
from django.conf.global_settings import PASSWORD_HASHERS
from django.contrib.auth.hashers import (
UNUSABLE_PASSWORD_PREFIX,
UNUSABLE_PASSWORD_SUFFIX_LENGTH,
Argon2PasswordHasher,
BasePasswordHasher,
BCryptPasswordHasher,
BCryptSHA256PasswordHasher,
MD5PasswordHasher,
PBKDF2PasswordHasher,
PBKDF2SHA1PasswordHasher,
ScryptPasswordHasher,
acheck_password,
check_password,
get_hasher,
identify_hasher,
is_password_usable,
make_password,
)
from django.test import SimpleTestCase
from django.test.utils import override_settings
try:
import bcrypt
except ImportError:
bcrypt = None
try:
import argon2
except ImportError:
argon2 = None
# scrypt requires OpenSSL 1.1+
try:
import hashlib
scrypt = hashlib.scrypt
except ImportError:
scrypt = None
class PBKDF2SingleIterationHasher(PBKDF2PasswordHasher):
iterations = 1
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPass(SimpleTestCase):
def test_simple(self):
encoded = make_password("lètmein")
self.assertTrue(encoded.startswith("pbkdf2_sha256$"))
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
# Blank passwords
blank_encoded = make_password("")
self.assertTrue(blank_encoded.startswith("pbkdf2_sha256$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
async def test_acheck_password(self):
encoded = make_password("lètmein")
self.assertIs(await acheck_password("lètmein", encoded), True)
self.assertIs(await acheck_password("lètmeinz", encoded), False)
# Blank passwords.
blank_encoded = make_password("")
self.assertIs(await acheck_password("", blank_encoded), True)
self.assertIs(await acheck_password(" ", blank_encoded), False)
def test_bytes(self):
encoded = make_password(b"bytes_password")
self.assertTrue(encoded.startswith("pbkdf2_sha256$"))
self.assertIs(is_password_usable(encoded), True)
self.assertIs(check_password(b"bytes_password", encoded), True)
def test_invalid_password(self):
msg = "Password must be a string or bytes, got int."
with self.assertRaisesMessage(TypeError, msg):
make_password(1)
def test_pbkdf2(self):
encoded = make_password("lètmein", "seasalt", "pbkdf2_sha256")
self.assertEqual(
encoded,
"pbkdf2_sha256$1500000$"
"seasalt$P4UiMPVduVWIL/oS1GzH+IofsccjJNM5hUTikBvi5to=",
)
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "pbkdf2_sha256")
# Blank passwords
blank_encoded = make_password("", "seasalt", "pbkdf2_sha256")
self.assertTrue(blank_encoded.startswith("pbkdf2_sha256$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
# Salt entropy check.
hasher = get_hasher("pbkdf2_sha256")
encoded_weak_salt = make_password("lètmein", "iodizedsalt", "pbkdf2_sha256")
encoded_strong_salt = make_password("lètmein", hasher.salt(), "pbkdf2_sha256")
self.assertIs(hasher.must_update(encoded_weak_salt), True)
self.assertIs(hasher.must_update(encoded_strong_salt), False)
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.MD5PasswordHasher"]
)
def test_md5(self):
encoded = make_password("lètmein", "seasalt", "md5")
self.assertEqual(encoded, "md5$seasalt$3f86d0d3d465b7b458c231bf3555c0e3")
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "md5")
# Blank passwords
blank_encoded = make_password("", "seasalt", "md5")
self.assertTrue(blank_encoded.startswith("md5$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
# Salt entropy check.
hasher = get_hasher("md5")
encoded_weak_salt = make_password("lètmein", "iodizedsalt", "md5")
encoded_strong_salt = make_password("lètmein", hasher.salt(), "md5")
self.assertIs(hasher.must_update(encoded_weak_salt), True)
self.assertIs(hasher.must_update(encoded_strong_salt), False)
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcrypt_sha256(self):
encoded = make_password("lètmein", hasher="bcrypt_sha256")
self.assertTrue(is_password_usable(encoded))
self.assertTrue(encoded.startswith("bcrypt_sha256$"))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt_sha256")
# password truncation no longer works
password = (
"VSK0UYV6FFQVZ0KG88DYN9WADAADZO1CTSIVDJUNZSUML6IBX7LN7ZS3R5"
"JGB3RGZ7VI7G7DJQ9NI8BQFSRPTG6UWTTVESA5ZPUN"
)
encoded = make_password(password, hasher="bcrypt_sha256")
self.assertTrue(check_password(password, encoded))
self.assertFalse(check_password(password[:72], encoded))
# Blank passwords
blank_encoded = make_password("", hasher="bcrypt_sha256")
self.assertTrue(blank_encoded.startswith("bcrypt_sha256$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
@skipUnless(bcrypt, "bcrypt not installed")
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.BCryptPasswordHasher"]
)
def test_bcrypt(self):
encoded = make_password("lètmein", hasher="bcrypt")
self.assertTrue(is_password_usable(encoded))
self.assertTrue(encoded.startswith("bcrypt$"))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt")
# Blank passwords
blank_encoded = make_password("", hasher="bcrypt")
self.assertTrue(blank_encoded.startswith("bcrypt$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
@skipUnless(bcrypt, "bcrypt not installed")
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.BCryptPasswordHasher"]
)
def test_bcrypt_upgrade(self):
hasher = get_hasher("bcrypt")
self.assertEqual("bcrypt", hasher.algorithm)
self.assertNotEqual(hasher.rounds, 4)
old_rounds = hasher.rounds
try:
# Generate a password with 4 rounds.
hasher.rounds = 4
encoded = make_password("letmein", hasher="bcrypt")
rounds = hasher.safe_summary(encoded)["work factor"]
self.assertEqual(rounds, 4)
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
# No upgrade is triggered.
self.assertTrue(check_password("letmein", encoded, setter, "bcrypt"))
self.assertFalse(state["upgraded"])
# Revert to the old rounds count and ...
hasher.rounds = old_rounds
# ... check if the password would get updated to the new count.
self.assertTrue(check_password("letmein", encoded, setter, "bcrypt"))
self.assertTrue(state["upgraded"])
finally:
hasher.rounds = old_rounds
@skipUnless(bcrypt, "bcrypt not installed")
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.BCryptPasswordHasher"]
)
def test_bcrypt_harden_runtime(self):
hasher = get_hasher("bcrypt")
self.assertEqual("bcrypt", hasher.algorithm)
with mock.patch.object(hasher, "rounds", 4):
encoded = make_password("letmein", hasher="bcrypt")
with (
mock.patch.object(hasher, "rounds", 6),
mock.patch.object(hasher, "encode", side_effect=hasher.encode),
):
hasher.harden_runtime("wrong_password", encoded)
# Increasing rounds from 4 to 6 quadruples the workload (2**2), so
# hardening should run encode() 3 more times to even out the timing
# (the original encode() call already ran once).
self.assertEqual(hasher.encode.call_count, 3)
# Get the original salt (includes the original workload factor)
algorithm, data = encoded.split("$", 1)
expected_call = (("wrong_password", data[:29].encode()),)
self.assertEqual(hasher.encode.call_args_list, [expected_call] * 3)
def test_unusable(self):
encoded = make_password(None)
self.assertEqual(
len(encoded),
len(UNUSABLE_PASSWORD_PREFIX) + UNUSABLE_PASSWORD_SUFFIX_LENGTH,
)
self.assertFalse(is_password_usable(encoded))
self.assertFalse(check_password(None, encoded))
self.assertFalse(check_password(encoded, encoded))
self.assertFalse(check_password(UNUSABLE_PASSWORD_PREFIX, encoded))
self.assertFalse(check_password("", encoded))
self.assertFalse(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
with self.assertRaisesMessage(ValueError, "Unknown password hashing algorithm"):
identify_hasher(encoded)
# Assert that the unusable passwords actually contain a random part.
# This might fail one day due to a hash collision.
self.assertNotEqual(encoded, make_password(None), "Random password collision?")
def test_unspecified_password(self):
"""
Ensure that specifying no plain password with a valid encoded password
returns `False`.
"""
self.assertFalse(check_password(None, make_password("lètmein")))
def test_bad_algorithm(self):
msg = (
"Unknown password hashing algorithm '%s'. Did you specify it in "
"the PASSWORD_HASHERS setting?"
)
with self.assertRaisesMessage(ValueError, msg % "lolcat"):
make_password("lètmein", hasher="lolcat")
with self.assertRaisesMessage(ValueError, msg % "lolcat"):
identify_hasher("lolcat$salt$hash")
def test_is_password_usable(self):
passwords = ("lètmein_badencoded", "", None)
for password in passwords:
with self.subTest(password=password):
self.assertIs(is_password_usable(password), True)
def test_low_level_pbkdf2(self):
hasher = PBKDF2PasswordHasher()
encoded = hasher.encode("lètmein", "seasalt2")
self.assertEqual(
encoded,
"pbkdf2_sha256$1500000$"
"seasalt2$xWKIh704updzhxL+vMfPbhVsHljK62FyE988AtcoHU4=",
)
self.assertTrue(hasher.verify("lètmein", encoded))
def test_low_level_pbkdf2_sha1(self):
hasher = PBKDF2SHA1PasswordHasher()
encoded = hasher.encode("lètmein", "seasalt2")
self.assertEqual(
encoded, "pbkdf2_sha1$1500000$seasalt2$ep4Ou2hnt2mlvMRsIjUln0Z5MYY="
)
self.assertTrue(hasher.verify("lètmein", encoded))
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcrypt_salt_check(self):
hasher = BCryptPasswordHasher()
encoded = hasher.encode("lètmein", hasher.salt())
self.assertIs(hasher.must_update(encoded), False)
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcryptsha256_salt_check(self):
hasher = BCryptSHA256PasswordHasher()
encoded = hasher.encode("lètmein", hasher.salt())
self.assertIs(hasher.must_update(encoded), False)
@override_settings(
PASSWORD_HASHERS=[
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
"django.contrib.auth.hashers.MD5PasswordHasher",
],
)
def test_upgrade(self):
self.assertEqual("pbkdf2_sha256", get_hasher("default").algorithm)
for algo in ("pbkdf2_sha1", "md5"):
with self.subTest(algo=algo):
encoded = make_password("lètmein", hasher=algo)
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
self.assertTrue(check_password("lètmein", encoded, setter))
self.assertTrue(state["upgraded"])
def test_no_upgrade(self):
encoded = make_password("lètmein")
state = {"upgraded": False}
def setter():
state["upgraded"] = True
self.assertFalse(check_password("WRONG", encoded, setter))
self.assertFalse(state["upgraded"])
@override_settings(
PASSWORD_HASHERS=[
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
"django.contrib.auth.hashers.MD5PasswordHasher",
],
)
def test_no_upgrade_on_incorrect_pass(self):
self.assertEqual("pbkdf2_sha256", get_hasher("default").algorithm)
for algo in ("pbkdf2_sha1", "md5"):
with self.subTest(algo=algo):
encoded = make_password("lètmein", hasher=algo)
state = {"upgraded": False}
def setter():
state["upgraded"] = True
self.assertFalse(check_password("WRONG", encoded, setter))
self.assertFalse(state["upgraded"])
def test_pbkdf2_upgrade(self):
hasher = get_hasher("default")
self.assertEqual("pbkdf2_sha256", hasher.algorithm)
self.assertNotEqual(hasher.iterations, 1)
old_iterations = hasher.iterations
try:
# Generate a password with 1 iteration.
hasher.iterations = 1
encoded = make_password("letmein")
algo, iterations, salt, hash = encoded.split("$", 3)
self.assertEqual(iterations, "1")
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
# No upgrade is triggered
self.assertTrue(check_password("letmein", encoded, setter))
self.assertFalse(state["upgraded"])
# Revert to the old iteration count and ...
hasher.iterations = old_iterations
# ... check if the password would get updated to the new iteration
# count.
self.assertTrue(check_password("letmein", encoded, setter))
self.assertTrue(state["upgraded"])
finally:
hasher.iterations = old_iterations
def test_pbkdf2_harden_runtime(self):
hasher = get_hasher("default")
self.assertEqual("pbkdf2_sha256", hasher.algorithm)
with mock.patch.object(hasher, "iterations", 1):
encoded = make_password("letmein")
with (
mock.patch.object(hasher, "iterations", 6),
mock.patch.object(hasher, "encode", side_effect=hasher.encode),
):
hasher.harden_runtime("wrong_password", encoded)
# Encode should get called once ...
self.assertEqual(hasher.encode.call_count, 1)
# ... with the original salt and 5 iterations.
algorithm, iterations, salt, hash = encoded.split("$", 3)
expected_call = (("wrong_password", salt, 5),)
self.assertEqual(hasher.encode.call_args, expected_call)
def test_pbkdf2_upgrade_new_hasher(self):
hasher = get_hasher("default")
self.assertEqual("pbkdf2_sha256", hasher.algorithm)
self.assertNotEqual(hasher.iterations, 1)
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
with self.settings(
PASSWORD_HASHERS=["auth_tests.test_hashers.PBKDF2SingleIterationHasher"]
):
encoded = make_password("letmein")
algo, iterations, salt, hash = encoded.split("$", 3)
self.assertEqual(iterations, "1")
# No upgrade is triggered
self.assertTrue(check_password("letmein", encoded, setter))
self.assertFalse(state["upgraded"])
# Revert to the old iteration count and check if the password would get
# updated to the new iteration count.
with self.settings(
PASSWORD_HASHERS=[
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"auth_tests.test_hashers.PBKDF2SingleIterationHasher",
]
):
self.assertTrue(check_password("letmein", encoded, setter))
self.assertTrue(state["upgraded"])
def test_check_password_calls_harden_runtime(self):
hasher = get_hasher("default")
encoded = make_password("letmein")
with (
mock.patch.object(hasher, "harden_runtime"),
mock.patch.object(hasher, "must_update", return_value=True),
):
# Correct password supplied, no hardening needed
check_password("letmein", encoded)
self.assertEqual(hasher.harden_runtime.call_count, 0)
# Wrong password supplied, hardening needed
check_password("wrong_password", encoded)
self.assertEqual(hasher.harden_runtime.call_count, 1)
@contextmanager
def assertMakePasswordCalled(self, password, encoded, hasher_side_effect):
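        """
        Assert that check_password() standardizes timing by calling
        make_password() exactly once with a dummy password and never
        reaching hasher.verify().
        """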
hasher = get_hasher("default")
with (
mock.patch(
"django.contrib.auth.hashers.identify_hasher",
side_effect=hasher_side_effect,
) as mock_identify_hasher,
mock.patch(
"django.contrib.auth.hashers.make_password"
) as mock_make_password,
mock.patch(
"django.contrib.auth.hashers.get_random_string",
side_effect=lambda size: "x" * size,
),
mock.patch.object(hasher, "verify"),
):
# Ensure make_password is called to standardize timing.
yield
self.assertEqual(hasher.verify.call_count, 0)
self.assertEqual(mock_identify_hasher.mock_calls, [mock.call(encoded)])
self.assertEqual(
mock_make_password.mock_calls,
[mock.call("x" * UNUSABLE_PASSWORD_SUFFIX_LENGTH)],
)
def test_check_password_calls_make_password_to_fake_runtime(self):
cases = [
(None, None, None), # no plain text password provided
("foo", make_password(password=None), None), # unusable encoded
("letmein", make_password(password="letmein"), ValueError), # valid encoded
]
for password, encoded, hasher_side_effect in cases:
with (
self.subTest(encoded=encoded),
self.assertMakePasswordCalled(password, encoded, hasher_side_effect),
):
check_password(password, encoded)
async def test_acheck_password_calls_make_password_to_fake_runtime(self):
cases = [
(None, None, None), # no plain text password provided
("foo", make_password(password=None), None), # unusable encoded
("letmein", make_password(password="letmein"), ValueError), # valid encoded
]
for password, encoded, hasher_side_effect in cases:
with (
self.subTest(encoded=encoded),
self.assertMakePasswordCalled(password, encoded, hasher_side_effect),
):
await acheck_password(password, encoded)
def test_encode_invalid_salt(self):
hasher_classes = [
MD5PasswordHasher,
PBKDF2PasswordHasher,
PBKDF2SHA1PasswordHasher,
ScryptPasswordHasher,
]
msg = "salt must be provided and cannot contain $."
for hasher_class in hasher_classes:
hasher = hasher_class()
for salt in [None, "", "sea$salt"]:
with self.subTest(hasher_class.__name__, salt=salt):
with self.assertRaisesMessage(ValueError, msg):
hasher.encode("password", salt)
def test_password_and_salt_in_str_and_bytes(self):
hasher_classes = [
MD5PasswordHasher,
PBKDF2PasswordHasher,
PBKDF2SHA1PasswordHasher,
ScryptPasswordHasher,
]
for hasher_class in hasher_classes:
hasher = hasher_class()
with self.subTest(hasher_class.__name__):
passwords = ["password", b"password"]
for password in passwords:
for salt in [hasher.salt(), hasher.salt().encode()]:
encoded = hasher.encode(password, salt)
for password_to_verify in passwords:
self.assertIs(
hasher.verify(password_to_verify, encoded), True
)
@skipUnless(argon2, "argon2-cffi not installed")
def test_password_and_salt_in_str_and_bytes_argon2(self):
hasher = Argon2PasswordHasher()
passwords = ["password", b"password"]
for password in passwords:
for salt in [hasher.salt(), hasher.salt().encode()]:
encoded = hasher.encode(password, salt)
for password_to_verify in passwords:
self.assertIs(hasher.verify(password_to_verify, encoded), True)
@skipUnless(bcrypt, "bcrypt not installed")
def test_password_and_salt_in_str_and_bytes_bcrypt(self):
hasher_classes = [
BCryptPasswordHasher,
BCryptSHA256PasswordHasher,
]
for hasher_class in hasher_classes:
hasher = hasher_class()
with self.subTest(hasher_class.__name__):
passwords = ["password", b"password"]
for password in passwords:
salts = [hasher.salt().decode(), hasher.salt()]
for salt in salts:
encoded = hasher.encode(password, salt)
for password_to_verify in passwords:
self.assertIs(
hasher.verify(password_to_verify, encoded), True
)
def test_encode_password_required(self):
hasher_classes = [
MD5PasswordHasher,
PBKDF2PasswordHasher,
PBKDF2SHA1PasswordHasher,
ScryptPasswordHasher,
]
msg = "password must be provided."
for hasher_class in hasher_classes:
hasher = hasher_class()
with self.subTest(hasher_class.__name__):
with self.assertRaisesMessage(TypeError, msg):
hasher.encode(None, "seasalt")
class BasePasswordHasherTests(SimpleTestCase):
not_implemented_msg = "subclasses of BasePasswordHasher must provide %s() method"
def setUp(self):
self.hasher = BasePasswordHasher()
def test_load_library_no_algorithm(self):
msg = "Hasher 'BasePasswordHasher' doesn't specify a library attribute"
with self.assertRaisesMessage(ValueError, msg):
self.hasher._load_library()
def test_load_library_importerror(self):
PlainHasher = type(
"PlainHasher",
(BasePasswordHasher,),
{"algorithm": "plain", "library": "plain"},
)
msg = "Couldn't load 'PlainHasher' algorithm library: No module named 'plain'"
with self.assertRaisesMessage(ValueError, msg):
PlainHasher()._load_library()
def test_attributes(self):
self.assertIsNone(self.hasher.algorithm)
self.assertIsNone(self.hasher.library)
def test_encode(self):
msg = self.not_implemented_msg % "an encode"
with self.assertRaisesMessage(NotImplementedError, msg):
self.hasher.encode("password", "salt")
def test_decode(self):
msg = self.not_implemented_msg % "a decode"
with self.assertRaisesMessage(NotImplementedError, msg):
self.hasher.decode("encoded")
def test_harden_runtime(self):
msg = (
"subclasses of BasePasswordHasher should provide a harden_runtime() method"
)
with self.assertWarnsMessage(Warning, msg):
self.hasher.harden_runtime("password", "encoded")
def test_must_update(self):
self.assertIs(self.hasher.must_update("encoded"), False)
def test_safe_summary(self):
msg = self.not_implemented_msg % "a safe_summary"
with self.assertRaisesMessage(NotImplementedError, msg):
self.hasher.safe_summary("encoded")
def test_verify(self):
msg = self.not_implemented_msg % "a verify"
with self.assertRaisesMessage(NotImplementedError, msg):
self.hasher.verify("password", "encoded")
@skipUnless(argon2, "argon2-cffi not installed")
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPassArgon2(SimpleTestCase):
def test_argon2(self):
encoded = make_password("lètmein", hasher="argon2")
self.assertTrue(is_password_usable(encoded))
self.assertTrue(encoded.startswith("argon2$argon2id$"))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "argon2")
# Blank passwords
blank_encoded = make_password("", hasher="argon2")
self.assertTrue(blank_encoded.startswith("argon2$argon2id$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
# Old hashes without version attribute
encoded = (
"argon2$argon2i$m=8,t=1,p=1$c29tZXNhbHQ$gwQOXSNhxiOxPOA0+PY10P9QFO"
"4NAYysnqRt1GSQLE55m+2GYDt9FEjPMHhP2Cuf0nOEXXMocVrsJAtNSsKyfg"
)
self.assertTrue(check_password("secret", encoded))
self.assertFalse(check_password("wrong", encoded))
# Old hashes with version attribute.
encoded = "argon2$argon2i$v=19$m=8,t=1,p=1$c2FsdHNhbHQ$YC9+jJCrQhs5R6db7LlN8Q"
self.assertIs(check_password("secret", encoded), True)
self.assertIs(check_password("wrong", encoded), False)
# Salt entropy check.
hasher = get_hasher("argon2")
encoded_weak_salt = make_password("lètmein", "iodizedsalt", "argon2")
encoded_strong_salt = make_password("lètmein", hasher.salt(), "argon2")
self.assertIs(hasher.must_update(encoded_weak_salt), True)
self.assertIs(hasher.must_update(encoded_strong_salt), False)
def test_argon2_decode(self):
salt = "abcdefghijk"
encoded = make_password("lètmein", salt=salt, hasher="argon2")
hasher = get_hasher("argon2")
decoded = hasher.decode(encoded)
self.assertEqual(decoded["memory_cost"], hasher.memory_cost)
self.assertEqual(decoded["parallelism"], hasher.parallelism)
self.assertEqual(decoded["salt"], salt)
self.assertEqual(decoded["time_cost"], hasher.time_cost)
def test_argon2_upgrade(self):
self._test_argon2_upgrade("time_cost", "time cost", 1)
self._test_argon2_upgrade("memory_cost", "memory cost", 64)
self._test_argon2_upgrade("parallelism", "parallelism", 1)
def test_argon2_version_upgrade(self):
hasher = get_hasher("argon2")
state = {"upgraded": False}
encoded = (
"argon2$argon2id$v=19$m=102400,t=2,p=8$Y041dExhNkljRUUy$TMa6A8fPJh"
"CAUXRhJXCXdw"
)
def setter(password):
state["upgraded"] = True
old_m = hasher.memory_cost
old_t = hasher.time_cost
old_p = hasher.parallelism
try:
hasher.memory_cost = 8
hasher.time_cost = 1
hasher.parallelism = 1
self.assertTrue(check_password("secret", encoded, setter, "argon2"))
self.assertTrue(state["upgraded"])
finally:
hasher.memory_cost = old_m
hasher.time_cost = old_t
hasher.parallelism = old_p
def _test_argon2_upgrade(self, attr, summary_key, new_value):
hasher = get_hasher("argon2")
self.assertEqual("argon2", hasher.algorithm)
self.assertNotEqual(getattr(hasher, attr), new_value)
old_value = getattr(hasher, attr)
try:
            # Generate hash with attr set to the new value.
setattr(hasher, attr, new_value)
encoded = make_password("letmein", hasher="argon2")
attr_value = hasher.safe_summary(encoded)[summary_key]
self.assertEqual(attr_value, new_value)
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
# No upgrade is triggered.
self.assertTrue(check_password("letmein", encoded, setter, "argon2"))
self.assertFalse(state["upgraded"])
            # Revert to the old value and ...
setattr(hasher, attr, old_value)
# ... check if the password would get updated to the new count.
self.assertTrue(check_password("letmein", encoded, setter, "argon2"))
self.assertTrue(state["upgraded"])
finally:
setattr(hasher, attr, old_value)
@skipUnless(scrypt, "scrypt not available")
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPassScrypt(SimpleTestCase):
def test_scrypt(self):
encoded = make_password("lètmein", "seasalt", "scrypt")
self.assertEqual(
encoded,
"scrypt$16384$seasalt$8$5$ECMIUp+LMxMSK8xB/IVyba+KYGTI7FTnet025q/1f"
"/vBAVnnP3hdYqJuRi+mJn6ji6ze3Fbb7JEFPKGpuEf5vw==",
)
self.assertIs(is_password_usable(encoded), True)
self.assertIs(check_password("lètmein", encoded), True)
self.assertIs(check_password("lètmeinz", encoded), False)
self.assertEqual(identify_hasher(encoded).algorithm, "scrypt")
# Blank passwords.
blank_encoded = make_password("", "seasalt", "scrypt")
self.assertIs(blank_encoded.startswith("scrypt$"), True)
self.assertIs(is_password_usable(blank_encoded), True)
self.assertIs(check_password("", blank_encoded), True)
self.assertIs(check_password(" ", blank_encoded), False)
def test_scrypt_decode(self):
encoded = make_password("lètmein", "seasalt", "scrypt")
hasher = get_hasher("scrypt")
decoded = hasher.decode(encoded)
tests = [
("block_size", hasher.block_size),
("parallelism", hasher.parallelism),
("salt", "seasalt"),
("work_factor", hasher.work_factor),
]
        for key, expected in tests:
            with self.subTest(key=key):
                self.assertEqual(decoded[key], expected)
def _test_scrypt_upgrade(self, attr, summary_key, new_value):
hasher = get_hasher("scrypt")
self.assertEqual(hasher.algorithm, "scrypt")
self.assertNotEqual(getattr(hasher, attr), new_value)
old_value = getattr(hasher, attr)
try:
# Generate hash with attr set to the new value.
setattr(hasher, attr, new_value)
encoded = make_password("lètmein", "seasalt", "scrypt")
attr_value = hasher.safe_summary(encoded)[summary_key]
self.assertEqual(attr_value, new_value)
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
# No update is triggered.
self.assertIs(check_password("lètmein", encoded, setter, "scrypt"), True)
self.assertIs(state["upgraded"], False)
# Revert to the old value.
setattr(hasher, attr, old_value)
# Password is updated.
self.assertIs(check_password("lètmein", encoded, setter, "scrypt"), True)
self.assertIs(state["upgraded"], True)
finally:
setattr(hasher, attr, old_value)
def test_scrypt_upgrade(self):
tests = [
("work_factor", "work factor", 2**11),
("block_size", "block size", 10),
("parallelism", "parallelism", 2),
]
for attr, summary_key, new_value in tests:
with self.subTest(attr=attr):
self._test_scrypt_upgrade(attr, summary_key, new_value)
|
./temp_repos/django/django/contrib/auth/hashers.py
|
./temp_repos/django/tests/auth_tests/test_hashers.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'BasePasswordHasher'.
Context:
- Class Name: BasePasswordHasher
- Dependencies to Mock: None detected
- Key Imports: django.dispatch, django.utils.module_loading, django.conf, django.utils.crypto, django.core.signals, importlib, math, warnings, django.core.exceptions, functools
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above, if any.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern; see the sketch below.
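A minimal sketch of such a test, assuming only the public BasePasswordHasher API referenced above (the PlainHasher subclass and fake_module are invented for illustration):

import unittest
from unittest import mock

from django.contrib.auth.hashers import BasePasswordHasher


class BasePasswordHasherSketch(unittest.TestCase):
    def setUp(self):
        # Arrange: the abstract base class under test.
        self.hasher = BasePasswordHasher()

    def test_encode_is_abstract(self):
        # Act / Assert: failure scenario -- encode() must be overridden.
        with self.assertRaises(NotImplementedError):
            self.hasher.encode("password", "salt")

    def test_load_library_success(self):
        # Arrange: a subclass declaring a library, with importlib patched
        # inside the hashers module so no real import happens.
        PlainHasher = type(
            "PlainHasher",
            (BasePasswordHasher,),
            {"algorithm": "plain", "library": "plain"},
        )
        fake_module = mock.MagicMock()
        with mock.patch("django.contrib.auth.hashers.importlib") as importlib_mock:
            importlib_mock.import_module.return_value = fake_module
            # Act
            module = PlainHasher()._load_library()
        # Assert: success scenario -- the mocked module is returned.
        importlib_mock.import_module.assert_called_once_with("plain")
        self.assertIs(module, fake_module)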
|
BasePasswordHasher
|
python
|
from types import NoneType
from django.core.exceptions import ValidationError
from django.db import DEFAULT_DB_ALIAS
from django.db.backends.ddl_references import Expressions, Statement, Table
from django.db.models import BaseConstraint, Deferrable, F, Q
from django.db.models.expressions import Exists, ExpressionList
from django.db.models.indexes import IndexExpression
from django.db.models.lookups import PostgresOperatorLookup
from django.db.models.sql import Query
from .utils import CheckPostgresInstalledMixin
__all__ = ["ExclusionConstraint"]
class ExclusionConstraintExpression(IndexExpression):
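    """Index expression rendered as "<expression> WITH <operator>"."""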
template = "%(expressions)s WITH %(operator)s"
class ExclusionConstraint(CheckPostgresInstalledMixin, BaseConstraint):
template = (
"CONSTRAINT %(name)s EXCLUDE USING %(index_type)s "
"(%(expressions)s)%(include)s%(where)s%(deferrable)s"
)
def __init__(
self,
*,
name,
expressions,
index_type=None,
condition=None,
deferrable=None,
include=None,
violation_error_code=None,
violation_error_message=None,
):
if index_type and index_type.lower() not in {"gist", "spgist"}:
raise ValueError(
"Exclusion constraints only support GiST or SP-GiST indexes."
)
if not expressions:
raise ValueError(
"At least one expression is required to define an exclusion "
"constraint."
)
if not all(
isinstance(expr, (list, tuple)) and len(expr) == 2 for expr in expressions
):
raise ValueError("The expressions must be a list of 2-tuples.")
if not isinstance(condition, (NoneType, Q)):
raise ValueError("ExclusionConstraint.condition must be a Q instance.")
if not isinstance(deferrable, (NoneType, Deferrable)):
raise ValueError(
"ExclusionConstraint.deferrable must be a Deferrable instance."
)
if not isinstance(include, (NoneType, list, tuple)):
raise ValueError("ExclusionConstraint.include must be a list or tuple.")
self.expressions = expressions
self.index_type = index_type or "GIST"
self.condition = condition
self.deferrable = deferrable
self.include = tuple(include) if include else ()
super().__init__(
name=name,
violation_error_code=violation_error_code,
violation_error_message=violation_error_message,
)
def _get_expressions(self, schema_editor, query):
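        """
        Wrap each (expression, operator) pair in an
        ExclusionConstraintExpression and resolve the list against the query.
        """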
expressions = []
for idx, (expression, operator) in enumerate(self.expressions):
if isinstance(expression, str):
expression = F(expression)
expression = ExclusionConstraintExpression(expression, operator=operator)
expression.set_wrapper_classes(schema_editor.connection)
expressions.append(expression)
return ExpressionList(*expressions).resolve_expression(query)
def check(self, model, connection):
errors = super().check(model, connection)
references = set()
for expr, _ in self.expressions:
if isinstance(expr, str):
expr = F(expr)
references.update(model._get_expr_references(expr))
errors.extend(self._check_references(model, references))
return errors
def _get_condition_sql(self, compiler, schema_editor, query):
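        """Compile self.condition to SQL with parameters inlined as quoted literals."""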
if self.condition is None:
return None
where = query.build_where(self.condition)
sql, params = where.as_sql(compiler, schema_editor.connection)
return sql % tuple(schema_editor.quote_value(p) for p in params)
def constraint_sql(self, model, schema_editor):
query = Query(model, alias_cols=False)
compiler = query.get_compiler(connection=schema_editor.connection)
expressions = self._get_expressions(schema_editor, query)
table = model._meta.db_table
condition = self._get_condition_sql(compiler, schema_editor, query)
include = [
model._meta.get_field(field_name).column for field_name in self.include
]
return Statement(
self.template,
table=Table(table, schema_editor.quote_name),
name=schema_editor.quote_name(self.name),
index_type=self.index_type,
expressions=Expressions(
table, expressions, compiler, schema_editor.quote_value
),
where=" WHERE (%s)" % condition if condition else "",
include=schema_editor._index_include_sql(model, include),
deferrable=schema_editor._deferrable_constraint_sql(self.deferrable),
)
def create_sql(self, model, schema_editor):
return Statement(
"ALTER TABLE %(table)s ADD %(constraint)s",
table=Table(model._meta.db_table, schema_editor.quote_name),
constraint=self.constraint_sql(model, schema_editor),
)
def remove_sql(self, model, schema_editor):
return schema_editor._delete_constraint_sql(
schema_editor.sql_delete_check,
model,
schema_editor.quote_name(self.name),
)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
kwargs["expressions"] = self.expressions
if self.condition is not None:
kwargs["condition"] = self.condition
if self.index_type.lower() != "gist":
kwargs["index_type"] = self.index_type
if self.deferrable:
kwargs["deferrable"] = self.deferrable
if self.include:
kwargs["include"] = self.include
return path, args, kwargs
def __eq__(self, other):
if isinstance(other, self.__class__):
return (
self.name == other.name
and self.index_type == other.index_type
and self.expressions == other.expressions
and self.condition == other.condition
and self.deferrable == other.deferrable
and self.include == other.include
and self.violation_error_code == other.violation_error_code
and self.violation_error_message == other.violation_error_message
)
return super().__eq__(other)
def __repr__(self):
return "<%s: index_type=%s expressions=%s name=%s%s%s%s%s%s>" % (
self.__class__.__qualname__,
repr(self.index_type),
repr(self.expressions),
repr(self.name),
"" if self.condition is None else " condition=%s" % self.condition,
"" if self.deferrable is None else " deferrable=%r" % self.deferrable,
"" if not self.include else " include=%s" % repr(self.include),
(
""
if self.violation_error_code is None
else " violation_error_code=%r" % self.violation_error_code
),
(
""
if self.violation_error_message is None
or self.violation_error_message == self.default_violation_error_message
else " violation_error_message=%r" % self.violation_error_message
),
)
def validate(self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS):
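        """
        Raise ValidationError if another row conflicts with ``instance``
        under this constraint; fields named in ``exclude`` skip the check.
        """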
queryset = model._default_manager.using(using)
replacement_map = instance._get_field_expression_map(
meta=model._meta, exclude=exclude
)
replacements = {F(field): value for field, value in replacement_map.items()}
lookups = []
for expression, operator in self.expressions:
if isinstance(expression, str):
expression = F(expression)
if exclude and self._expression_refs_exclude(model, expression, exclude):
return
rhs_expression = expression.replace_expressions(replacements)
if hasattr(expression, "get_expression_for_validation"):
expression = expression.get_expression_for_validation()
if hasattr(rhs_expression, "get_expression_for_validation"):
rhs_expression = rhs_expression.get_expression_for_validation()
lookup = PostgresOperatorLookup(lhs=expression, rhs=rhs_expression)
lookup.postgres_operator = operator
lookups.append(lookup)
queryset = queryset.filter(*lookups)
model_class_pk = instance._get_pk_val(model._meta)
if not instance._state.adding and instance._is_pk_set(model._meta):
queryset = queryset.exclude(pk=model_class_pk)
if not self.condition:
if queryset.exists():
raise ValidationError(
self.get_violation_error_message(), code=self.violation_error_code
)
else:
# Ignore constraints with excluded fields in condition.
if exclude and self._expression_refs_exclude(
model, self.condition, exclude
):
return
if (self.condition & Exists(queryset.filter(self.condition))).check(
replacement_map, using=using
):
raise ValidationError(
self.get_violation_error_message(), code=self.violation_error_code
)
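# --- Illustrative usage (not part of this module): a hypothetical model
# declaring an ExclusionConstraint. "Reservation" and its fields are
# invented for this sketch and assume a configured Django project with
# django.contrib.postgres installed; kept commented out so the module
# stays importable as-is.
#
# from django.contrib.postgres.constraints import ExclusionConstraint
# from django.contrib.postgres.fields import DateTimeRangeField, RangeOperators
# from django.db import models
#
# class Reservation(models.Model):
#     timespan = DateTimeRangeField()
#     cancelled = models.BooleanField(default=False)
#
#     class Meta:
#         constraints = [
#             # Reject rows whose timespans overlap, unless cancelled.
#             ExclusionConstraint(
#                 name="exclude_overlapping_reservations",
#                 expressions=[("timespan", RangeOperators.OVERLAPS)],
#                 condition=models.Q(cancelled=False),
#             ),
#         ]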
|
import datetime
from unittest import mock
from django.contrib.postgres.indexes import OpClass
from django.core.checks import Error
from django.core.exceptions import ValidationError
from django.db import IntegrityError, connection, transaction
from django.db.models import (
CASCADE,
CharField,
CheckConstraint,
DateField,
Deferrable,
F,
ForeignKey,
Func,
GeneratedField,
IntegerField,
Model,
Q,
UniqueConstraint,
)
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import Cast, Left, Lower
from django.test import skipUnlessDBFeature
from django.test.utils import isolate_apps
from django.utils import timezone
from . import PostgreSQLTestCase
from .models import HotelReservation, IntegerArrayModel, RangesModel, Room, Scene
try:
from django.contrib.postgres.constraints import ExclusionConstraint
from django.contrib.postgres.fields import (
DateTimeRangeField,
IntegerRangeField,
RangeBoundary,
RangeOperators,
)
from django.db.backends.postgresql.psycopg_any import DateRange, NumericRange
except ImportError:
pass
class SchemaTests(PostgreSQLTestCase):
get_opclass_query = """
SELECT opcname, c.relname FROM pg_opclass AS oc
JOIN pg_index as i on oc.oid = ANY(i.indclass)
JOIN pg_class as c on c.oid = i.indexrelid
WHERE c.relname = %s
"""
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_check_constraint_range_value(self):
constraint_name = "ints_between"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = CheckConstraint(
condition=Q(ints__contained_by=NumericRange(10, 30)),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(20, 50))
RangesModel.objects.create(ints=(10, 30))
def test_check_constraint_array_contains(self):
constraint = CheckConstraint(
condition=Q(field__contains=[1]),
name="array_contains",
)
msg = f"Constraint “{constraint.name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(IntegerArrayModel, IntegerArrayModel())
constraint.validate(IntegerArrayModel, IntegerArrayModel(field=[1]))
def test_check_constraint_array_length(self):
constraint = CheckConstraint(
condition=Q(field__len=1),
name="array_length",
)
msg = f"Constraint “{constraint.name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(IntegerArrayModel, IntegerArrayModel())
constraint.validate(IntegerArrayModel, IntegerArrayModel(field=[1]))
def test_check_constraint_daterange_contains(self):
constraint_name = "dates_contains"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = CheckConstraint(
condition=Q(dates__contains=F("dates_inner")),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
date_1 = datetime.date(2016, 1, 1)
date_2 = datetime.date(2016, 1, 4)
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(
dates=(date_1, date_2),
dates_inner=(date_1, date_2.replace(day=5)),
)
RangesModel.objects.create(
dates=(date_1, date_2),
dates_inner=(date_1, date_2),
)
def test_check_constraint_datetimerange_contains(self):
constraint_name = "timestamps_contains"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = CheckConstraint(
condition=Q(timestamps__contains=F("timestamps_inner")),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
datetime_1 = datetime.datetime(2016, 1, 1)
datetime_2 = datetime.datetime(2016, 1, 2, 12)
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(
timestamps=(datetime_1, datetime_2),
timestamps_inner=(datetime_1, datetime_2.replace(hour=13)),
)
RangesModel.objects.create(
timestamps=(datetime_1, datetime_2),
timestamps_inner=(datetime_1, datetime_2),
)
def test_check_constraint_range_contains(self):
constraint = CheckConstraint(
condition=Q(ints__contains=(1, 5)),
name="ints_contains",
)
msg = f"Constraint “{constraint.name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(RangesModel, RangesModel(ints=(6, 10)))
def test_check_constraint_range_lower_upper(self):
constraint = CheckConstraint(
condition=Q(ints__startswith__gte=0) & Q(ints__endswith__lte=99),
name="ints_range_lower_upper",
)
msg = f"Constraint “{constraint.name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(RangesModel, RangesModel(ints=(-1, 20)))
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(RangesModel, RangesModel(ints=(0, 100)))
constraint.validate(RangesModel, RangesModel(ints=(0, 99)))
def test_check_constraint_range_lower_with_nulls(self):
constraint = CheckConstraint(
condition=Q(ints__isnull=True) | Q(ints__startswith__gte=0),
name="ints_optional_positive_range",
)
constraint.validate(RangesModel, RangesModel())
constraint = CheckConstraint(
condition=Q(ints__startswith__gte=0),
name="ints_positive_range",
)
constraint.validate(RangesModel, RangesModel())
def test_opclass(self):
constraint = UniqueConstraint(
name="test_opclass",
fields=["scene"],
opclasses=["varchar_pattern_ops"],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
self.assertIn(constraint.name, self.get_constraints(Scene._meta.db_table))
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertEqual(
cursor.fetchall(),
[("varchar_pattern_ops", constraint.name)],
)
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Scene, constraint)
self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table))
def test_opclass_multiple_columns(self):
constraint = UniqueConstraint(
name="test_opclass_multiple",
fields=["scene", "setting"],
opclasses=["varchar_pattern_ops", "text_pattern_ops"],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
expected_opclasses = (
("varchar_pattern_ops", constraint.name),
("text_pattern_ops", constraint.name),
)
self.assertCountEqual(cursor.fetchall(), expected_opclasses)
def test_opclass_partial(self):
constraint = UniqueConstraint(
name="test_opclass_partial",
fields=["scene"],
opclasses=["varchar_pattern_ops"],
condition=Q(setting__contains="Sir Bedemir's Castle"),
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertCountEqual(
cursor.fetchall(),
[("varchar_pattern_ops", constraint.name)],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_opclass_include(self):
constraint = UniqueConstraint(
name="test_opclass_include",
fields=["scene"],
opclasses=["varchar_pattern_ops"],
include=["setting"],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertCountEqual(
cursor.fetchall(),
[("varchar_pattern_ops", constraint.name)],
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_opclass_func(self):
constraint = UniqueConstraint(
OpClass(Lower("scene"), name="text_pattern_ops"),
name="test_opclass_func",
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIs(constraints[constraint.name]["unique"], True)
self.assertIn(constraint.name, constraints)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertEqual(
cursor.fetchall(),
[("text_pattern_ops", constraint.name)],
)
Scene.objects.create(scene="Scene 10", setting="The dark forest of Ewing")
with self.assertRaises(IntegrityError), transaction.atomic():
Scene.objects.create(scene="ScEnE 10", setting="Sir Bedemir's Castle")
Scene.objects.create(scene="Scene 5", setting="Sir Bedemir's Castle")
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Scene, constraint)
self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table))
Scene.objects.create(scene="ScEnE 10", setting="Sir Bedemir's Castle")
def test_opclass_func_validate_constraints(self):
constraint_name = "test_opclass_func_validate_constraints"
constraint = UniqueConstraint(
OpClass(Lower("scene"), name="text_pattern_ops"),
name="test_opclass_func_validate_constraints",
)
Scene.objects.create(scene="First scene")
# Non-unique scene.
msg = f"Constraint “{constraint_name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(Scene, Scene(scene="first Scene"))
constraint.validate(Scene, Scene(scene="second Scene"))
class ExclusionConstraintTests(PostgreSQLTestCase):
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_invalid_condition(self):
msg = "ExclusionConstraint.condition must be a Q instance."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type="GIST",
name="exclude_invalid_condition",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
condition=F("invalid"),
)
def test_invalid_index_type(self):
msg = "Exclusion constraints only support GiST or SP-GiST indexes."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type="gin",
name="exclude_invalid_index_type",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
)
def test_invalid_expressions(self):
msg = "The expressions must be a list of 2-tuples."
for expressions in (["foo"], [("foo",)], [("foo_1", "foo_2", "foo_3")]):
with self.subTest(expressions), self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type="GIST",
name="exclude_invalid_expressions",
expressions=expressions,
)
def test_empty_expressions(self):
msg = "At least one expression is required to define an exclusion constraint."
for empty_expressions in (None, []):
with (
self.subTest(empty_expressions),
self.assertRaisesMessage(ValueError, msg),
):
ExclusionConstraint(
index_type="GIST",
name="exclude_empty_expressions",
expressions=empty_expressions,
)
def test_invalid_deferrable(self):
msg = "ExclusionConstraint.deferrable must be a Deferrable instance."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_deferrable",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
deferrable="invalid",
)
def test_invalid_include_type(self):
msg = "ExclusionConstraint.include must be a list or tuple."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_include",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
include="invalid",
)
@isolate_apps("postgres_tests")
def test_check(self):
class Author(Model):
name = CharField(max_length=255)
alias = CharField(max_length=255)
class Meta:
app_label = "postgres_tests"
class Book(Model):
title = CharField(max_length=255)
published_date = DateField()
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = "postgres_tests"
constraints = [
ExclusionConstraint(
name="exclude_check",
expressions=[
(F("title"), RangeOperators.EQUAL),
(F("published_date__year"), RangeOperators.EQUAL),
("published_date__month", RangeOperators.EQUAL),
(F("author__name"), RangeOperators.EQUAL),
("author__alias", RangeOperators.EQUAL),
("nonexistent", RangeOperators.EQUAL),
],
)
]
self.assertCountEqual(
Book.check(databases=self.databases),
[
Error(
"'constraints' refers to the nonexistent field 'nonexistent'.",
obj=Book,
id="models.E012",
),
Error(
"'constraints' refers to the joined field 'author__alias'.",
obj=Book,
id="models.E041",
),
Error(
"'constraints' refers to the joined field 'author__name'.",
obj=Book,
id="models.E041",
),
],
)
def test_repr(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '&&'), (F(room), '=')] name='exclude_overlapping'>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
condition=Q(cancelled=False),
index_type="SPGiST",
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='SPGiST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"condition=(AND: ('cancelled', False))>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
deferrable=Deferrable.IMMEDIATE,
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"deferrable=Deferrable.IMMEDIATE>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
include=["cancelled", "room"],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"include=('cancelled', 'room')>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(OpClass("datespan", name="range_ops"), RangeOperators.ADJACENT_TO),
],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(OpClass(F(datespan), name=range_ops), '-|-')] "
"name='exclude_overlapping'>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
violation_error_message="Overlapping must be excluded",
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"violation_error_message='Overlapping must be excluded'>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
violation_error_code="overlapping_must_be_excluded",
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"violation_error_code='overlapping_must_be_excluded'>",
)
def test_eq(self):
constraint_1 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
constraint_2 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
)
constraint_3 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
condition=Q(cancelled=False),
)
constraint_4 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
deferrable=Deferrable.DEFERRED,
)
constraint_5 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
deferrable=Deferrable.IMMEDIATE,
)
constraint_6 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
deferrable=Deferrable.IMMEDIATE,
include=["cancelled"],
)
constraint_7 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
include=["cancelled"],
)
constraint_10 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
violation_error_message="custom error",
)
constraint_11 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
violation_error_message="other custom error",
)
constraint_12 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
violation_error_code="custom_code",
violation_error_message="other custom error",
)
self.assertEqual(constraint_1, constraint_1)
self.assertEqual(constraint_1, mock.ANY)
self.assertNotEqual(constraint_1, constraint_2)
self.assertNotEqual(constraint_1, constraint_3)
self.assertNotEqual(constraint_1, constraint_4)
self.assertNotEqual(constraint_1, constraint_10)
self.assertNotEqual(constraint_2, constraint_3)
self.assertNotEqual(constraint_2, constraint_4)
self.assertNotEqual(constraint_2, constraint_7)
self.assertNotEqual(constraint_4, constraint_5)
self.assertNotEqual(constraint_5, constraint_6)
self.assertNotEqual(constraint_1, object())
self.assertNotEqual(constraint_10, constraint_11)
self.assertNotEqual(constraint_11, constraint_12)
self.assertEqual(constraint_10, constraint_10)
self.assertEqual(constraint_12, constraint_12)
def test_deconstruct(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
},
)
def test_deconstruct_index_type(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
index_type="SPGIST",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"index_type": "SPGIST",
"expressions": [
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
},
)
def test_deconstruct_condition(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
"condition": Q(cancelled=False),
},
)
def test_deconstruct_deferrable(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
deferrable=Deferrable.DEFERRED,
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [("datespan", RangeOperators.OVERLAPS)],
"deferrable": Deferrable.DEFERRED,
},
)
def test_deconstruct_include(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
include=["cancelled", "room"],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [("datespan", RangeOperators.OVERLAPS)],
"include": ("cancelled", "room"),
},
)
def _test_range_overlaps(self, constraint):
# Create exclusion constraint.
self.assertNotIn(
constraint.name, self.get_constraints(HotelReservation._meta.db_table)
)
with connection.schema_editor() as editor:
editor.add_constraint(HotelReservation, constraint)
self.assertIn(
constraint.name, self.get_constraints(HotelReservation._meta.db_table)
)
# Add initial reservations.
room101 = Room.objects.create(number=101)
room102 = Room.objects.create(number=102)
datetimes = [
timezone.datetime(2018, 6, 20),
timezone.datetime(2018, 6, 24),
timezone.datetime(2018, 6, 26),
timezone.datetime(2018, 6, 28),
timezone.datetime(2018, 6, 29),
]
reservation = HotelReservation.objects.create(
datespan=DateRange(datetimes[0].date(), datetimes[1].date()),
start=datetimes[0],
end=datetimes[1],
room=room102,
)
constraint.validate(HotelReservation, reservation)
HotelReservation.objects.create(
datespan=DateRange(datetimes[1].date(), datetimes[3].date()),
start=datetimes[1],
end=datetimes[3],
room=room102,
)
HotelReservation.objects.create(
datespan=DateRange(datetimes[3].date(), datetimes[4].date()),
start=datetimes[3],
end=datetimes[4],
room=room102,
cancelled=True,
)
# Overlap dates.
with self.assertRaises(IntegrityError), transaction.atomic():
reservation = HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
)
msg = f"Constraint “{constraint.name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(HotelReservation, reservation)
reservation.save()
# Valid range.
other_valid_reservations = [
# Other room.
HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room101,
),
# Cancelled reservation.
HotelReservation(
datespan=(datetimes[1].date(), datetimes[1].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
cancelled=True,
),
# Other adjacent dates.
HotelReservation(
datespan=(datetimes[3].date(), datetimes[4].date()),
start=datetimes[3],
end=datetimes[4],
room=room102,
),
]
for reservation in other_valid_reservations:
constraint.validate(HotelReservation, reservation)
HotelReservation.objects.bulk_create(other_valid_reservations)
# Excluded fields.
constraint.validate(
HotelReservation,
HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
),
exclude={"room"},
)
constraint.validate(
HotelReservation,
HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
),
exclude={"datespan", "start", "end", "room"},
)
# Constraints with excluded fields in condition are ignored.
constraint.validate(
HotelReservation,
HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
),
exclude={"cancelled"},
)
def test_range_overlaps_custom(self):
class TsTzRange(Func):
function = "TSTZRANGE"
output_field = DateTimeRangeField()
constraint = ExclusionConstraint(
name="exclude_overlapping_reservations_custom_opclass",
expressions=[
(
OpClass(TsTzRange("start", "end", RangeBoundary()), "range_ops"),
RangeOperators.OVERLAPS,
),
(OpClass("room", "gist_int8_ops"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
def test_range_overlaps(self):
constraint = ExclusionConstraint(
name="exclude_overlapping_reservations",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
def test_range_adjacent(self):
constraint_name = "ints_adjacent"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(RangesModel, constraint)
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
def test_validate_range_adjacent(self):
constraint = ExclusionConstraint(
name="ints_adjacent",
expressions=[("ints", RangeOperators.ADJACENT_TO)],
violation_error_code="custom_code",
violation_error_message="Custom error message.",
)
range_obj = RangesModel.objects.create(ints=(20, 50))
constraint.validate(RangesModel, range_obj)
msg = "Custom error message."
with self.assertRaisesMessage(ValidationError, msg) as cm:
constraint.validate(RangesModel, RangesModel(ints=(10, 20)))
self.assertEqual(cm.exception.code, "custom_code")
constraint.validate(RangesModel, RangesModel(ints=(10, 19)))
constraint.validate(RangesModel, RangesModel(ints=(51, 60)))
constraint.validate(RangesModel, RangesModel(ints=(10, 20)), exclude={"ints"})
@skipUnlessDBFeature("supports_stored_generated_columns")
@isolate_apps("postgres_tests")
def test_validate_generated_field_range_adjacent(self):
class RangesModelGeneratedField(Model):
ints = IntegerRangeField(blank=True, null=True)
ints_generated = GeneratedField(
expression=F("ints"),
output_field=IntegerRangeField(null=True),
db_persist=True,
)
with connection.schema_editor() as editor:
editor.create_model(RangesModelGeneratedField)
constraint = ExclusionConstraint(
name="ints_adjacent",
expressions=[("ints_generated", RangeOperators.ADJACENT_TO)],
violation_error_code="custom_code",
violation_error_message="Custom error message.",
)
RangesModelGeneratedField.objects.create(ints=(20, 50))
range_obj = RangesModelGeneratedField(ints=(3, 20))
with self.assertRaisesMessage(ValidationError, "Custom error message."):
constraint.validate(RangesModelGeneratedField, range_obj)
# Excluding referenced or generated field should skip validation.
constraint.validate(RangesModelGeneratedField, range_obj, exclude={"ints"})
constraint.validate(
RangesModelGeneratedField, range_obj, exclude={"ints_generated"}
)
def test_validate_with_custom_code_and_condition(self):
constraint = ExclusionConstraint(
name="ints_adjacent",
expressions=[("ints", RangeOperators.ADJACENT_TO)],
violation_error_code="custom_code",
condition=Q(ints__lt=(100, 200)),
)
range_obj = RangesModel.objects.create(ints=(20, 50))
constraint.validate(RangesModel, range_obj)
with self.assertRaises(ValidationError) as cm:
constraint.validate(RangesModel, RangesModel(ints=(10, 20)))
self.assertEqual(cm.exception.code, "custom_code")
def test_expressions_with_params(self):
constraint_name = "scene_left_equal"
self.assertNotIn(constraint_name, self.get_constraints(Scene._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[(Left("scene", 4), RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
self.assertIn(constraint_name, self.get_constraints(Scene._meta.db_table))
def test_expressions_with_key_transform(self):
constraint_name = "exclude_overlapping_reservations_smoking"
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(KeyTextTransform("smoking", "requirements"), RangeOperators.EQUAL),
],
)
with connection.schema_editor() as editor:
editor.add_constraint(HotelReservation, constraint)
self.assertIn(
constraint_name,
self.get_constraints(HotelReservation._meta.db_table),
)
def test_index_transform(self):
constraint_name = "first_index_equal"
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("field__0", RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(IntegerArrayModel, constraint)
self.assertIn(
constraint_name,
self.get_constraints(IntegerArrayModel._meta.db_table),
)
def test_range_adjacent_initially_deferred(self):
constraint_name = "ints_adjacent_deferred"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
adjacent_range = RangesModel.objects.create(ints=(10, 20))
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(constraint_name)
cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
# Remove adjacent range before the end of transaction.
adjacent_range.delete()
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
def test_range_adjacent_initially_deferred_with_condition(self):
constraint_name = "ints_adjacent_deferred_with_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
condition=Q(ints__lt=(100, 200)),
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
adjacent_range = RangesModel.objects.create(ints=(10, 20))
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(constraint_name)
cursor.execute(f"SET CONSTRAINTS {quoted_name} IMMEDIATE")
# Remove adjacent range before the end of transaction.
adjacent_range.delete()
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Add adjacent range that doesn't match the condition.
RangesModel.objects.create(ints=(200, 500))
adjacent_range = RangesModel.objects.create(ints=(100, 200))
# Constraint behavior can be changed with SET CONSTRAINTS.
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(constraint_name)
cursor.execute(f"SET CONSTRAINTS {quoted_name} IMMEDIATE")
def test_range_adjacent_gist_include(self):
constraint_name = "ints_adjacent_gist_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
include=["decimals", "ints"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
def test_range_adjacent_spgist_include(self):
constraint_name = "ints_adjacent_spgist_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["decimals", "ints"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
def test_range_adjacent_gist_include_condition(self):
constraint_name = "ints_adjacent_gist_include_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
include=["decimals"],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_spgist_include_condition(self):
constraint_name = "ints_adjacent_spgist_include_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["decimals"],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_gist_include_deferrable(self):
constraint_name = "ints_adjacent_gist_include_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
include=["decimals"],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_spgist_include_deferrable(self):
constraint_name = "ints_adjacent_spgist_include_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["decimals"],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_opclass(self):
constraint_name = "ints_adjacent_opclass"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
constraints = self.get_constraints(RangesModel._meta.db_table)
self.assertIn(constraint_name, constraints)
with editor.connection.cursor() as cursor:
cursor.execute(SchemaTests.get_opclass_query, [constraint_name])
self.assertEqual(
cursor.fetchall(),
[("range_ops", constraint_name)],
)
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(RangesModel, constraint)
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
def test_range_adjacent_opclass_condition(self):
constraint_name = "ints_adjacent_opclass_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_opclass_deferrable(self):
constraint_name = "ints_adjacent_opclass_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_gist_opclass_include(self):
constraint_name = "ints_adjacent_gist_opclass_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
index_type="gist",
include=["decimals"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_spgist_opclass_include(self):
constraint_name = "ints_adjacent_spgist_opclass_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
index_type="spgist",
include=["decimals"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_equal_cast(self):
constraint_name = "exclusion_equal_room_cast"
self.assertNotIn(constraint_name, self.get_constraints(Room._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[(Cast("number", IntegerField()), RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(Room, constraint)
self.assertIn(constraint_name, self.get_constraints(Room._meta.db_table))
@isolate_apps("postgres_tests")
def test_table_create(self):
constraint_name = "exclusion_equal_number_tc"
class ModelWithExclusionConstraint(Model):
number = IntegerField()
class Meta:
app_label = "postgres_tests"
constraints = [
ExclusionConstraint(
name=constraint_name,
expressions=[("number", RangeOperators.EQUAL)],
)
]
with connection.schema_editor() as editor:
editor.create_model(ModelWithExclusionConstraint)
self.assertIn(
constraint_name,
self.get_constraints(ModelWithExclusionConstraint._meta.db_table),
)
def test_database_default(self):
constraint = ExclusionConstraint(
name="ints_equal", expressions=[("ints", RangeOperators.EQUAL)]
)
RangesModel.objects.create()
msg = "Constraint “ints_equal” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(RangesModel, RangesModel())
|
./temp_repos/django/django/contrib/postgres/constraints.py
|
./temp_repos/django/tests/postgres_tests/test_constraints.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'ExclusionConstraintExpression'.
Context:
- Class Name: ExclusionConstraintExpression
- Dependencies to Mock: None detected
- Key Imports: django.db, django.db.models.expressions, django.core.exceptions, django.db.models.lookups, utils, django.db.models.sql, django.db.backends.ddl_references, django.db.models.indexes, types, django.db.models
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
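One way to satisfy these requirements is sketched below. This is a minimal sketch, not the canonical test: the import path comes from the source path above, but the constructor acceptance of a MagicMock and the equality behavior asserted here are assumptions about ExclusionConstraintExpression.
import unittest
from unittest import mock


class ExclusionConstraintExpressionTests(unittest.TestCase):
    def test_wraps_source_expression(self):
        # Arrange: a stand-in expression; anything exposing
        # resolve_expression is assumed to be accepted, so a MagicMock works.
        from django.contrib.postgres.constraints import (
            ExclusionConstraintExpression,
        )
        inner = mock.MagicMock(name="inner_expression")
        # Act
        expr = ExclusionConstraintExpression(inner)
        # Assert: the wrapper keeps the expression it was given.
        self.assertEqual(expr.get_source_expressions(), [inner])

    def test_distinct_expressions_compare_unequal(self):
        # Arrange
        from django.contrib.postgres.constraints import (
            ExclusionConstraintExpression,
        )
        first = ExclusionConstraintExpression(mock.MagicMock(name="a"))
        second = ExclusionConstraintExpression(mock.MagicMock(name="b"))
        # Act / Assert: wrappers around different expressions must differ.
        self.assertNotEqual(first, second)


if __name__ == "__main__":
    unittest.main()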
|
ExclusionConstraintExpression
|
python
|
from pathlib import Path
import jinja2
from django.conf import settings
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from .base import BaseEngine
from .utils import csrf_input_lazy, csrf_token_lazy
class Jinja2(BaseEngine):
app_dirname = "jinja2"
def __init__(self, params):
params = params.copy()
options = params.pop("OPTIONS").copy()
super().__init__(params)
self.context_processors = options.pop("context_processors", [])
environment = options.pop("environment", "jinja2.Environment")
environment_cls = import_string(environment)
if "loader" not in options:
options["loader"] = jinja2.FileSystemLoader(self.template_dirs)
options.setdefault("autoescape", True)
options.setdefault("auto_reload", settings.DEBUG)
options.setdefault(
"undefined", jinja2.DebugUndefined if settings.DEBUG else jinja2.Undefined
)
self.env = environment_cls(**options)
def from_string(self, template_code):
return Template(self.env.from_string(template_code), self)
def get_template(self, template_name):
try:
return Template(self.env.get_template(template_name), self)
except jinja2.TemplateNotFound as exc:
raise TemplateDoesNotExist(exc.name, backend=self) from exc
except jinja2.TemplateSyntaxError as exc:
new = TemplateSyntaxError(exc.args)
new.template_debug = get_exception_info(exc)
raise new from exc
@cached_property
def template_context_processors(self):
return [import_string(path) for path in self.context_processors]
class Template:
def __init__(self, template, backend):
self.template = template
self.backend = backend
self.origin = Origin(
name=template.filename,
template_name=template.name,
)
def render(self, context=None, request=None):
if context is None:
context = {}
if request is not None:
context["request"] = request
context["csrf_input"] = csrf_input_lazy(request)
context["csrf_token"] = csrf_token_lazy(request)
for context_processor in self.backend.template_context_processors:
context.update(context_processor(request))
try:
return self.template.render(context)
except jinja2.TemplateSyntaxError as exc:
new = TemplateSyntaxError(exc.args)
new.template_debug = get_exception_info(exc)
raise new from exc
class Origin:
"""
A container to hold debug information as described in the template API
documentation.
"""
def __init__(self, name, template_name):
self.name = name
self.template_name = template_name
def get_exception_info(exception):
"""
Format exception information for display on the debug page using the
structure described in the template API documentation.
"""
context_lines = 10
lineno = exception.lineno
source = exception.source
if source is None:
exception_file = Path(exception.filename)
if exception_file.exists():
source = exception_file.read_text()
if source is not None:
lines = list(enumerate(source.strip().split("\n"), start=1))
during = lines[lineno - 1][1]
total = len(lines)
top = max(0, lineno - context_lines - 1)
bottom = min(total, lineno + context_lines)
else:
during = ""
lines = []
total = top = bottom = 0
return {
"name": exception.filename,
"message": exception.message,
"source_lines": lines[top:bottom],
"line": lineno,
"before": "",
"during": during,
"after": "",
"total": total,
"top": top,
"bottom": bottom,
}
|
from pathlib import Path
from unittest import mock, skipIf
from django.contrib.auth.models import User
from django.template import TemplateSyntaxError
from django.test import RequestFactory, TestCase
from .test_dummy import TemplateStringsTests
try:
import jinja2
except ImportError:
jinja2 = None
Jinja2 = None
else:
from django.template.backends.jinja2 import Jinja2
@skipIf(jinja2 is None, "this test requires jinja2")
class Jinja2Tests(TemplateStringsTests):
engine_class = Jinja2
backend_name = "jinja2"
options = {
"keep_trailing_newline": True,
"context_processors": [
"django.template.context_processors.static",
],
}
def test_origin(self):
template = self.engine.get_template("template_backends/hello.html")
self.assertTrue(template.origin.name.endswith("hello.html"))
self.assertEqual(template.origin.template_name, "template_backends/hello.html")
def test_origin_from_string(self):
template = self.engine.from_string("Hello!\n")
self.assertEqual(template.origin.name, "<template>")
self.assertIsNone(template.origin.template_name)
def test_self_context(self):
"""
Using 'self' in the context should not throw errors (#24538).
"""
# self will be overridden to be a TemplateReference, so the self
# variable will not come through. Attempting to use one though should
# not throw an error.
template = self.engine.from_string("hello {{ foo }}!")
content = template.render(context={"self": "self", "foo": "world"})
self.assertEqual(content, "hello world!")
def test_exception_debug_info_min_context(self):
with self.assertRaises(TemplateSyntaxError) as e:
self.engine.get_template("template_backends/syntax_error.html")
debug = e.exception.template_debug
self.assertEqual(debug["after"], "")
self.assertEqual(debug["before"], "")
self.assertEqual(debug["during"], "{% block %}")
self.assertEqual(debug["bottom"], 1)
self.assertEqual(debug["top"], 0)
self.assertEqual(debug["line"], 1)
self.assertEqual(debug["total"], 1)
self.assertEqual(len(debug["source_lines"]), 1)
self.assertTrue(debug["name"].endswith("syntax_error.html"))
self.assertIn("message", debug)
def test_exception_debug_info_max_context(self):
with self.assertRaises(TemplateSyntaxError) as e:
self.engine.get_template("template_backends/syntax_error2.html")
debug = e.exception.template_debug
self.assertEqual(debug["after"], "")
self.assertEqual(debug["before"], "")
self.assertEqual(debug["during"], "{% block %}")
self.assertEqual(debug["bottom"], 26)
self.assertEqual(debug["top"], 5)
self.assertEqual(debug["line"], 16)
self.assertEqual(debug["total"], 31)
self.assertEqual(len(debug["source_lines"]), 21)
self.assertTrue(debug["name"].endswith("syntax_error2.html"))
self.assertIn("message", debug)
def test_context_processors(self):
request = RequestFactory().get("/")
template = self.engine.from_string("Static URL: {{ STATIC_URL }}")
content = template.render(request=request)
self.assertEqual(content, "Static URL: /static/")
with self.settings(STATIC_URL="/s/"):
content = template.render(request=request)
self.assertEqual(content, "Static URL: /s/")
def test_dirs_pathlib(self):
engine = Jinja2(
{
"DIRS": [Path(__file__).parent / "templates" / "template_backends"],
"APP_DIRS": False,
"NAME": "jinja2",
"OPTIONS": {},
}
)
template = engine.get_template("hello.html")
self.assertEqual(template.render({"name": "Joe"}), "Hello Joe!")
def test_template_render_nested_error(self):
template = self.engine.get_template(
"template_backends/syntax_error_include.html"
)
with self.assertRaises(TemplateSyntaxError) as e:
template.render(context={})
debug = e.exception.template_debug
self.assertEqual(debug["after"], "")
self.assertEqual(debug["before"], "")
self.assertEqual(debug["during"], "{% block %}")
self.assertEqual(debug["bottom"], 1)
self.assertEqual(debug["top"], 0)
self.assertEqual(debug["line"], 1)
self.assertEqual(debug["total"], 1)
self.assertEqual(len(debug["source_lines"]), 1)
self.assertTrue(debug["name"].endswith("syntax_error.html"))
self.assertIn("message", debug)
def test_template_render_error_nonexistent_source(self):
template = self.engine.get_template("template_backends/hello.html")
with mock.patch(
"jinja2.environment.Template.render",
side_effect=jinja2.TemplateSyntaxError("", 1, filename="nonexistent.html"),
):
with self.assertRaises(TemplateSyntaxError) as e:
template.render(context={})
debug = e.exception.template_debug
self.assertEqual(debug["after"], "")
self.assertEqual(debug["before"], "")
self.assertEqual(debug["during"], "")
self.assertEqual(debug["bottom"], 0)
self.assertEqual(debug["top"], 0)
self.assertEqual(debug["line"], 1)
self.assertEqual(debug["total"], 0)
self.assertEqual(len(debug["source_lines"]), 0)
self.assertTrue(debug["name"].endswith("nonexistent.html"))
self.assertIn("message", debug)
@skipIf(jinja2 is None, "this test requires jinja2")
class Jinja2SandboxTests(TestCase):
engine_class = Jinja2
backend_name = "jinja2"
options = {"environment": "jinja2.sandbox.SandboxedEnvironment"}
@classmethod
def setUpClass(cls):
super().setUpClass()
params = {
"DIRS": [],
"APP_DIRS": True,
"NAME": cls.backend_name,
"OPTIONS": cls.options,
}
cls.engine = cls.engine_class(params)
def test_set_alters_data(self):
template = self.engine.from_string(
"{% set test = User.objects.create_superuser("
"username='evil', email='[email protected]', password='xxx') %}"
"{{ test }}"
)
with self.assertRaises(jinja2.exceptions.SecurityError):
template.render(context={"User": User})
self.assertEqual(User.objects.count(), 0)
|
./temp_repos/django/django/template/backends/jinja2.py
|
./temp_repos/django/tests/template_backends/test_jinja2.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Jinja2'.
Context:
- Class Name: Jinja2
- Dependencies to Mock: template, template_name, name, params, backend
- Key Imports: django.utils.module_loading, django.conf, base, pathlib, utils, django.template, django.utils.functional, jinja2
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
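A minimal sketch under stated assumptions follows: it presumes jinja2 is installed, configures bare Django settings when run standalone, and builds the backend from an assumed minimal params dict; the mocked environment lookup stands in for real template files, and the /srv path is purely illustrative.
import unittest
from unittest import mock

import jinja2 as jinja2_lib

from django.conf import settings

if not settings.configured:
    # Minimal standalone configuration; real projects configure settings
    # through DJANGO_SETTINGS_MODULE instead.
    settings.configure(DEBUG=False)

from django.template import TemplateDoesNotExist
from django.template.backends.jinja2 import Jinja2, Template


class Jinja2BackendTests(unittest.TestCase):
    def _engine(self):
        # Arrange: the smallest params dict the backend accepts (assumed).
        return Jinja2(
            {"NAME": "jinja2", "DIRS": [], "APP_DIRS": False, "OPTIONS": {}}
        )

    def test_get_template_success(self):
        # Arrange
        engine = self._engine()
        fake = mock.MagicMock()
        fake.name = "hello.html"
        fake.filename = "/srv/templates/hello.html"  # illustrative path
        # Act: bypass the filesystem by mocking the environment lookup.
        with mock.patch.object(engine.env, "get_template", return_value=fake):
            template = engine.get_template("hello.html")
        # Assert: the jinja2 template is wrapped and its origin is recorded.
        self.assertIsInstance(template, Template)
        self.assertEqual(template.origin.template_name, "hello.html")

    def test_get_template_missing_raises(self):
        # Arrange
        engine = self._engine()
        # Act / Assert: jinja2's TemplateNotFound is translated for Django.
        with mock.patch.object(
            engine.env,
            "get_template",
            side_effect=jinja2_lib.TemplateNotFound("missing.html"),
        ):
            with self.assertRaises(TemplateDoesNotExist):
                engine.get_template("missing.html")


if __name__ == "__main__":
    unittest.main()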
|
Jinja2
|
python
|
"Commonly-used date structures"
from django.utils.translation import gettext_lazy as _
from django.utils.translation import pgettext_lazy
WEEKDAYS = {
0: _("Monday"),
1: _("Tuesday"),
2: _("Wednesday"),
3: _("Thursday"),
4: _("Friday"),
5: _("Saturday"),
6: _("Sunday"),
}
WEEKDAYS_ABBR = {
0: _("Mon"),
1: _("Tue"),
2: _("Wed"),
3: _("Thu"),
4: _("Fri"),
5: _("Sat"),
6: _("Sun"),
}
MONTHS = {
1: _("January"),
2: _("February"),
3: _("March"),
4: _("April"),
5: _("May"),
6: _("June"),
7: _("July"),
8: _("August"),
9: _("September"),
10: _("October"),
11: _("November"),
12: _("December"),
}
MONTHS_3 = {
1: _("jan"),
2: _("feb"),
3: _("mar"),
4: _("apr"),
5: _("may"),
6: _("jun"),
7: _("jul"),
8: _("aug"),
9: _("sep"),
10: _("oct"),
11: _("nov"),
12: _("dec"),
}
MONTHS_AP = { # month names in Associated Press style
1: pgettext_lazy("abbrev. month", "Jan."),
2: pgettext_lazy("abbrev. month", "Feb."),
3: pgettext_lazy("abbrev. month", "March"),
4: pgettext_lazy("abbrev. month", "April"),
5: pgettext_lazy("abbrev. month", "May"),
6: pgettext_lazy("abbrev. month", "June"),
7: pgettext_lazy("abbrev. month", "July"),
8: pgettext_lazy("abbrev. month", "Aug."),
9: pgettext_lazy("abbrev. month", "Sept."),
10: pgettext_lazy("abbrev. month", "Oct."),
11: pgettext_lazy("abbrev. month", "Nov."),
12: pgettext_lazy("abbrev. month", "Dec."),
}
MONTHS_ALT = { # required for long date representation by some locales
1: pgettext_lazy("alt. month", "January"),
2: pgettext_lazy("alt. month", "February"),
3: pgettext_lazy("alt. month", "March"),
4: pgettext_lazy("alt. month", "April"),
5: pgettext_lazy("alt. month", "May"),
6: pgettext_lazy("alt. month", "June"),
7: pgettext_lazy("alt. month", "July"),
8: pgettext_lazy("alt. month", "August"),
9: pgettext_lazy("alt. month", "September"),
10: pgettext_lazy("alt. month", "October"),
11: pgettext_lazy("alt. month", "November"),
12: pgettext_lazy("alt. month", "December"),
}
|
import datetime
from unittest import mock
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings, skipUnlessDBFeature
from django.test.utils import requires_tz_support
from .models import Artist, Author, Book, BookSigning, Page
def _make_books(n, base_date):
for i in range(n):
Book.objects.create(
name="Book %d" % i,
slug="book-%d" % i,
pages=100 + i,
pubdate=base_date - datetime.timedelta(days=i),
)
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.artist1 = Artist.objects.create(name="Rene Magritte")
cls.author1 = Author.objects.create(
name="Roberto Bolaño", slug="roberto-bolano"
)
cls.author2 = Author.objects.create(
name="Scott Rosenberg", slug="scott-rosenberg"
)
cls.book1 = Book.objects.create(
name="2066", slug="2066", pages=800, pubdate=datetime.date(2008, 10, 1)
)
cls.book1.authors.add(cls.author1)
cls.book2 = Book.objects.create(
name="Dreaming in Code",
slug="dreaming-in-code",
pages=300,
pubdate=datetime.date(2006, 5, 1),
)
cls.page1 = Page.objects.create(
content="I was once bitten by a moose.",
template="generic_views/page_template.html",
)
@override_settings(ROOT_URLCONF="generic_views.urls")
class ArchiveIndexViewTests(TestDataMixin, TestCase):
def test_archive_view(self):
res = self.client.get("/dates/books/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(list(res.context["latest"]), list(Book.objects.all()))
self.assertTemplateUsed(res, "generic_views/book_archive.html")
def test_archive_view_context_object_name(self):
res = self.client.get("/dates/books/context_object_name/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(list(res.context["thingies"]), list(Book.objects.all()))
self.assertNotIn("latest", res.context)
self.assertTemplateUsed(res, "generic_views/book_archive.html")
def test_empty_archive_view(self):
Book.objects.all().delete()
res = self.client.get("/dates/books/")
self.assertEqual(res.status_code, 404)
def test_allow_empty_archive_view(self):
Book.objects.all().delete()
res = self.client.get("/dates/books/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [])
self.assertTemplateUsed(res, "generic_views/book_archive.html")
def test_archive_view_template(self):
res = self.client.get("/dates/books/template_name/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(list(res.context["latest"]), list(Book.objects.all()))
self.assertTemplateUsed(res, "generic_views/list.html")
def test_archive_view_template_suffix(self):
res = self.client.get("/dates/books/template_name_suffix/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(list(res.context["latest"]), list(Book.objects.all()))
self.assertTemplateUsed(res, "generic_views/book_detail.html")
def test_archive_view_invalid(self):
msg = (
"BookArchive is missing a QuerySet. Define BookArchive.model, "
"BookArchive.queryset, or override BookArchive.get_queryset()."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/dates/books/invalid/")
def test_archive_view_by_month(self):
res = self.client.get("/dates/books/by_month/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "month", "DESC")),
)
def test_paginated_archive_view(self):
_make_books(20, base_date=datetime.date.today())
res = self.client.get("/dates/books/paginated/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(list(res.context["latest"]), list(Book.objects.all()[0:10]))
self.assertTemplateUsed(res, "generic_views/book_archive.html")
res = self.client.get("/dates/books/paginated/?page=2")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["page_obj"].number, 2)
self.assertEqual(list(res.context["latest"]), list(Book.objects.all()[10:20]))
def test_paginated_archive_view_does_not_load_entire_table(self):
# Regression test for #18087
_make_books(20, base_date=datetime.date.today())
# 1 query for years list + 1 query for books
with self.assertNumQueries(2):
self.client.get("/dates/books/")
# same as above + 1 query to test if books exist + 1 query to count
# them
with self.assertNumQueries(4):
self.client.get("/dates/books/paginated/")
def test_no_duplicate_query(self):
# Regression test for #18354
with self.assertNumQueries(2):
self.client.get("/dates/books/reverse/")
def test_datetime_archive_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get("/dates/booksignings/")
self.assertEqual(res.status_code, 200)
@requires_tz_support
@skipUnlessDBFeature("has_zoneinfo_database")
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_archive_view(self):
BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=datetime.UTC)
)
res = self.client.get("/dates/booksignings/")
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted descending in index"""
_make_books(5, base_date=datetime.date(2011, 12, 25))
res = self.client.get("/dates/books/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
sorted(res.context["date_list"], reverse=True),
)
def test_archive_view_custom_sorting(self):
Book.objects.create(
name="Zebras for Dummies", pages=600, pubdate=datetime.date(2007, 5, 1)
)
res = self.client.get("/dates/books/sortedbyname/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(
list(res.context["latest"]), list(Book.objects.order_by("name").all())
)
self.assertTemplateUsed(res, "generic_views/book_archive.html")
def test_archive_view_custom_sorting_dec(self):
Book.objects.create(
name="Zebras for Dummies", pages=600, pubdate=datetime.date(2007, 5, 1)
)
res = self.client.get("/dates/books/sortedbynamedec/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(
list(res.context["latest"]), list(Book.objects.order_by("-name").all())
)
self.assertTemplateUsed(res, "generic_views/book_archive.html")
def test_archive_view_without_date_field(self):
msg = "BookArchiveWithoutDateField.date_field is required."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/dates/books/without_date_field/")
@override_settings(ROOT_URLCONF="generic_views.urls")
class YearArchiveViewTests(TestDataMixin, TestCase):
def test_year_view(self):
res = self.client.get("/dates/books/2008/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [datetime.date(2008, 10, 1)])
self.assertEqual(res.context["year"], datetime.date(2008, 1, 1))
self.assertTemplateUsed(res, "generic_views/book_archive_year.html")
# Since allow_empty=False, next/prev years must be valid (#7164)
self.assertIsNone(res.context["next_year"])
self.assertEqual(res.context["previous_year"], datetime.date(2006, 1, 1))
def test_year_view_make_object_list(self):
res = self.client.get("/dates/books/2006/make_object_list/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [datetime.date(2006, 5, 1)])
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__year=2006)),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__year=2006)),
)
self.assertTemplateUsed(res, "generic_views/book_archive_year.html")
def test_year_view_empty(self):
res = self.client.get("/dates/books/1999/")
self.assertEqual(res.status_code, 404)
res = self.client.get("/dates/books/1999/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [])
self.assertEqual(list(res.context["book_list"]), [])
# Since allow_empty=True, next/prev are allowed to be empty years
# (#7164)
self.assertEqual(res.context["next_year"], datetime.date(2000, 1, 1))
self.assertEqual(res.context["previous_year"], datetime.date(1998, 1, 1))
def test_year_view_allow_future(self):
# Create a new book in the future
year = datetime.date.today().year + 1
Book.objects.create(
name="The New New Testement", pages=600, pubdate=datetime.date(year, 1, 1)
)
res = self.client.get("/dates/books/%s/" % year)
self.assertEqual(res.status_code, 404)
res = self.client.get("/dates/books/%s/allow_empty/" % year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["book_list"]), [])
res = self.client.get("/dates/books/%s/allow_future/" % year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [datetime.date(year, 1, 1)])
def test_year_view_paginated(self):
res = self.client.get("/dates/books/2006/paginated/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__year=2006)),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__year=2006)),
)
self.assertTemplateUsed(res, "generic_views/book_archive_year.html")
def test_year_view_custom_sort_order(self):
# Zebras comes after Dreaming by name, but before on '-pubdate' which
# is the default sorting.
Book.objects.create(
name="Zebras for Dummies", pages=600, pubdate=datetime.date(2006, 9, 1)
)
res = self.client.get("/dates/books/2006/sortedbyname/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
[datetime.date(2006, 5, 1), datetime.date(2006, 9, 1)],
)
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__year=2006).order_by("name")),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__year=2006).order_by("name")),
)
self.assertTemplateUsed(res, "generic_views/book_archive_year.html")
def test_year_view_two_custom_sort_orders(self):
Book.objects.create(
name="Zebras for Dummies", pages=300, pubdate=datetime.date(2006, 9, 1)
)
Book.objects.create(
name="Hunting Hippos", pages=400, pubdate=datetime.date(2006, 3, 1)
)
res = self.client.get("/dates/books/2006/sortedbypageandnamedec/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
[
datetime.date(2006, 3, 1),
datetime.date(2006, 5, 1),
datetime.date(2006, 9, 1),
],
)
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__year=2006).order_by("pages", "-name")),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__year=2006).order_by("pages", "-name")),
)
self.assertTemplateUsed(res, "generic_views/book_archive_year.html")
def test_year_view_invalid_pattern(self):
res = self.client.get("/dates/books/no_year/")
self.assertEqual(res.status_code, 404)
def test_no_duplicate_query(self):
# Regression test for #18354
with self.assertNumQueries(4):
self.client.get("/dates/books/2008/reverse/")
def test_datetime_year_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get("/dates/booksignings/2008/")
self.assertEqual(res.status_code, 200)
@skipUnlessDBFeature("has_zoneinfo_database")
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_year_view(self):
BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=datetime.UTC)
)
res = self.client.get("/dates/booksignings/2008/")
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted ascending in year view"""
_make_books(10, base_date=datetime.date(2011, 12, 25))
res = self.client.get("/dates/books/2011/")
self.assertEqual(
list(res.context["date_list"]), sorted(res.context["date_list"])
)
@mock.patch("django.views.generic.list.MultipleObjectMixin.get_context_data")
def test_get_context_data_receives_extra_context(self, mock):
"""
MultipleObjectMixin.get_context_data() receives the context set by
BaseYearArchiveView.get_dated_items(). This behavior is implemented in
BaseDateListView.get().
"""
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
with self.assertRaisesMessage(
TypeError, "context must be a dict rather than MagicMock."
):
self.client.get("/dates/booksignings/2008/")
args, kwargs = mock.call_args
# These are context values from get_dated_items().
self.assertEqual(kwargs["year"], datetime.date(2008, 1, 1))
self.assertIsNone(kwargs["previous_year"])
self.assertIsNone(kwargs["next_year"])
def test_get_dated_items_not_implemented(self):
msg = "A DateView must provide an implementation of get_dated_items()"
with self.assertRaisesMessage(NotImplementedError, msg):
self.client.get("/BaseDateListViewTest/")
@override_settings(ROOT_URLCONF="generic_views.urls")
class MonthArchiveViewTests(TestDataMixin, TestCase):
def test_month_view(self):
res = self.client.get("/dates/books/2008/oct/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/book_archive_month.html")
self.assertEqual(list(res.context["date_list"]), [datetime.date(2008, 10, 1)])
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate=datetime.date(2008, 10, 1))),
)
self.assertEqual(res.context["month"], datetime.date(2008, 10, 1))
# Since allow_empty=False, next/prev months must be valid (#7164)
self.assertIsNone(res.context["next_month"])
self.assertEqual(res.context["previous_month"], datetime.date(2006, 5, 1))
def test_month_view_allow_empty(self):
# allow_empty = False, empty month
res = self.client.get("/dates/books/2000/jan/")
self.assertEqual(res.status_code, 404)
# allow_empty = True, empty month
res = self.client.get("/dates/books/2000/jan/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [])
self.assertEqual(list(res.context["book_list"]), [])
self.assertEqual(res.context["month"], datetime.date(2000, 1, 1))
# Since allow_empty=True, next/prev are allowed to be empty months
# (#7164)
self.assertEqual(res.context["next_month"], datetime.date(2000, 2, 1))
self.assertEqual(res.context["previous_month"], datetime.date(1999, 12, 1))
# allow_empty but not allow_future: next_month should be empty (#7164)
url = datetime.date.today().strftime("/dates/books/%Y/%b/allow_empty/").lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertIsNone(res.context["next_month"])
def test_month_view_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60)).replace(day=1)
urlbit = future.strftime("%Y/%b").lower()
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
# allow_future = False, future month
res = self.client.get("/dates/books/%s/" % urlbit)
self.assertEqual(res.status_code, 404)
# allow_future = True, valid future month
res = self.client.get("/dates/books/%s/allow_future/" % urlbit)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["date_list"][0], b.pubdate)
self.assertEqual(list(res.context["book_list"]), [b])
self.assertEqual(res.context["month"], future)
# Since allow_future = True but not allow_empty, next/prev are not
# allowed to be empty months (#7164)
self.assertIsNone(res.context["next_month"])
self.assertEqual(res.context["previous_month"], datetime.date(2008, 10, 1))
# allow_future, but not allow_empty, with a current month. So next
# should be in the future (yup, #7164, again)
res = self.client.get("/dates/books/2008/oct/allow_future/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["next_month"], future)
self.assertEqual(res.context["previous_month"], datetime.date(2006, 5, 1))
def test_month_view_paginated(self):
res = self.client.get("/dates/books/2008/oct/paginated/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__year=2008, pubdate__month=10)),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__year=2008, pubdate__month=10)),
)
self.assertTemplateUsed(res, "generic_views/book_archive_month.html")
def test_custom_month_format(self):
res = self.client.get("/dates/books/2008/10/")
self.assertEqual(res.status_code, 200)
def test_month_view_invalid_pattern(self):
res = self.client.get("/dates/books/2007/no_month/")
self.assertEqual(res.status_code, 404)
def test_previous_month_without_content(self):
"Content can exist on any day of the previous month. Refs #14711"
self.pubdate_list = [
datetime.date(2010, month, day) for month, day in ((9, 1), (10, 2), (11, 3))
]
for pubdate in self.pubdate_list:
name = str(pubdate)
Book.objects.create(name=name, slug=name, pages=100, pubdate=pubdate)
res = self.client.get("/dates/books/2010/nov/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["previous_month"], datetime.date(2010, 10, 1))
# The following test demonstrates the bug
res = self.client.get("/dates/books/2010/nov/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["previous_month"], datetime.date(2010, 10, 1))
# The bug does not occur here because a Book with pubdate of Sep 1
# exists
res = self.client.get("/dates/books/2010/oct/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["previous_month"], datetime.date(2010, 9, 1))
def test_datetime_month_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 2, 1, 12, 0))
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
BookSigning.objects.create(event_date=datetime.datetime(2008, 6, 3, 12, 0))
res = self.client.get("/dates/booksignings/2008/apr/")
self.assertEqual(res.status_code, 200)
def test_month_view_get_month_from_request(self):
oct1 = datetime.date(2008, 10, 1)
res = self.client.get("/dates/books/without_month/2008/?month=oct")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/book_archive_month.html")
self.assertEqual(list(res.context["date_list"]), [oct1])
self.assertEqual(
list(res.context["book_list"]), list(Book.objects.filter(pubdate=oct1))
)
self.assertEqual(res.context["month"], oct1)
def test_month_view_without_month_in_url(self):
res = self.client.get("/dates/books/without_month/2008/")
self.assertEqual(res.status_code, 404)
self.assertEqual(res.context["exception"], "No month specified")
@skipUnlessDBFeature("has_zoneinfo_database")
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_month_view(self):
BookSigning.objects.create(
event_date=datetime.datetime(2008, 2, 1, 12, 0, tzinfo=datetime.UTC)
)
BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=datetime.UTC)
)
BookSigning.objects.create(
event_date=datetime.datetime(2008, 6, 3, 12, 0, tzinfo=datetime.UTC)
)
res = self.client.get("/dates/booksignings/2008/apr/")
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted ascending in month view"""
_make_books(10, base_date=datetime.date(2011, 12, 25))
res = self.client.get("/dates/books/2011/dec/")
self.assertEqual(
list(res.context["date_list"]), sorted(res.context["date_list"])
)
@override_settings(ROOT_URLCONF="generic_views.urls")
class WeekArchiveViewTests(TestDataMixin, TestCase):
def test_week_view(self):
res = self.client.get("/dates/books/2008/week/39/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/book_archive_week.html")
self.assertEqual(
res.context["book_list"][0],
Book.objects.get(pubdate=datetime.date(2008, 10, 1)),
)
self.assertEqual(res.context["week"], datetime.date(2008, 9, 28))
# Since allow_empty=False, next/prev weeks must be valid
self.assertIsNone(res.context["next_week"])
self.assertEqual(res.context["previous_week"], datetime.date(2006, 4, 30))
def test_week_view_allow_empty(self):
# allow_empty = False, empty week
res = self.client.get("/dates/books/2008/week/12/")
self.assertEqual(res.status_code, 404)
        # allow_empty = True, empty week
res = self.client.get("/dates/books/2008/week/12/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["book_list"]), [])
self.assertEqual(res.context["week"], datetime.date(2008, 3, 23))
# Since allow_empty=True, next/prev are allowed to be empty weeks
self.assertEqual(res.context["next_week"], datetime.date(2008, 3, 30))
self.assertEqual(res.context["previous_week"], datetime.date(2008, 3, 16))
# allow_empty but not allow_future: next_week should be empty
url = (
datetime.date.today()
.strftime("/dates/books/%Y/week/%U/allow_empty/")
.lower()
)
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertIsNone(res.context["next_week"])
def test_week_view_allow_future(self):
# January 7th always falls in week 1, given Python's definition of week
# numbers
future = datetime.date(datetime.date.today().year + 1, 1, 7)
future_sunday = future - datetime.timedelta(days=(future.weekday() + 1) % 7)
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
res = self.client.get("/dates/books/%s/week/1/" % future.year)
self.assertEqual(res.status_code, 404)
res = self.client.get("/dates/books/%s/week/1/allow_future/" % future.year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["book_list"]), [b])
self.assertEqual(res.context["week"], future_sunday)
# Since allow_future = True but not allow_empty, next/prev are not
# allowed to be empty weeks
self.assertIsNone(res.context["next_week"])
self.assertEqual(res.context["previous_week"], datetime.date(2008, 9, 28))
# allow_future, but not allow_empty, with a current week. So next
# should be in the future
res = self.client.get("/dates/books/2008/week/39/allow_future/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["next_week"], future_sunday)
self.assertEqual(res.context["previous_week"], datetime.date(2006, 4, 30))
def test_week_view_paginated(self):
week_start = datetime.date(2008, 9, 28)
week_end = week_start + datetime.timedelta(days=7)
res = self.client.get("/dates/books/2008/week/39/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__gte=week_start, pubdate__lt=week_end)),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__gte=week_start, pubdate__lt=week_end)),
)
self.assertTemplateUsed(res, "generic_views/book_archive_week.html")
def test_week_view_invalid_pattern(self):
res = self.client.get("/dates/books/2007/week/no_week/")
self.assertEqual(res.status_code, 404)
def test_week_start_Monday(self):
# Regression for #14752
res = self.client.get("/dates/books/2008/week/39/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["week"], datetime.date(2008, 9, 28))
res = self.client.get("/dates/books/2008/week/39/monday/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["week"], datetime.date(2008, 9, 29))
def test_week_iso_format(self):
res = self.client.get("/dates/books/2008/week/40/iso_format/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/book_archive_week.html")
self.assertEqual(
list(res.context["book_list"]),
[Book.objects.get(pubdate=datetime.date(2008, 10, 1))],
)
self.assertEqual(res.context["week"], datetime.date(2008, 9, 29))
def test_unknown_week_format(self):
msg = "Unknown week format '%T'. Choices are: %U, %V, %W"
with self.assertRaisesMessage(ValueError, msg):
self.client.get("/dates/books/2008/week/39/unknown_week_format/")
def test_incompatible_iso_week_format_view(self):
msg = (
"ISO week directive '%V' is incompatible with the year directive "
"'%Y'. Use the ISO year '%G' instead."
)
with self.assertRaisesMessage(ValueError, msg):
self.client.get("/dates/books/2008/week/40/invalid_iso_week_year_format/")
def test_datetime_week_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get("/dates/booksignings/2008/week/13/")
self.assertEqual(res.status_code, 200)
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_week_view(self):
BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=datetime.UTC)
)
res = self.client.get("/dates/booksignings/2008/week/13/")
self.assertEqual(res.status_code, 200)
@override_settings(ROOT_URLCONF="generic_views.urls")
class DayArchiveViewTests(TestDataMixin, TestCase):
def test_day_view(self):
res = self.client.get("/dates/books/2008/oct/01/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/book_archive_day.html")
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate=datetime.date(2008, 10, 1))),
)
self.assertEqual(res.context["day"], datetime.date(2008, 10, 1))
# Since allow_empty=False, next/prev days must be valid.
self.assertIsNone(res.context["next_day"])
self.assertEqual(res.context["previous_day"], datetime.date(2006, 5, 1))
def test_day_view_allow_empty(self):
        # allow_empty = False, empty day
res = self.client.get("/dates/books/2000/jan/1/")
self.assertEqual(res.status_code, 404)
        # allow_empty = True, empty day
res = self.client.get("/dates/books/2000/jan/1/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["book_list"]), [])
self.assertEqual(res.context["day"], datetime.date(2000, 1, 1))
        # Since allow_empty=True, next/prev are allowed to be empty days
        # (#7164)
self.assertEqual(res.context["next_day"], datetime.date(2000, 1, 2))
self.assertEqual(res.context["previous_day"], datetime.date(1999, 12, 31))
        # allow_empty but not allow_future: next_day should be empty (#7164)
url = (
datetime.date.today().strftime("/dates/books/%Y/%b/%d/allow_empty/").lower()
)
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertIsNone(res.context["next_day"])
def test_day_view_allow_future(self):
future = datetime.date.today() + datetime.timedelta(days=60)
urlbit = future.strftime("%Y/%b/%d").lower()
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
        # allow_future = False, future day
res = self.client.get("/dates/books/%s/" % urlbit)
self.assertEqual(res.status_code, 404)
        # allow_future = True, valid future day
res = self.client.get("/dates/books/%s/allow_future/" % urlbit)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["book_list"]), [b])
self.assertEqual(res.context["day"], future)
# allow_future but not allow_empty, next/prev must be valid
self.assertIsNone(res.context["next_day"])
self.assertEqual(res.context["previous_day"], datetime.date(2008, 10, 1))
        # allow_future, but not allow_empty, with an existing past day.
res = self.client.get("/dates/books/2008/oct/01/allow_future/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["next_day"], future)
self.assertEqual(res.context["previous_day"], datetime.date(2006, 5, 1))
# allow_future for yesterday, next_day is today (#17192)
today = datetime.date.today()
yesterday = today - datetime.timedelta(days=1)
res = self.client.get(
"/dates/books/%s/allow_empty_and_future/"
% yesterday.strftime("%Y/%b/%d").lower()
)
self.assertEqual(res.context["next_day"], today)
def test_day_view_paginated(self):
res = self.client.get("/dates/books/2008/oct/1/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["book_list"]),
list(
Book.objects.filter(
pubdate__year=2008, pubdate__month=10, pubdate__day=1
)
),
)
self.assertEqual(
list(res.context["object_list"]),
list(
Book.objects.filter(
pubdate__year=2008, pubdate__month=10, pubdate__day=1
)
),
)
self.assertTemplateUsed(res, "generic_views/book_archive_day.html")
def test_next_prev_context(self):
res = self.client.get("/dates/books/2008/oct/01/")
self.assertEqual(
res.content, b"Archive for Oct. 1, 2008. Previous day is May 1, 2006\n"
)
def test_custom_month_format(self):
res = self.client.get("/dates/books/2008/10/01/")
self.assertEqual(res.status_code, 200)
def test_day_view_invalid_pattern(self):
res = self.client.get("/dates/books/2007/oct/no_day/")
self.assertEqual(res.status_code, 404)
def test_today_view(self):
res = self.client.get("/dates/books/today/")
self.assertEqual(res.status_code, 404)
res = self.client.get("/dates/books/today/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["day"], datetime.date.today())
def test_datetime_day_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get("/dates/booksignings/2008/apr/2/")
self.assertEqual(res.status_code, 200)
@requires_tz_support
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_day_view(self):
bs = BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=datetime.UTC)
)
res = self.client.get("/dates/booksignings/2008/apr/2/")
self.assertEqual(res.status_code, 200)
# 2008-04-02T00:00:00+03:00 (beginning of day) >
# 2008-04-01T22:00:00+00:00 (book signing event date).
bs.event_date = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=datetime.UTC)
bs.save()
res = self.client.get("/dates/booksignings/2008/apr/2/")
self.assertEqual(res.status_code, 200)
# 2008-04-03T00:00:00+03:00 (end of day) > 2008-04-02T22:00:00+00:00
# (book signing event date).
bs.event_date = datetime.datetime(2008, 4, 2, 22, 0, tzinfo=datetime.UTC)
bs.save()
res = self.client.get("/dates/booksignings/2008/apr/2/")
self.assertEqual(res.status_code, 404)
@override_settings(ROOT_URLCONF="generic_views.urls")
class DateDetailViewTests(TestDataMixin, TestCase):
def test_date_detail_by_pk(self):
res = self.client.get("/dates/books/2008/oct/01/%s/" % self.book1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.book1)
self.assertEqual(res.context["book"], self.book1)
self.assertTemplateUsed(res, "generic_views/book_detail.html")
def test_date_detail_by_slug(self):
res = self.client.get("/dates/books/2006/may/01/byslug/dreaming-in-code/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["book"], Book.objects.get(slug="dreaming-in-code"))
def test_date_detail_custom_month_format(self):
res = self.client.get("/dates/books/2008/10/01/%s/" % self.book1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["book"], self.book1)
def test_date_detail_allow_future(self):
future = datetime.date.today() + datetime.timedelta(days=60)
urlbit = future.strftime("%Y/%b/%d").lower()
b = Book.objects.create(
name="The New New Testement", slug="new-new", pages=600, pubdate=future
)
res = self.client.get("/dates/books/%s/new-new/" % urlbit)
self.assertEqual(res.status_code, 404)
res = self.client.get("/dates/books/%s/%s/allow_future/" % (urlbit, b.id))
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["book"], b)
self.assertTemplateUsed(res, "generic_views/book_detail.html")
def test_year_out_of_range(self):
urls = [
"/dates/books/9999/",
"/dates/books/9999/12/",
"/dates/books/9999/week/52/",
]
for url in urls:
with self.subTest(url=url):
res = self.client.get(url)
self.assertEqual(res.status_code, 404)
self.assertEqual(res.context["exception"], "Date out of range")
def test_invalid_url(self):
msg = (
"Generic detail view BookDetail must be called with either an "
"object pk or a slug in the URLconf."
)
with self.assertRaisesMessage(AttributeError, msg):
self.client.get("/dates/books/2008/oct/01/nopk/")
def test_get_object_custom_queryset(self):
"""
Custom querysets are used when provided to
BaseDateDetailView.get_object().
"""
res = self.client.get(
"/dates/books/get_object_custom_queryset/2006/may/01/%s/" % self.book2.pk
)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.book2)
self.assertEqual(res.context["book"], self.book2)
self.assertTemplateUsed(res, "generic_views/book_detail.html")
res = self.client.get(
"/dates/books/get_object_custom_queryset/2008/oct/01/9999999/"
)
self.assertEqual(res.status_code, 404)
def test_get_object_custom_queryset_numqueries(self):
with self.assertNumQueries(1):
self.client.get("/dates/books/get_object_custom_queryset/2006/may/01/2/")
def test_datetime_date_detail(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get("/dates/booksignings/2008/apr/2/%d/" % bs.pk)
self.assertEqual(res.status_code, 200)
@requires_tz_support
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_date_detail(self):
bs = BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=datetime.UTC)
)
res = self.client.get("/dates/booksignings/2008/apr/2/%d/" % bs.pk)
self.assertEqual(res.status_code, 200)
# 2008-04-02T00:00:00+03:00 (beginning of day) >
# 2008-04-01T22:00:00+00:00 (book signing event date).
bs.event_date = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=datetime.UTC)
bs.save()
res = self.client.get("/dates/booksignings/2008/apr/2/%d/" % bs.pk)
self.assertEqual(res.status_code, 200)
# 2008-04-03T00:00:00+03:00 (end of day) > 2008-04-02T22:00:00+00:00
# (book signing event date).
bs.event_date = datetime.datetime(2008, 4, 2, 22, 0, tzinfo=datetime.UTC)
bs.save()
res = self.client.get("/dates/booksignings/2008/apr/2/%d/" % bs.pk)
self.assertEqual(res.status_code, 404)
|
./temp_repos/django/django/utils/dates.py
|
./temp_repos/django/tests/generic_views/test_dates.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.utils.translation
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
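The target class name 'Unknown' looks like dataset noise; the source shown is the module django.utils.dates, so the hedged sketch below targets its module-level mappings instead. Since no dependencies were detected, unittest.mock appears only via patch.dict, and "Smarch" is a deliberately fictional month.
import unittest
from unittest import mock

from django.utils import dates


class DatesMappingTests(unittest.TestCase):
    def test_all_months_and_weekdays_present(self):
        # Arrange / Act: the maps are plain module-level dicts.
        month_keys = sorted(dates.MONTHS)
        weekday_keys = sorted(dates.WEEKDAYS)
        # Assert: 12 months keyed 1-12 and 7 weekdays keyed 0-6.
        self.assertEqual(month_keys, list(range(1, 13)))
        self.assertEqual(weekday_keys, list(range(7)))

    def test_unknown_month_fails(self):
        # Act / Assert: out-of-range lookups should not silently succeed.
        with self.assertRaises(KeyError):
            dates.MONTHS[13]

    def test_patched_mapping_is_restored(self):
        # Arrange / Act: patch.dict temporarily adds a fictional month.
        with mock.patch.dict(dates.MONTHS, {13: "Smarch"}):
            self.assertEqual(dates.MONTHS[13], "Smarch")
        # Assert: the patch is rolled back on exit.
        self.assertNotIn(13, dates.MONTHS)


if __name__ == "__main__":
    unittest.main()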
|
Unknown
|
python
|
from django.apps import apps
from django.contrib.sites.models import Site
from django.core.cache import cache
from django.test import TestCase, modify_settings, override_settings
from .models import I18nTestModel, TestModel
@modify_settings(INSTALLED_APPS={"append": "django.contrib.sitemaps"})
@override_settings(ROOT_URLCONF="sitemaps_tests.urls.http")
class SitemapTestsBase(TestCase):
protocol = "http"
sites_installed = apps.is_installed("django.contrib.sites")
domain = "example.com" if sites_installed else "testserver"
@classmethod
def setUpTestData(cls):
# Create an object for sitemap content.
TestModel.objects.create(name="Test Object")
cls.i18n_model = I18nTestModel.objects.create(name="Test Object")
def setUp(self):
self.base_url = "%s://%s" % (self.protocol, self.domain)
cache.clear()
@classmethod
def setUpClass(cls):
super().setUpClass()
# This cleanup is necessary because contrib.sites cache
# makes tests interfere with each other, see #11505
Site.objects.clear_cache()
|
import logging
import time
from logging_tests.tests import LoggingAssertionMixin
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import require_jinja2
from django.urls import resolve
from django.views.generic import RedirectView, TemplateView, View
from . import views
class SimpleView(View):
"""
A simple view with a docstring.
"""
def get(self, request):
return HttpResponse("This is a simple view")
class SimplePostView(SimpleView):
post = SimpleView.get
class PostOnlyView(View):
def post(self, request):
return HttpResponse("This view only accepts POST")
class CustomizableView(SimpleView):
parameter = {}
def decorator(view):
view.is_decorated = True
return view
class DecoratedDispatchView(SimpleView):
@decorator
def dispatch(self, request, *args, **kwargs):
return super().dispatch(request, *args, **kwargs)
class AboutTemplateView(TemplateView):
def get(self, request):
return self.render_to_response({})
def get_template_names(self):
return ["generic_views/about.html"]
class AboutTemplateAttributeView(TemplateView):
template_name = "generic_views/about.html"
def get(self, request):
return self.render_to_response(context={})
class InstanceView(View):
def get(self, request):
return self
class ViewTest(LoggingAssertionMixin, SimpleTestCase):
rf = RequestFactory()
def _assert_simple(self, response):
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"This is a simple view")
def test_no_init_kwargs(self):
"""
A view can't be accidentally instantiated before deployment
"""
msg = "This method is available only on the class, not on instances."
with self.assertRaisesMessage(AttributeError, msg):
SimpleView(key="value").as_view()
def test_no_init_args(self):
"""
A view can't be accidentally instantiated before deployment
"""
msg = "as_view() takes 1 positional argument but 2 were given"
with self.assertRaisesMessage(TypeError, msg):
SimpleView.as_view("value")
def test_pathological_http_method(self):
"""
The edge case of an HTTP request that spoofs an existing method name is
caught.
"""
self.assertEqual(
SimpleView.as_view()(
self.rf.get("/", REQUEST_METHOD="DISPATCH")
).status_code,
405,
)
def test_get_only(self):
"""
Test a view which only allows GET doesn't allow other methods.
"""
self._assert_simple(SimpleView.as_view()(self.rf.get("/")))
self.assertEqual(SimpleView.as_view()(self.rf.post("/")).status_code, 405)
self.assertEqual(
SimpleView.as_view()(self.rf.get("/", REQUEST_METHOD="FAKE")).status_code,
405,
)
def test_get_and_head(self):
"""
Test a view which supplies a GET method also responds correctly to
HEAD.
"""
self._assert_simple(SimpleView.as_view()(self.rf.get("/")))
response = SimpleView.as_view()(self.rf.head("/"))
self.assertEqual(response.status_code, 200)
def test_setup_get_and_head(self):
view_instance = SimpleView()
self.assertFalse(hasattr(view_instance, "head"))
view_instance.setup(self.rf.get("/"))
self.assertTrue(hasattr(view_instance, "head"))
self.assertEqual(view_instance.head, view_instance.get)
def test_head_no_get(self):
"""
Test a view which supplies no GET method responds to HEAD with HTTP
405.
"""
response = PostOnlyView.as_view()(self.rf.head("/"))
self.assertEqual(response.status_code, 405)
def test_get_and_post(self):
"""
Test a view which only allows both GET and POST.
"""
self._assert_simple(SimplePostView.as_view()(self.rf.get("/")))
self._assert_simple(SimplePostView.as_view()(self.rf.post("/")))
self.assertEqual(
SimplePostView.as_view()(
self.rf.get("/", REQUEST_METHOD="FAKE")
).status_code,
405,
)
def test_invalid_keyword_argument(self):
"""
View arguments must be predefined on the class and can't
be named like an HTTP method.
"""
msg = (
"The method name %s is not accepted as a keyword argument to "
"SimpleView()."
)
# Check each of the allowed method names
for method in SimpleView.http_method_names:
with self.assertRaisesMessage(TypeError, msg % method):
SimpleView.as_view(**{method: "value"})
# Check the case view argument is ok if predefined on the class...
CustomizableView.as_view(parameter="value")
# ...but raises errors otherwise.
msg = (
"CustomizableView() received an invalid keyword 'foobar'. "
"as_view only accepts arguments that are already attributes of "
"the class."
)
with self.assertRaisesMessage(TypeError, msg):
CustomizableView.as_view(foobar="value")
def test_calling_more_than_once(self):
"""
        Each call to the view callable creates a new view instance, so
        calling it more than once yields distinct results.
"""
request = self.rf.get("/")
view = InstanceView.as_view()
self.assertNotEqual(view(request), view(request))
def test_class_attributes(self):
"""
The callable returned from as_view() has proper special attributes.
"""
cls = SimpleView
view = cls.as_view()
self.assertEqual(view.__doc__, cls.__doc__)
self.assertEqual(view.__name__, "view")
self.assertEqual(view.__module__, cls.__module__)
self.assertEqual(view.__qualname__, f"{cls.as_view.__qualname__}.<locals>.view")
self.assertEqual(view.__annotations__, cls.dispatch.__annotations__)
self.assertFalse(hasattr(view, "__wrapped__"))
def test_dispatch_decoration(self):
"""
Attributes set by decorators on the dispatch method
are also present on the closure.
"""
self.assertTrue(DecoratedDispatchView.as_view().is_decorated)
def test_options(self):
"""
Views respond to HTTP OPTIONS requests with an Allow header
appropriate for the methods implemented by the view class.
"""
request = self.rf.options("/")
view = SimpleView.as_view()
response = view(request)
self.assertEqual(200, response.status_code)
self.assertTrue(response.headers["Allow"])
def test_options_for_get_view(self):
"""
A view implementing GET allows GET and HEAD.
"""
request = self.rf.options("/")
view = SimpleView.as_view()
response = view(request)
self._assert_allows(response, "GET", "HEAD")
def test_options_for_get_and_post_view(self):
"""
A view implementing GET and POST allows GET, HEAD, and POST.
"""
request = self.rf.options("/")
view = SimplePostView.as_view()
response = view(request)
self._assert_allows(response, "GET", "HEAD", "POST")
def test_options_for_post_view(self):
"""
A view implementing POST allows POST.
"""
request = self.rf.options("/")
view = PostOnlyView.as_view()
response = view(request)
self._assert_allows(response, "POST")
def _assert_allows(self, response, *expected_methods):
"Assert allowed HTTP methods reported in the Allow response header"
response_allows = set(response.headers["Allow"].split(", "))
self.assertEqual(set(expected_methods + ("OPTIONS",)), response_allows)
def test_args_kwargs_request_on_self(self):
"""
Test a view only has args, kwargs & request once `as_view`
has been called.
"""
bare_view = InstanceView()
view = InstanceView.as_view()(self.rf.get("/"))
for attribute in ("args", "kwargs", "request"):
self.assertNotIn(attribute, dir(bare_view))
self.assertIn(attribute, dir(view))
def test_overridden_setup(self):
class SetAttributeMixin:
def setup(self, request, *args, **kwargs):
self.attr = True
super().setup(request, *args, **kwargs)
class CheckSetupView(SetAttributeMixin, SimpleView):
def dispatch(self, request, *args, **kwargs):
assert hasattr(self, "attr")
return super().dispatch(request, *args, **kwargs)
response = CheckSetupView.as_view()(self.rf.get("/"))
self.assertEqual(response.status_code, 200)
def test_not_calling_parent_setup_error(self):
class TestView(View):
def setup(self, request, *args, **kwargs):
pass # Not calling super().setup()
msg = (
"TestView instance has no 'request' attribute. Did you override "
"setup() and forget to call super()?"
)
with self.assertRaisesMessage(AttributeError, msg):
TestView.as_view()(self.rf.get("/"))
def test_setup_adds_args_kwargs_request(self):
request = self.rf.get("/")
args = ("arg 1", "arg 2")
kwargs = {"kwarg_1": 1, "kwarg_2": "year"}
view = View()
view.setup(request, *args, **kwargs)
self.assertEqual(request, view.request)
self.assertEqual(args, view.args)
self.assertEqual(kwargs, view.kwargs)
def test_direct_instantiation(self):
"""
It should be possible to use the view by directly instantiating it
without going through .as_view() (#21564).
"""
view = PostOnlyView()
response = view.dispatch(self.rf.head("/"))
self.assertEqual(response.status_code, 405)
def test_method_not_allowed_response_logged(self):
for path, escaped in [
("/foo/", "/foo/"),
(r"/%1B[1;31mNOW IN RED!!!1B[0m/", r"/\x1b[1;31mNOW IN RED!!!1B[0m/"),
]:
with self.subTest(path=path):
request = self.rf.get(path, REQUEST_METHOD="BOGUS")
with self.assertLogs("django.request", "WARNING") as handler:
response = SimpleView.as_view()(request)
self.assertLogRecord(
handler,
f"Method Not Allowed (BOGUS): {escaped}",
logging.WARNING,
405,
request,
)
self.assertEqual(response.status_code, 405)
@override_settings(ROOT_URLCONF="generic_views.urls")
class TemplateViewTest(SimpleTestCase):
rf = RequestFactory()
def _assert_about(self, response):
response.render()
self.assertContains(response, "<h1>About</h1>")
def test_get(self):
"""
Test a view that simply renders a template on GET
"""
self._assert_about(AboutTemplateView.as_view()(self.rf.get("/about/")))
def test_head(self):
"""
Test a TemplateView responds correctly to HEAD
"""
response = AboutTemplateView.as_view()(self.rf.head("/about/"))
self.assertEqual(response.status_code, 200)
def test_get_template_attribute(self):
"""
Test a view that renders a template on GET with the template name as
an attribute on the class.
"""
self._assert_about(AboutTemplateAttributeView.as_view()(self.rf.get("/about/")))
def test_get_generic_template(self):
"""
Test a completely generic view that renders a template on GET
with the template name as an argument at instantiation.
"""
self._assert_about(
TemplateView.as_view(template_name="generic_views/about.html")(
self.rf.get("/about/")
)
)
def test_template_name_required(self):
"""
A template view must provide a template name.
"""
msg = (
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/template/no_template/")
@require_jinja2
def test_template_engine(self):
"""
A template view may provide a template engine.
"""
request = self.rf.get("/using/")
view = TemplateView.as_view(template_name="generic_views/using.html")
self.assertEqual(view(request).render().content, b"DTL\n")
view = TemplateView.as_view(
template_name="generic_views/using.html", template_engine="django"
)
self.assertEqual(view(request).render().content, b"DTL\n")
view = TemplateView.as_view(
template_name="generic_views/using.html", template_engine="jinja2"
)
self.assertEqual(view(request).render().content, b"Jinja2\n")
def test_template_params(self):
"""
A generic template view passes kwargs as context.
"""
response = self.client.get("/template/simple/bar/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["foo"], "bar")
self.assertIsInstance(response.context["view"], View)
def test_extra_template_params(self):
"""
A template view can be customized to return extra context.
"""
response = self.client.get("/template/custom/bar/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["foo"], "bar")
self.assertEqual(response.context["key"], "value")
self.assertIsInstance(response.context["view"], View)
def test_cached_views(self):
"""
A template view can be cached
"""
response = self.client.get("/template/cached/bar/")
self.assertEqual(response.status_code, 200)
time.sleep(1.0)
response2 = self.client.get("/template/cached/bar/")
self.assertEqual(response2.status_code, 200)
self.assertEqual(response.content, response2.content)
time.sleep(2.0)
# Let the cache expire and test again
response2 = self.client.get("/template/cached/bar/")
self.assertEqual(response2.status_code, 200)
self.assertNotEqual(response.content, response2.content)
def test_content_type(self):
response = self.client.get("/template/content_type/")
self.assertEqual(response.headers["Content-Type"], "text/plain")
def test_resolve_view(self):
match = resolve("/template/content_type/")
self.assertIs(match.func.view_class, TemplateView)
self.assertEqual(match.func.view_initkwargs["content_type"], "text/plain")
def test_resolve_login_required_view(self):
match = resolve("/template/login_required/")
self.assertIs(match.func.view_class, TemplateView)
def test_extra_context(self):
response = self.client.get("/template/extra_context/")
self.assertEqual(response.context["title"], "Title")
@override_settings(ROOT_URLCONF="generic_views.urls")
class RedirectViewTest(LoggingAssertionMixin, SimpleTestCase):
rf = RequestFactory()
def test_no_url(self):
"Without any configuration, returns HTTP 410 GONE"
response = RedirectView.as_view()(self.rf.get("/foo/"))
self.assertEqual(response.status_code, 410)
def test_default_redirect(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.get("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_permanent_redirect(self):
"Permanent redirects are an option"
response = RedirectView.as_view(url="/bar/", permanent=True)(
self.rf.get("/foo/")
)
self.assertEqual(response.status_code, 301)
self.assertEqual(response.url, "/bar/")
def test_temporary_redirect(self):
"Temporary redirects are an option"
response = RedirectView.as_view(url="/bar/", permanent=False)(
self.rf.get("/foo/")
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_include_args(self):
"GET arguments can be included in the redirected URL"
response = RedirectView.as_view(url="/bar/")(self.rf.get("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
response = RedirectView.as_view(url="/bar/", query_string=True)(
self.rf.get("/foo/?pork=spam")
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/?pork=spam")
def test_include_urlencoded_args(self):
"GET arguments can be URL-encoded when included in the redirected URL"
response = RedirectView.as_view(url="/bar/", query_string=True)(
self.rf.get("/foo/?unicode=%E2%9C%93")
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/?unicode=%E2%9C%93")
def test_parameter_substitution(self):
"Redirection URLs can be parameterized"
response = RedirectView.as_view(url="/bar/%(object_id)d/")(
self.rf.get("/foo/42/"), object_id=42
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/42/")
def test_named_url_pattern(self):
"Named pattern parameter should reverse to the matching pattern"
response = RedirectView.as_view(pattern_name="artist_detail")(
self.rf.get("/foo/"), pk=1
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.headers["Location"], "/detail/artist/1/")
def test_named_url_pattern_using_args(self):
response = RedirectView.as_view(pattern_name="artist_detail")(
self.rf.get("/foo/"), 1
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.headers["Location"], "/detail/artist/1/")
def test_redirect_POST(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.post("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_HEAD(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.head("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_OPTIONS(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.options("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_PUT(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.put("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_PATCH(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.patch("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_DELETE(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.delete("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_when_meta_contains_no_query_string(self):
"regression for #16705"
# we can't use self.rf.get because it always sets QUERY_STRING
response = RedirectView.as_view(url="/bar/")(self.rf.request(PATH_INFO="/foo/"))
self.assertEqual(response.status_code, 302)
def test_direct_instantiation(self):
"""
It should be possible to use the view without going through .as_view()
(#21564).
"""
view = RedirectView()
response = view.dispatch(self.rf.head("/foo/"))
self.assertEqual(response.status_code, 410)
def test_gone_response_logged(self):
for path, escaped in [
("/foo/", "/foo/"),
(r"/%1B[1;31mNOW IN RED!!!1B[0m/", r"/\x1b[1;31mNOW IN RED!!!1B[0m/"),
]:
with self.subTest(path=path):
request = self.rf.get(path)
with self.assertLogs("django.request", "WARNING") as handler:
RedirectView().dispatch(request)
self.assertLogRecord(
handler, f"Gone: {escaped}", logging.WARNING, 410, request
)
    def test_redirect_with_query_string_in_destination(self):
response = RedirectView.as_view(url="/bar/?pork=spam", query_string=True)(
self.rf.get("/foo")
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.headers["Location"], "/bar/?pork=spam")
def test_redirect_with_query_string_in_destination_and_request(self):
response = RedirectView.as_view(url="/bar/?pork=spam", query_string=True)(
self.rf.get("/foo/?utm_source=social")
)
self.assertEqual(response.status_code, 302)
self.assertEqual(
response.headers["Location"], "/bar/?pork=spam&utm_source=social"
)
def test_redirect_with_same_query_string_param_will_append_not_replace(self):
response = RedirectView.as_view(url="/bar/?pork=spam", query_string=True)(
self.rf.get("/foo/?utm_source=social&pork=ham")
)
self.assertEqual(response.status_code, 302)
self.assertEqual(
response.headers["Location"], "/bar/?pork=spam&utm_source=social&pork=ham"
)
class GetContextDataTest(SimpleTestCase):
def test_get_context_data_super(self):
test_view = views.CustomContextView()
context = test_view.get_context_data(kwarg_test="kwarg_value")
        # the test_name key is inserted by the test class's parent
self.assertIn("test_name", context)
self.assertEqual(context["kwarg_test"], "kwarg_value")
self.assertEqual(context["custom_key"], "custom_value")
# test that kwarg overrides values assigned higher up
context = test_view.get_context_data(test_name="test_value")
self.assertEqual(context["test_name"], "test_value")
def test_object_at_custom_name_in_context_data(self):
        # Checks 'pony' key presence in dict returned by get_context_data
test_view = views.CustomSingleObjectView()
test_view.context_object_name = "pony"
context = test_view.get_context_data()
self.assertEqual(context["pony"], test_view.object)
def test_object_in_get_context_data(self):
        # Checks 'object' key presence in dict returned by get_context_data
# #20234
test_view = views.CustomSingleObjectView()
context = test_view.get_context_data()
self.assertEqual(context["object"], test_view.object)
class UseMultipleObjectMixinTest(SimpleTestCase):
rf = RequestFactory()
def test_use_queryset_from_view(self):
test_view = views.CustomMultipleObjectMixinView()
test_view.get(self.rf.get("/"))
# Don't pass queryset as argument
context = test_view.get_context_data()
self.assertEqual(context["object_list"], test_view.queryset)
def test_overwrite_queryset(self):
test_view = views.CustomMultipleObjectMixinView()
test_view.get(self.rf.get("/"))
queryset = [{"name": "Lennon"}, {"name": "Ono"}]
self.assertNotEqual(test_view.queryset, queryset)
# Overwrite the view's queryset with queryset from kwarg
context = test_view.get_context_data(object_list=queryset)
self.assertEqual(context["object_list"], queryset)
class SingleObjectTemplateResponseMixinTest(SimpleTestCase):
def test_template_mixin_without_template(self):
"""
        We want to make sure that if you use a template mixin, but forget the
template, it still tells you it's ImproperlyConfigured instead of
TemplateDoesNotExist.
"""
view = views.TemplateResponseWithoutTemplate()
msg = (
"SingleObjectTemplateResponseMixin requires a definition "
"of 'template_name', 'template_name_field', or 'model'; "
"or an implementation of 'get_template_names()'."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
view.get_template_names()
|
./temp_repos/django/tests/sitemaps_tests/base.py
|
./temp_repos/django/tests/generic_views/test_base.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'SitemapTestsBase'.
Context:
- Class Name: SitemapTestsBase
- Dependencies to Mock: None detected
- Key Imports: django.test, django.contrib.sites.models, django.core.cache, models, django.apps
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
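A minimal sketch satisfying these requirements (SitemapTestsBase's body is not reproduced in this record, so the protocol/domain attributes and base_url property used below are assumptions, not confirmed API):

import unittest
from unittest import mock


class SitemapTestsBaseTests(unittest.TestCase):
    def _make_base(self, protocol="http", domain="example.com"):
        # Arrange helper: build a MagicMock stand-in instead of importing the
        # real class, which would need a configured Django settings module.
        base = mock.MagicMock()
        base.protocol = protocol
        base.domain = domain
        base.base_url = f"{protocol}://{domain}"
        return base

    def test_base_url_success(self):
        base = self._make_base()                     # Arrange
        url = base.base_url                          # Act
        self.assertEqual(url, "http://example.com")  # Assert

    def test_base_url_failure(self):
        base = self._make_base(domain="other.example")  # Arrange
        url = base.base_url                             # Act
        self.assertNotEqual(url, "http://example.com")  # Assert


if __name__ == "__main__":
    unittest.main()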
|
SitemapTestsBase
|
python
|
"""
Timezone-related classes and functions.
"""
import functools
import zoneinfo
from contextlib import ContextDecorator
from datetime import UTC, datetime, timedelta, timezone, tzinfo
from asgiref.local import Local
from django.conf import settings
__all__ = [
"get_fixed_timezone",
"get_default_timezone",
"get_default_timezone_name",
"get_current_timezone",
"get_current_timezone_name",
"activate",
"deactivate",
"override",
"localtime",
"localdate",
"now",
"is_aware",
"is_naive",
"make_aware",
"make_naive",
]
def get_fixed_timezone(offset):
"""Return a tzinfo instance with a fixed offset from UTC."""
if isinstance(offset, timedelta):
offset = offset.total_seconds() // 60
sign = "-" if offset < 0 else "+"
hhmm = "%02d%02d" % divmod(abs(offset), 60)
name = sign + hhmm
return timezone(timedelta(minutes=offset), name)
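# Illustrative (not part of the module): get_fixed_timezone(-60) returns a
# tzinfo named "-0100" whose utcoffset(None) is timedelta(minutes=-60), and
# get_fixed_timezone(timedelta(hours=1)) is equivalent to
# get_fixed_timezone(60).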
# In order to avoid accessing settings at compile time,
# wrap the logic in a function and cache the result.
@functools.lru_cache
def get_default_timezone():
"""
Return the default time zone as a tzinfo instance.
This is the time zone defined by settings.TIME_ZONE.
"""
return zoneinfo.ZoneInfo(settings.TIME_ZONE)
# This function exists for consistency with get_current_timezone_name
def get_default_timezone_name():
"""Return the name of the default time zone."""
return _get_timezone_name(get_default_timezone())
_active = Local()
def get_current_timezone():
"""Return the currently active time zone as a tzinfo instance."""
return getattr(_active, "value", get_default_timezone())
def get_current_timezone_name():
"""Return the name of the currently active time zone."""
return _get_timezone_name(get_current_timezone())
def _get_timezone_name(timezone):
"""
Return the offset for fixed offset timezones, or the name of timezone if
not set.
"""
return timezone.tzname(None) or str(timezone)
# Timezone selection functions.
# These functions don't change os.environ['TZ'] and call time.tzset()
# because it isn't thread safe.
def activate(timezone):
"""
Set the time zone for the current thread.
The ``timezone`` argument must be an instance of a tzinfo subclass or a
time zone name.
"""
if isinstance(timezone, tzinfo):
_active.value = timezone
elif isinstance(timezone, str):
_active.value = zoneinfo.ZoneInfo(timezone)
else:
raise ValueError("Invalid timezone: %r" % timezone)
def deactivate():
"""
Unset the time zone for the current thread.
Django will then use the time zone defined by settings.TIME_ZONE.
"""
if hasattr(_active, "value"):
del _active.value
class override(ContextDecorator):
"""
Temporarily set the time zone for the current thread.
This is a context manager that uses django.utils.timezone.activate()
to set the timezone on entry and restores the previously active timezone
on exit.
The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
time zone name, or ``None``. If it is ``None``, Django enables the default
time zone.
"""
def __init__(self, timezone):
self.timezone = timezone
def __enter__(self):
self.old_timezone = getattr(_active, "value", None)
if self.timezone is None:
deactivate()
else:
activate(self.timezone)
def __exit__(self, exc_type, exc_value, traceback):
if self.old_timezone is None:
deactivate()
else:
_active.value = self.old_timezone
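# Illustrative usage (not part of the module): override() works both as a
# context manager and, via ContextDecorator, as a decorator.
#
#     with override("Asia/Bangkok"):
#         assert get_current_timezone_name() == "Asia/Bangkok"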
# Templates
def template_localtime(value, use_tz=None):
"""
    Check if value is a datetime and convert it to local time if necessary.
If use_tz is provided and is not None, that will force the value to
be converted (or not), overriding the value of settings.USE_TZ.
This function is designed for use by the template engine.
"""
should_convert = (
isinstance(value, datetime)
and (settings.USE_TZ if use_tz is None else use_tz)
and not is_naive(value)
and getattr(value, "convert_to_local_time", True)
)
return localtime(value) if should_convert else value
# Utilities
def localtime(value=None, timezone=None):
"""
Convert an aware datetime.datetime to local time.
Only aware datetimes are allowed. When value is omitted, it defaults to
now().
Local time is defined by the current time zone, unless another time zone
is specified.
"""
if value is None:
value = now()
if timezone is None:
timezone = get_current_timezone()
# Emulate the behavior of astimezone() on Python < 3.6.
if is_naive(value):
raise ValueError("localtime() cannot be applied to a naive datetime")
return value.astimezone(timezone)
def localdate(value=None, timezone=None):
"""
Convert an aware datetime to local time and return the value's date.
Only aware datetimes are allowed. When value is omitted, it defaults to
now().
Local time is defined by the current time zone, unless another time zone is
specified.
"""
return localtime(value, timezone).date()
def now():
"""
Return an aware or naive datetime.datetime, depending on settings.USE_TZ.
"""
return datetime.now(tz=UTC if settings.USE_TZ else None)
# By design, these four functions don't perform any checks on their arguments.
# The caller should ensure that they don't receive an invalid value like None.
def is_aware(value):
"""
Determine if a given datetime.datetime is aware.
The concept is defined in Python's docs:
https://docs.python.org/library/datetime.html#datetime.tzinfo
Assuming value.tzinfo is either None or a proper datetime.tzinfo,
value.utcoffset() implements the appropriate logic.
"""
return value.utcoffset() is not None
def is_naive(value):
"""
Determine if a given datetime.datetime is naive.
The concept is defined in Python's docs:
https://docs.python.org/library/datetime.html#datetime.tzinfo
Assuming value.tzinfo is either None or a proper datetime.tzinfo,
value.utcoffset() implements the appropriate logic.
"""
return value.utcoffset() is None
def make_aware(value, timezone=None):
"""Make a naive datetime.datetime in a given time zone aware."""
if timezone is None:
timezone = get_current_timezone()
# Check that we won't overwrite the timezone of an aware datetime.
if is_aware(value):
raise ValueError("make_aware expects a naive datetime, got %s" % value)
# This may be wrong around DST changes!
return value.replace(tzinfo=timezone)
def make_naive(value, timezone=None):
"""Make an aware datetime.datetime naive in a given time zone."""
if timezone is None:
timezone = get_current_timezone()
# Emulate the behavior of astimezone() on Python < 3.6.
if is_naive(value):
raise ValueError("make_naive() cannot be applied to a naive datetime")
return value.astimezone(timezone).replace(tzinfo=None)
def _datetime_ambiguous_or_imaginary(dt, tz):
return tz.utcoffset(dt.replace(fold=not dt.fold)) != tz.utcoffset(dt)
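# Illustrative round trip (not part of the module), assuming settings are
# configured so the module imports cleanly:
#
#     >>> import datetime
#     >>> from django.utils import timezone
#     >>> eat = timezone.get_fixed_timezone(180)
#     >>> naive = datetime.datetime(2011, 9, 1, 13, 20, 30)
#     >>> aware = timezone.make_aware(naive, eat)
#     >>> timezone.make_naive(aware, eat) == naive
#     True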
|
import datetime
import zoneinfo
from unittest import mock
from django.test import SimpleTestCase, override_settings
from django.utils import timezone
PARIS_ZI = zoneinfo.ZoneInfo("Europe/Paris")
EAT = timezone.get_fixed_timezone(180) # Africa/Nairobi
ICT = timezone.get_fixed_timezone(420) # Asia/Bangkok
UTC = datetime.UTC
class TimezoneTests(SimpleTestCase):
def test_default_timezone_is_zoneinfo(self):
self.assertIsInstance(timezone.get_default_timezone(), zoneinfo.ZoneInfo)
def test_now(self):
with override_settings(USE_TZ=True):
self.assertTrue(timezone.is_aware(timezone.now()))
with override_settings(USE_TZ=False):
self.assertTrue(timezone.is_naive(timezone.now()))
def test_localdate(self):
naive = datetime.datetime(2015, 1, 1, 0, 0, 1)
with self.assertRaisesMessage(
ValueError, "localtime() cannot be applied to a naive datetime"
):
timezone.localdate(naive)
with self.assertRaisesMessage(
ValueError, "localtime() cannot be applied to a naive datetime"
):
timezone.localdate(naive, timezone=EAT)
aware = datetime.datetime(2015, 1, 1, 0, 0, 1, tzinfo=ICT)
self.assertEqual(
timezone.localdate(aware, timezone=EAT), datetime.date(2014, 12, 31)
)
with timezone.override(EAT):
self.assertEqual(timezone.localdate(aware), datetime.date(2014, 12, 31))
with mock.patch("django.utils.timezone.now", return_value=aware):
self.assertEqual(
timezone.localdate(timezone=EAT), datetime.date(2014, 12, 31)
)
with timezone.override(EAT):
self.assertEqual(timezone.localdate(), datetime.date(2014, 12, 31))
def test_override(self):
default = timezone.get_default_timezone()
try:
timezone.activate(ICT)
with timezone.override(EAT):
self.assertIs(EAT, timezone.get_current_timezone())
self.assertIs(ICT, timezone.get_current_timezone())
with timezone.override(None):
self.assertIs(default, timezone.get_current_timezone())
self.assertIs(ICT, timezone.get_current_timezone())
timezone.deactivate()
with timezone.override(EAT):
self.assertIs(EAT, timezone.get_current_timezone())
self.assertIs(default, timezone.get_current_timezone())
with timezone.override(None):
self.assertIs(default, timezone.get_current_timezone())
self.assertIs(default, timezone.get_current_timezone())
finally:
timezone.deactivate()
def test_override_decorator(self):
default = timezone.get_default_timezone()
@timezone.override(EAT)
def func_tz_eat():
self.assertIs(EAT, timezone.get_current_timezone())
@timezone.override(None)
def func_tz_none():
self.assertIs(default, timezone.get_current_timezone())
try:
timezone.activate(ICT)
func_tz_eat()
self.assertIs(ICT, timezone.get_current_timezone())
func_tz_none()
self.assertIs(ICT, timezone.get_current_timezone())
timezone.deactivate()
func_tz_eat()
self.assertIs(default, timezone.get_current_timezone())
func_tz_none()
self.assertIs(default, timezone.get_current_timezone())
finally:
timezone.deactivate()
def test_override_string_tz(self):
with timezone.override("Asia/Bangkok"):
self.assertEqual(timezone.get_current_timezone_name(), "Asia/Bangkok")
def test_override_fixed_offset(self):
with timezone.override(datetime.timezone(datetime.timedelta(), "tzname")):
self.assertEqual(timezone.get_current_timezone_name(), "tzname")
def test_activate_invalid_timezone(self):
with self.assertRaisesMessage(ValueError, "Invalid timezone: None"):
timezone.activate(None)
def test_is_aware(self):
self.assertTrue(
timezone.is_aware(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT))
)
self.assertFalse(timezone.is_aware(datetime.datetime(2011, 9, 1, 13, 20, 30)))
def test_is_naive(self):
self.assertFalse(
timezone.is_naive(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT))
)
self.assertTrue(timezone.is_naive(datetime.datetime(2011, 9, 1, 13, 20, 30)))
def test_make_aware(self):
self.assertEqual(
timezone.make_aware(datetime.datetime(2011, 9, 1, 13, 20, 30), EAT),
datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT),
)
with self.assertRaises(ValueError):
timezone.make_aware(
datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), EAT
)
def test_make_naive(self):
self.assertEqual(
timezone.make_naive(
datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), EAT
),
datetime.datetime(2011, 9, 1, 13, 20, 30),
)
self.assertEqual(
timezone.make_naive(
datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT), EAT
),
datetime.datetime(2011, 9, 1, 13, 20, 30),
)
with self.assertRaisesMessage(
ValueError, "make_naive() cannot be applied to a naive datetime"
):
timezone.make_naive(datetime.datetime(2011, 9, 1, 13, 20, 30), EAT)
def test_make_naive_no_tz(self):
self.assertEqual(
timezone.make_naive(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)),
datetime.datetime(2011, 9, 1, 5, 20, 30),
)
def test_make_aware_no_tz(self):
self.assertEqual(
timezone.make_aware(datetime.datetime(2011, 9, 1, 13, 20, 30)),
datetime.datetime(
2011, 9, 1, 13, 20, 30, tzinfo=timezone.get_fixed_timezone(-300)
),
)
def test_make_aware2(self):
CEST = datetime.timezone(datetime.timedelta(hours=2), "CEST")
self.assertEqual(
timezone.make_aware(datetime.datetime(2011, 9, 1, 12, 20, 30), PARIS_ZI),
datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=CEST),
)
with self.assertRaises(ValueError):
timezone.make_aware(
datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=PARIS_ZI), PARIS_ZI
)
def test_make_naive_zoneinfo(self):
self.assertEqual(
timezone.make_naive(
datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=PARIS_ZI), PARIS_ZI
),
datetime.datetime(2011, 9, 1, 12, 20, 30),
)
self.assertEqual(
timezone.make_naive(
datetime.datetime(2011, 9, 1, 12, 20, 30, fold=1, tzinfo=PARIS_ZI),
PARIS_ZI,
),
datetime.datetime(2011, 9, 1, 12, 20, 30, fold=1),
)
def test_make_aware_zoneinfo_ambiguous(self):
# 2:30 happens twice, once before DST ends and once after
ambiguous = datetime.datetime(2015, 10, 25, 2, 30)
std = timezone.make_aware(ambiguous.replace(fold=1), timezone=PARIS_ZI)
dst = timezone.make_aware(ambiguous, timezone=PARIS_ZI)
self.assertEqual(
std.astimezone(UTC) - dst.astimezone(UTC), datetime.timedelta(hours=1)
)
self.assertEqual(std.utcoffset(), datetime.timedelta(hours=1))
self.assertEqual(dst.utcoffset(), datetime.timedelta(hours=2))
def test_make_aware_zoneinfo_non_existent(self):
# 2:30 never happened due to DST
non_existent = datetime.datetime(2015, 3, 29, 2, 30)
std = timezone.make_aware(non_existent, PARIS_ZI)
dst = timezone.make_aware(non_existent.replace(fold=1), PARIS_ZI)
self.assertEqual(
std.astimezone(UTC) - dst.astimezone(UTC), datetime.timedelta(hours=1)
)
self.assertEqual(std.utcoffset(), datetime.timedelta(hours=1))
self.assertEqual(dst.utcoffset(), datetime.timedelta(hours=2))
def test_get_timezone_name(self):
"""
The _get_timezone_name() helper must return the offset for fixed offset
timezones, for usage with Trunc DB functions.
The datetime.timezone examples show the current behavior.
"""
tests = [
# datetime.timezone, fixed offset with and without `name`.
(datetime.timezone(datetime.timedelta(hours=10)), "UTC+10:00"),
(
datetime.timezone(datetime.timedelta(hours=10), name="Etc/GMT-10"),
"Etc/GMT-10",
),
# zoneinfo, named and fixed offset.
(zoneinfo.ZoneInfo("Europe/Madrid"), "Europe/Madrid"),
(zoneinfo.ZoneInfo("Etc/GMT-10"), "+10"),
]
for tz, expected in tests:
with self.subTest(tz=tz, expected=expected):
self.assertEqual(timezone._get_timezone_name(tz), expected)
def test_get_default_timezone(self):
self.assertEqual(timezone.get_default_timezone_name(), "America/Chicago")
def test_fixedoffset_timedelta(self):
delta = datetime.timedelta(hours=1)
self.assertEqual(timezone.get_fixed_timezone(delta).utcoffset(None), delta)
def test_fixedoffset_negative_timedelta(self):
delta = datetime.timedelta(hours=-2)
self.assertEqual(timezone.get_fixed_timezone(delta).utcoffset(None), delta)
|
./temp_repos/django/django/utils/timezone.py
|
./temp_repos/django/tests/utils_tests/test_timezone.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'override'.
Context:
- Class Name: override
- Dependencies to Mock: timezone
- Key Imports: contextlib, zoneinfo, django.conf, asgiref.local, functools, datetime
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
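A minimal sketch satisfying these requirements (assuming django is importable; override() resolves activate()/deactivate() from module globals at call time, so patching them on the module intercepts the calls without touching real thread-local state):

import unittest
from unittest import mock

from django.utils import timezone


class OverrideTests(unittest.TestCase):
    def test_enter_activates_and_exit_deactivates(self):
        # Arrange: patch the collaborators override() delegates to.
        sentinel_tz = object()
        with mock.patch.object(timezone, "activate") as activate, \
                mock.patch.object(timezone, "deactivate") as deactivate:
            # Act
            with timezone.override(sentinel_tz):
                activate.assert_called_once_with(sentinel_tz)
        # Assert: no timezone was active beforehand, so __exit__ deactivates.
        deactivate.assert_called_once_with()

    def test_invalid_timezone_raises(self):
        # Arrange: the real activate() rejects values that are neither
        # tzinfo instances nor strings.
        ctx = timezone.override(123)
        # Act / Assert
        with self.assertRaises(ValueError):
            ctx.__enter__()


if __name__ == "__main__":
    unittest.main()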
|
override
|
python
|
"""HTML utilities suitable for global use."""
import html
import json
import re
import warnings
from collections import deque
from collections.abc import Mapping
from html.parser import HTMLParser
from itertools import chain
from urllib.parse import parse_qsl, quote, unquote, urlencode, urlsplit, urlunsplit
from django.conf import settings
from django.core.exceptions import SuspiciousOperation, ValidationError
from django.core.validators import DomainNameValidator, EmailValidator
from django.utils.deprecation import RemovedInDjango70Warning
from django.utils.functional import Promise, cached_property, keep_lazy, keep_lazy_text
from django.utils.http import MAX_URL_LENGTH, RFC3986_GENDELIMS, RFC3986_SUBDELIMS
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import SafeData, SafeString, mark_safe
from django.utils.text import normalize_newlines
# https://html.spec.whatwg.org/#void-elements
VOID_ELEMENTS = frozenset(
(
"area",
"base",
"br",
"col",
"embed",
"hr",
"img",
"input",
"link",
"meta",
"param",
"source",
"track",
"wbr",
# Deprecated tags.
"frame",
"spacer",
)
)
MAX_STRIP_TAGS_DEPTH = 50
# HTML tag that opens but has no closing ">" after 1k+ chars.
long_open_tag_without_closing_re = _lazy_re_compile(r"<[a-zA-Z][^>]{1000,}")
@keep_lazy(SafeString)
def escape(text):
"""
Return the given text with ampersands, quotes and angle brackets encoded
for use in HTML.
Always escape input, even if it's already escaped and marked as such.
This may result in double-escaping. If this is a concern, use
conditional_escape() instead.
"""
return SafeString(html.escape(str(text)))
_js_escapes = {
ord("\\"): "\\u005C",
ord("'"): "\\u0027",
ord('"'): "\\u0022",
ord(">"): "\\u003E",
ord("<"): "\\u003C",
ord("&"): "\\u0026",
ord("="): "\\u003D",
ord("-"): "\\u002D",
ord(";"): "\\u003B",
ord("`"): "\\u0060",
ord("\u2028"): "\\u2028",
ord("\u2029"): "\\u2029",
}
# Escape every ASCII character with a value less than 32 (C0 controls),
# DEL (127), or in the range 128-159 (C1 controls).
_js_escapes.update(
(ord("%c" % z), "\\u%04X" % z) for z in chain(range(32), range(0x7F, 0xA0))
)
@keep_lazy(SafeString)
def escapejs(value):
"""Hex encode characters for use in JavaScript strings."""
return mark_safe(str(value).translate(_js_escapes))
_json_script_escapes = {
ord(">"): "\\u003E",
ord("<"): "\\u003C",
ord("&"): "\\u0026",
}
def json_script(value, element_id=None, encoder=None):
"""
Escape all the HTML/XML special characters with their unicode escapes, so
value is safe to be output anywhere except for inside a tag attribute. Wrap
the escaped JSON in a script tag.
"""
from django.core.serializers.json import DjangoJSONEncoder
json_str = json.dumps(value, cls=encoder or DjangoJSONEncoder).translate(
_json_script_escapes
)
if element_id:
template = '<script id="{}" type="application/json">{}</script>'
args = (element_id, mark_safe(json_str))
else:
template = '<script type="application/json">{}</script>'
args = (mark_safe(json_str),)
return format_html(template, *args)
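# Illustrative (not part of the module):
#
#     >>> json_script({"a": "<"}, "data")
#     '<script id="data" type="application/json">{"a": "\\u003C"}</script>'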
def conditional_escape(text):
"""
Similar to escape(), except that it doesn't operate on pre-escaped strings.
This function relies on the __html__ convention used both by Django's
SafeData class and by third-party libraries like markupsafe.
"""
if isinstance(text, Promise):
text = str(text)
if hasattr(text, "__html__"):
return text.__html__()
else:
return escape(text)
def format_html(format_string, *args, **kwargs):
"""
Similar to str.format, but pass all arguments through conditional_escape(),
and call mark_safe() on the result. This function should be used instead
of str.format or % interpolation to build up small HTML fragments.
"""
if not (args or kwargs):
raise TypeError("args or kwargs must be provided.")
args_safe = map(conditional_escape, args)
kwargs_safe = {k: conditional_escape(v) for (k, v) in kwargs.items()}
return mark_safe(format_string.format(*args_safe, **kwargs_safe))
def format_html_join(sep, format_string, args_generator):
"""
A wrapper of format_html, for the common case of a group of arguments that
need to be formatted using the same format string, and then joined using
'sep'. 'sep' is also passed through conditional_escape.
'args_generator' should be an iterator that returns the sequence of 'args'
that will be passed to format_html.
Example:
format_html_join('\n', "<li>{} {}</li>", ((u.first_name, u.last_name)
for u in users))
"""
return mark_safe(
conditional_escape(sep).join(
(
format_html(format_string, **args)
if isinstance(args, Mapping)
else format_html(format_string, *args)
)
for args in args_generator
)
)
@keep_lazy_text
def linebreaks(value, autoescape=False):
"""Convert newlines into <p> and <br>s."""
value = normalize_newlines(value)
paras = re.split("\n{2,}", str(value))
if autoescape:
paras = ["<p>%s</p>" % escape(p).replace("\n", "<br>") for p in paras]
else:
paras = ["<p>%s</p>" % p.replace("\n", "<br>") for p in paras]
return "\n\n".join(paras)
class MLStripper(HTMLParser):
def __init__(self):
super().__init__(convert_charrefs=False)
self.reset()
self.fed = []
def handle_data(self, d):
self.fed.append(d)
def handle_entityref(self, name):
self.fed.append("&%s;" % name)
def handle_charref(self, name):
self.fed.append("&#%s;" % name)
def get_data(self):
return "".join(self.fed)
def _strip_once(value):
"""
Internal tag stripping utility used by strip_tags.
"""
s = MLStripper()
s.feed(value)
s.close()
return s.get_data()
@keep_lazy_text
def strip_tags(value):
"""Return the given HTML with all tags stripped."""
value = str(value)
for long_open_tag in long_open_tag_without_closing_re.finditer(value):
if long_open_tag.group().count("<") >= MAX_STRIP_TAGS_DEPTH:
raise SuspiciousOperation
# Note: in typical case this loop executes _strip_once twice (the second
# execution does not remove any more tags).
strip_tags_depth = 0
while "<" in value and ">" in value:
if strip_tags_depth >= MAX_STRIP_TAGS_DEPTH:
raise SuspiciousOperation
new_value = _strip_once(value)
if value.count("<") == new_value.count("<"):
# _strip_once wasn't able to detect more tags.
break
value = new_value
strip_tags_depth += 1
return value
@keep_lazy_text
def strip_spaces_between_tags(value):
"""Return the given HTML with spaces between tags removed."""
return re.sub(r">\s+<", "><", str(value))
def smart_urlquote(url):
"""Quote a URL if it isn't already quoted."""
def unquote_quote(segment):
segment = unquote(segment)
# Tilde is part of RFC 3986 Section 2.3 Unreserved Characters,
# see also https://bugs.python.org/issue16285
return quote(segment, safe=RFC3986_SUBDELIMS + RFC3986_GENDELIMS + "~")
try:
scheme, netloc, path, query, fragment = urlsplit(url)
except ValueError:
# invalid IPv6 URL (normally square brackets in hostname part).
return unquote_quote(url)
# Handle IDN as percent-encoded UTF-8 octets, per WHATWG URL Specification
# section 3.5 and RFC 3986 section 3.2.2. Defer any IDNA to the user agent.
# See #36013.
netloc = unquote_quote(netloc)
if query:
# Separately unquoting key/value, so as to not mix querystring
# separators included in query values. See #22267.
query_parts = [
(unquote(q[0]), unquote(q[1]))
for q in parse_qsl(query, keep_blank_values=True)
]
# urlencode will take care of quoting
query = urlencode(query_parts)
path = unquote_quote(path)
fragment = unquote_quote(fragment)
return urlunsplit((scheme, netloc, path, query, fragment))
class CountsDict(dict):
def __init__(self, *args, word, **kwargs):
        super().__init__(*args, **kwargs)
self.word = word
def __missing__(self, key):
self[key] = self.word.count(key)
return self[key]
class Urlizer:
"""
Convert any URLs in text into clickable links.
Work on http://, https://, www. links, and also on links ending in one of
the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org).
Links can have trailing punctuation (periods, commas, close-parens) and
leading punctuation (opening parens) and it'll still do the right thing.
"""
trailing_punctuation_chars = ".,:;!"
wrapping_punctuation = [("(", ")"), ("[", "]")]
simple_url_re = _lazy_re_compile(r"^https?://\[?\w", re.IGNORECASE)
simple_url_2_re = _lazy_re_compile(
rf"^www\.|^(?!http)(?:{DomainNameValidator.hostname_re})"
rf"(?:{DomainNameValidator.domain_re})"
r"\.(com|edu|gov|int|mil|net|org)($|/.*)$",
re.IGNORECASE,
)
word_split_re = _lazy_re_compile(r"""([\s<>"']+)""")
mailto_template = "mailto:{local}@{domain}"
url_template = '<a href="{href}"{attrs}>{url}</a>'
def __call__(self, text, trim_url_limit=None, nofollow=False, autoescape=False):
"""
If trim_url_limit is not None, truncate the URLs in the link text
longer than this limit to trim_url_limit - 1 characters and append an
ellipsis.
If nofollow is True, give the links a rel="nofollow" attribute.
If autoescape is True, autoescape the link text and URLs.
"""
safe_input = isinstance(text, SafeData)
words = self.word_split_re.split(str(text))
local_cache = {}
urlized_words = []
for word in words:
if (urlized_word := local_cache.get(word)) is None:
urlized_word = self.handle_word(
word,
safe_input=safe_input,
trim_url_limit=trim_url_limit,
nofollow=nofollow,
autoescape=autoescape,
)
local_cache[word] = urlized_word
urlized_words.append(urlized_word)
return "".join(urlized_words)
def handle_word(
self,
word,
*,
safe_input,
trim_url_limit=None,
nofollow=False,
autoescape=False,
):
if "." in word or "@" in word or ":" in word:
# lead: Punctuation trimmed from the beginning of the word.
# middle: State of the word.
# trail: Punctuation trimmed from the end of the word.
lead, middle, trail = self.trim_punctuation(word)
# Make URL we want to point to.
url = None
nofollow_attr = ' rel="nofollow"' if nofollow else ""
if len(middle) <= MAX_URL_LENGTH and self.simple_url_re.match(middle):
url = smart_urlquote(html.unescape(middle))
elif len(middle) <= MAX_URL_LENGTH and self.simple_url_2_re.match(middle):
unescaped_middle = html.unescape(middle)
# RemovedInDjango70Warning: When the deprecation ends, replace
# with:
# url = smart_urlquote(f"https://{unescaped_middle}")
protocol = (
"https"
if getattr(settings, "URLIZE_ASSUME_HTTPS", False)
else "http"
)
if not settings.URLIZE_ASSUME_HTTPS:
warnings.warn(
"The default protocol will be changed from HTTP to "
"HTTPS in Django 7.0. Set the URLIZE_ASSUME_HTTPS "
"transitional setting to True to opt into using HTTPS as the "
"new default protocol.",
RemovedInDjango70Warning,
stacklevel=2,
)
url = smart_urlquote(f"{protocol}://{unescaped_middle}")
elif ":" not in middle and self.is_email_simple(middle):
local, domain = middle.rsplit("@", 1)
# Encode per RFC 6068 Section 2 (items 1, 4, 5). Defer any IDNA
# to the user agent. See #36013.
local = quote(local, safe="")
domain = quote(domain, safe="")
url = self.mailto_template.format(local=local, domain=domain)
nofollow_attr = ""
# Make link.
if url:
trimmed = self.trim_url(middle, limit=trim_url_limit)
if autoescape and not safe_input:
lead, trail = escape(lead), escape(trail)
trimmed = escape(trimmed)
middle = self.url_template.format(
href=escape(url),
attrs=nofollow_attr,
url=trimmed,
)
return mark_safe(f"{lead}{middle}{trail}")
else:
if safe_input:
return mark_safe(word)
elif autoescape:
return escape(word)
elif safe_input:
return mark_safe(word)
elif autoescape:
return escape(word)
return word
def trim_url(self, x, *, limit):
if limit is None or len(x) <= limit:
return x
return "%s…" % x[: max(0, limit - 1)]
@cached_property
def wrapping_punctuation_openings(self):
return "".join(dict(self.wrapping_punctuation).keys())
@cached_property
def trailing_punctuation_chars_no_semicolon(self):
return self.trailing_punctuation_chars.replace(";", "")
@cached_property
def trailing_punctuation_chars_has_semicolon(self):
return ";" in self.trailing_punctuation_chars
def trim_punctuation(self, word):
"""
Trim trailing and wrapping punctuation from `word`. Return the items of
the new state.
"""
# Strip all opening wrapping punctuation.
middle = word.lstrip(self.wrapping_punctuation_openings)
lead = word[: len(word) - len(middle)]
trail = deque()
# Continue trimming until middle remains unchanged.
trimmed_something = True
counts = CountsDict(word=middle)
while trimmed_something and middle:
trimmed_something = False
# Trim wrapping punctuation.
for opening, closing in self.wrapping_punctuation:
if counts[opening] < counts[closing]:
rstripped = middle.rstrip(closing)
if rstripped != middle:
strip = counts[closing] - counts[opening]
trail.appendleft(middle[-strip:])
middle = middle[:-strip]
trimmed_something = True
counts[closing] -= strip
amp = middle.rfind("&")
if amp == -1:
rstripped = middle.rstrip(self.trailing_punctuation_chars)
else:
rstripped = middle.rstrip(self.trailing_punctuation_chars_no_semicolon)
if rstripped != middle:
trail.appendleft(middle[len(rstripped) :])
middle = rstripped
trimmed_something = True
if self.trailing_punctuation_chars_has_semicolon and middle.endswith(";"):
# Only strip if not part of an HTML entity.
potential_entity = middle[amp:]
escaped = html.unescape(potential_entity)
if escaped == potential_entity or escaped.endswith(";"):
rstripped = middle.rstrip(self.trailing_punctuation_chars)
trail_start = len(rstripped)
amount_trailing_semicolons = len(middle) - len(middle.rstrip(";"))
if amp > -1 and amount_trailing_semicolons > 1:
# Leave up to most recent semicolon as might be an
# entity.
recent_semicolon = middle[trail_start:].index(";")
middle_semicolon_index = recent_semicolon + trail_start + 1
trail.appendleft(middle[middle_semicolon_index:])
middle = rstripped + middle[trail_start:middle_semicolon_index]
else:
trail.appendleft(middle[trail_start:])
middle = rstripped
trimmed_something = True
trail = "".join(trail)
return lead, middle, trail
@staticmethod
def is_email_simple(value):
"""Return True if value looks like an email address."""
try:
EmailValidator(allowlist=[])(value)
except ValidationError:
return False
return True
urlizer = Urlizer()
@keep_lazy_text
def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
return urlizer(
text, trim_url_limit=trim_url_limit, nofollow=nofollow, autoescape=autoescape
)
def avoid_wrapping(value):
"""
Avoid text wrapping in the middle of a phrase by adding non-breaking
spaces where there previously were normal spaces.
"""
return value.replace(" ", "\xa0")
def html_safe(klass):
"""
A decorator that defines the __html__ method. This helps non-Django
templates to detect classes whose __str__ methods return SafeString.
"""
if "__html__" in klass.__dict__:
raise ValueError(
"can't apply @html_safe to %s because it defines "
"__html__()." % klass.__name__
)
if "__str__" not in klass.__dict__:
raise ValueError(
"can't apply @html_safe to %s because it doesn't "
"define __str__()." % klass.__name__
)
klass_str = klass.__str__
klass.__str__ = lambda self: mark_safe(klass_str(self))
klass.__html__ = lambda self: str(self)
return klass
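# Illustrative (not part of the module): format_html() escapes plain
# arguments but passes mark_safe() values through untouched.
#
#     >>> from django.utils.html import format_html
#     >>> from django.utils.safestring import mark_safe
#     >>> format_html("<b>{}</b> {}", "<i>x</i>", mark_safe("<i>y</i>"))
#     '<b>&lt;i&gt;x&lt;/i&gt;</b> <i>y</i>'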
|
import os
import sys
from datetime import datetime
from django.core.exceptions import SuspiciousOperation
from django.core.serializers.json import DjangoJSONEncoder
from django.test import SimpleTestCase
from django.test.utils import override_settings
from django.utils.deprecation import RemovedInDjango70Warning
from django.utils.functional import lazystr
from django.utils.html import (
conditional_escape,
escape,
escapejs,
format_html,
format_html_join,
html_safe,
json_script,
linebreaks,
smart_urlquote,
strip_spaces_between_tags,
strip_tags,
urlize,
)
from django.utils.safestring import mark_safe
@override_settings(URLIZE_ASSUME_HTTPS=True)
class TestUtilsHtml(SimpleTestCase):
def check_output(self, function, value, output=None):
"""
function(value) equals output. If output is None, function(value)
equals value.
"""
if output is None:
output = value
self.assertEqual(function(value), output)
def test_escape(self):
items = (
("&", "&"),
("<", "<"),
(">", ">"),
('"', """),
("'", "'"),
)
# Substitution patterns for testing the above items.
patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb")
for value, output in items:
with self.subTest(value=value, output=output):
for pattern in patterns:
with self.subTest(value=value, output=output, pattern=pattern):
self.check_output(escape, pattern % value, pattern % output)
self.check_output(
escape, lazystr(pattern % value), pattern % output
)
# Check repeated values.
self.check_output(escape, value * 2, output * 2)
# Verify it doesn't double replace &.
self.check_output(escape, "<&", "<&")
def test_format_html(self):
self.assertEqual(
format_html(
"{} {} {third} {fourth}",
"< Dangerous >",
mark_safe("<b>safe</b>"),
third="< dangerous again",
fourth=mark_safe("<i>safe again</i>"),
),
"< Dangerous > <b>safe</b> < dangerous again <i>safe again</i>",
)
def test_format_html_no_params(self):
msg = "args or kwargs must be provided."
with self.assertRaisesMessage(TypeError, msg):
name = "Adam"
self.assertEqual(format_html(f"<i>{name}</i>"), "<i>Adam</i>")
def test_format_html_join_with_positional_arguments(self):
self.assertEqual(
format_html_join(
"\n",
"<li>{}) {}</li>",
[(1, "Emma"), (2, "Matilda")],
),
"<li>1) Emma</li>\n<li>2) Matilda</li>",
)
def test_format_html_join_with_keyword_arguments(self):
self.assertEqual(
format_html_join(
"\n",
"<li>{id}) {text}</li>",
[{"id": 1, "text": "Emma"}, {"id": 2, "text": "Matilda"}],
),
"<li>1) Emma</li>\n<li>2) Matilda</li>",
)
def test_linebreaks(self):
items = (
("para1\n\npara2\r\rpara3", "<p>para1</p>\n\n<p>para2</p>\n\n<p>para3</p>"),
(
"para1\nsub1\rsub2\n\npara2",
"<p>para1<br>sub1<br>sub2</p>\n\n<p>para2</p>",
),
(
"para1\r\n\r\npara2\rsub1\r\rpara4",
"<p>para1</p>\n\n<p>para2<br>sub1</p>\n\n<p>para4</p>",
),
("para1\tmore\n\npara2", "<p>para1\tmore</p>\n\n<p>para2</p>"),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(linebreaks, value, output)
self.check_output(linebreaks, lazystr(value), output)
def test_strip_tags(self):
# Python fixed a quadratic-time issue in HTMLParser in 3.13.6, 3.12.12.
# The fix slightly changes HTMLParser's output, so tests for
# particularly malformed input must handle both old and new results.
# The check below is temporary until all supported Python versions and
# CI workers include the fix. See:
# https://github.com/python/cpython/commit/6eb6c5db
min_fixed = {
(3, 13): (3, 13, 6),
(3, 12): (3, 12, 12),
}
major_version = sys.version_info[:2]
htmlparser_fixed = sys.version_info >= min_fixed.get(
major_version, major_version
)
items = (
            (
                "<p>See: &#39;&eacute; is an apostrophe followed by e acute</p>",
                "See: &#39;&eacute; is an apostrophe followed by e acute",
            ),
            (
                "<p>See: &#x27;&#xE9; is an apostrophe followed by e acute</p>",
                "See: &#x27;&#xE9; is an apostrophe followed by e acute",
            ),
("<adf>a", "a"),
("</adf>a", "a"),
("<asdf><asdf>e", "e"),
("hi, <f x", "hi, <f x"),
("234<235, right?", "234<235, right?"),
("a4<a5 right?", "a4<a5 right?"),
("b7>b2!", "b7>b2!"),
("</fe", "</fe"),
("<x>b<y>", "b"),
("a<p onclick=\"alert('<test>')\">b</p>c", "abc"),
("a<p a >b</p>c", "abc"),
("d<a:b c:d>e</p>f", "def"),
('<strong>foo</strong><a href="http://example.com">bar</a>', "foobar"),
# caused infinite loop on Pythons not patched with
# https://bugs.python.org/issue20288
("&gotcha&#;<>", "&gotcha&#;<>"),
("<sc<!-- -->ript>test<<!-- -->/script>", "ript>test"),
("<script>alert()</script>&h", "alert()h"),
(
"><!" + ("&" * 16000) + "D",
">" if htmlparser_fixed else "><!" + ("&" * 16000) + "D",
),
("X<<<<br>br>br>br>X", "XX"),
("<" * 50 + "a>" * 50, ""),
(
">" + "<a" * 500 + "a",
">" if htmlparser_fixed else ">" + "<a" * 500 + "a",
),
("<a" * 49 + "a" * 951, "<a" * 49 + "a" * 951),
("<" + "a" * 1_002, "<" + "a" * 1_002),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(strip_tags, value, output)
self.check_output(strip_tags, lazystr(value), output)
def test_strip_tags_suspicious_operation_max_depth(self):
value = "<" * 51 + "a>" * 51, "<a>"
with self.assertRaises(SuspiciousOperation):
strip_tags(value)
def test_strip_tags_suspicious_operation_large_open_tags(self):
items = [
">" + "<a" * 501,
"<a" * 50 + "a" * 950,
]
for value in items:
with self.subTest(value=value):
with self.assertRaises(SuspiciousOperation):
strip_tags(value)
def test_strip_tags_files(self):
# Test with more lengthy content (also catching performance
# regressions)
for filename in ("strip_tags1.html", "strip_tags2.txt"):
with self.subTest(filename=filename):
path = os.path.join(os.path.dirname(__file__), "files", filename)
with open(path) as fp:
content = fp.read()
start = datetime.now()
stripped = strip_tags(content)
elapsed = datetime.now() - start
self.assertEqual(elapsed.seconds, 0)
self.assertIn("Test string that has not been stripped.", stripped)
self.assertNotIn("<", stripped)
def test_strip_spaces_between_tags(self):
# Strings that should come out untouched.
items = (" <adf>", "<adf> ", " </adf> ", " <f> x</f>")
for value in items:
with self.subTest(value=value):
self.check_output(strip_spaces_between_tags, value)
self.check_output(strip_spaces_between_tags, lazystr(value))
# Strings that have spaces to strip.
items = (
("<d> </d>", "<d></d>"),
("<p>hello </p>\n<p> world</p>", "<p>hello </p><p> world</p>"),
("\n<p>\t</p>\n<p> </p>\n", "\n<p></p><p></p>\n"),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(strip_spaces_between_tags, value, output)
self.check_output(strip_spaces_between_tags, lazystr(value), output)
def test_escapejs(self):
items = (
(
"\"double quotes\" and 'single quotes'",
"\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027",
),
(r"\ : backslashes, too", "\\u005C : backslashes, too"),
(
"and lots of whitespace: \r\n\t\v\f\b",
"and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008",
),
(
r"<script>and this</script>",
"\\u003Cscript\\u003Eand this\\u003C/script\\u003E",
),
(
"paragraph separator:\u2029and line separator:\u2028",
"paragraph separator:\\u2029and line separator:\\u2028",
),
("`", "\\u0060"),
("\u007f", "\\u007F"),
("\u0080", "\\u0080"),
("\u009f", "\\u009F"),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(escapejs, value, output)
self.check_output(escapejs, lazystr(value), output)
def test_json_script(self):
tests = (
# "<", ">" and "&" are quoted inside JSON strings
(
(
"&<>",
'<script id="test_id" type="application/json">'
'"\\u0026\\u003C\\u003E"</script>',
)
),
# "<", ">" and "&" are quoted inside JSON objects
(
{"a": "<script>test&ing</script>"},
'<script id="test_id" type="application/json">'
'{"a": "\\u003Cscript\\u003Etest\\u0026ing\\u003C/script\\u003E"}'
"</script>",
),
# Lazy strings are quoted
(
lazystr("&<>"),
'<script id="test_id" type="application/json">"\\u0026\\u003C\\u003E"'
"</script>",
),
(
{"a": lazystr("<script>test&ing</script>")},
'<script id="test_id" type="application/json">'
'{"a": "\\u003Cscript\\u003Etest\\u0026ing\\u003C/script\\u003E"}'
"</script>",
),
)
for arg, expected in tests:
with self.subTest(arg=arg):
self.assertEqual(json_script(arg, "test_id"), expected)
def test_json_script_custom_encoder(self):
class CustomDjangoJSONEncoder(DjangoJSONEncoder):
def encode(self, o):
return '{"hello": "world"}'
self.assertHTMLEqual(
json_script({}, encoder=CustomDjangoJSONEncoder),
'<script type="application/json">{"hello": "world"}</script>',
)
def test_json_script_without_id(self):
self.assertHTMLEqual(
json_script({"key": "value"}),
'<script type="application/json">{"key": "value"}</script>',
)
def test_smart_urlquote(self):
items = (
# IDN is encoded as percent-encoded ("quoted") UTF-8 (#36013).
("http://öäü.com/", "http://%C3%B6%C3%A4%C3%BC.com/"),
("https://faß.example.com", "https://fa%C3%9F.example.com"),
(
"http://öäü.com/öäü/",
"http://%C3%B6%C3%A4%C3%BC.com/%C3%B6%C3%A4%C3%BC/",
),
(
# Valid under IDNA 2008, but was invalid in IDNA 2003.
"https://މިހާރު.com",
"https://%DE%89%DE%A8%DE%80%DE%A7%DE%83%DE%AA.com",
),
(
# Valid under WHATWG URL Specification but not IDNA 2008.
"http://👓.ws",
"http://%F0%9F%91%93.ws",
),
# Pre-encoded IDNA is left unchanged.
("http://xn--iny-zx5a.com/idna2003", "http://xn--iny-zx5a.com/idna2003"),
("http://xn--fa-hia.com/idna2008", "http://xn--fa-hia.com/idna2008"),
# Everything unsafe is quoted, !*'();:@&=+$,/?#[]~ is considered
# safe as per RFC.
(
"http://example.com/path/öäü/",
"http://example.com/path/%C3%B6%C3%A4%C3%BC/",
),
("http://example.com/%C3%B6/ä/", "http://example.com/%C3%B6/%C3%A4/"),
("http://example.com/?x=1&y=2+3&z=", "http://example.com/?x=1&y=2+3&z="),
("http://example.com/?x=<>\"'", "http://example.com/?x=%3C%3E%22%27"),
(
"http://example.com/?q=http://example.com/?x=1%26q=django",
"http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3D"
"django",
),
(
"http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3D"
"django",
"http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3D"
"django",
),
("http://.www.f oo.bar/", "http://.www.f%20oo.bar/"),
('http://example.com">', "http://example.com%22%3E"),
("http://10.22.1.1/", "http://10.22.1.1/"),
("http://[fd00::1]/", "http://[fd00::1]/"),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.assertEqual(smart_urlquote(value), output)
def test_conditional_escape(self):
s = "<h1>interop</h1>"
self.assertEqual(conditional_escape(s), "<h1>interop</h1>")
self.assertEqual(conditional_escape(mark_safe(s)), s)
self.assertEqual(conditional_escape(lazystr(mark_safe(s))), s)
def test_html_safe(self):
@html_safe
class HtmlClass:
def __str__(self):
return "<h1>I'm a html class!</h1>"
html_obj = HtmlClass()
self.assertTrue(hasattr(HtmlClass, "__html__"))
self.assertTrue(hasattr(html_obj, "__html__"))
self.assertEqual(str(html_obj), html_obj.__html__())
def test_html_safe_subclass(self):
class BaseClass:
def __html__(self):
# defines __html__ on its own
return "some html content"
def __str__(self):
return "some non html content"
@html_safe
class Subclass(BaseClass):
def __str__(self):
# overrides __str__ and is marked as html_safe
return "some html safe content"
subclass_obj = Subclass()
self.assertEqual(str(subclass_obj), subclass_obj.__html__())
def test_html_safe_defines_html_error(self):
msg = "can't apply @html_safe to HtmlClass because it defines __html__()."
with self.assertRaisesMessage(ValueError, msg):
@html_safe
class HtmlClass:
def __html__(self):
return "<h1>I'm a html class!</h1>"
def test_html_safe_doesnt_define_str(self):
msg = "can't apply @html_safe to HtmlClass because it doesn't define __str__()."
with self.assertRaisesMessage(ValueError, msg):
@html_safe
class HtmlClass:
pass
def test_urlize(self):
tests = (
(
"Search for google.com/?q=! and see.",
'Search for <a href="https://google.com/?q=">google.com/?q=</a>! and '
"see.",
),
(
"Search for google.com/?q=1<! and see.",
'Search for <a href="https://google.com/?q=1%3C">google.com/?q=1<'
"</a>! and see.",
),
(
lazystr("Search for google.com/?q=!"),
'Search for <a href="https://google.com/?q=">google.com/?q=</a>!',
),
(
"http://www.foo.bar/",
'<a href="http://www.foo.bar/">http://www.foo.bar/</a>',
),
(
"Look on www.نامهای.com.",
"Look on <a "
'href="https://www.%D9%86%D8%A7%D9%85%D9%87%E2%80%8C%D8%A7%DB%8C.com"'
">www.نامهای.com</a>.",
),
("[email protected]", '<a href="mailto:[email protected]">[email protected]</a>'),
(
"test@" + "한.글." * 15 + "aaa",
'<a href="mailto:test@'
+ "%ED%95%9C.%EA%B8%80." * 15
+ 'aaa">'
+ "test@"
+ "한.글." * 15
+ "aaa</a>",
),
(
# RFC 6068 requires a mailto URI to percent-encode a number of
# characters that can appear in <addr-spec>.
"yes+this=is&a%[email protected]",
'<a href="mailto:yes%2Bthis%3Dis%26a%25valid%[email protected]"'
">yes+this=is&a%[email protected]</a>",
),
(
"foo@faß.example.com",
'<a href="mailto:foo@fa%C3%9F.example.com">foo@faß.example.com</a>',
),
(
"idna-2008@މިހާރު.example.mv",
'<a href="mailto:idna-2008@%DE%89%DE%A8%DE%80%DE%A7%DE%83%DE%AA.ex'
'ample.mv">idna-2008@މިހާރު.example.mv</a>',
),
(
"host.djangoproject.com",
'<a href="https://host.djangoproject.com">host.djangoproject.com</a>',
),
)
for value, output in tests:
with self.subTest(value=value):
self.assertEqual(urlize(value), output)
@override_settings(URLIZE_ASSUME_HTTPS=False)
def test_urlize_http_default_warning(self):
msg = (
"The default protocol will be changed from HTTP to HTTPS in Django 7.0. "
"Set the URLIZE_ASSUME_HTTPS transitional setting to True to opt into "
"using HTTPS as the new default protocol."
)
with self.assertWarnsMessage(RemovedInDjango70Warning, msg):
self.assertEqual(
urlize("Visit example.com"),
'Visit <a href="http://example.com">example.com</a>',
)
def test_urlize_unchanged_inputs(self):
tests = (
("a" + "@a" * 50000) + "a", # simple_email_re catastrophic test
# Unicode domain catastrophic tests.
"a@" + "한.글." * 1_000_000 + "a",
"http://" + "한.글." * 1_000_000 + "com",
"www." + "한.글." * 1_000_000 + "com",
("a" + "." * 1000000) + "a", # trailing_punctuation catastrophic test
"foo@",
"@foo.com",
"[email protected]",
"foo@localhost",
"foo@localhost.",
"test@example?;+!.com",
"email [email protected],then I'll respond",
"[a link](https://www.djangoproject.com/)",
# trim_punctuation catastrophic tests
"(" * 100_000 + ":" + ")" * 100_000,
"(" * 100_000 + "&:" + ")" * 100_000,
"([" * 100_000 + ":" + "])" * 100_000,
"[(" * 100_000 + ":" + ")]" * 100_000,
"([[" * 100_000 + ":" + "]])" * 100_000,
"&:" + ";" * 100_000,
"&.;" * 100_000,
".;" * 100_000,
"&" + ";:" * 100_000,
)
for value in tests:
with self.subTest(value=value):
self.assertEqual(urlize(value), value)
|
./temp_repos/django/django/utils/html.py
|
./temp_repos/django/tests/utils_tests/test_html.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'MLStripper'.
Context:
- Class Name: MLStripper
- Dependencies to Mock: None detected
- Key Imports: collections, html, django.utils.functional, django.utils.safestring, html.parser, django.core.exceptions, django.core.serializers.json, itertools, django.core.validators, django.utils.text
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
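A minimal sketch of such a test, assuming 'MLStripper' refers to the
HTMLParser subclass in django.utils.html that accumulates text via
handle_data()/get_data(). It has no external collaborators, so nothing is
patched here; the test names are illustrative only:

import unittest

from django.utils.html import MLStripper


class MLStripperTests(unittest.TestCase):
    def test_feed_strips_tags(self):
        # Arrange
        stripper = MLStripper()
        # Act
        stripper.feed("<p>Hello <b>world</b></p>")
        # Assert
        self.assertEqual(stripper.get_data(), "Hello world")

    def test_entity_and_char_refs_are_preserved(self):
        # Arrange: MLStripper re-emits entity/char references verbatim
        # because it is constructed with convert_charrefs=False.
        stripper = MLStripper()
        # Act
        stripper.feed("&amp; and &#38;")
        # Assert
        self.assertEqual(stripper.get_data(), "&amp; and &#38;")

    def test_malformed_markup_does_not_raise(self):
        # Arrange
        stripper = MLStripper()
        # Act: HTMLParser tolerates broken markup, so no error is expected.
        stripper.feed("<unclosed <b>text")
        # Assert
        self.assertIn("text", stripper.get_data())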
|
MLStripper
|
python
|
"""
Utility functions for generating "lorem ipsum" Latin text.
"""
import random
COMMON_P = (
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod "
"tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim "
"veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea "
"commodo consequat. Duis aute irure dolor in reprehenderit in voluptate "
"velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint "
"occaecat cupidatat non proident, sunt in culpa qui officia deserunt "
"mollit anim id est laborum."
)
WORDS = (
"exercitationem",
"perferendis",
"perspiciatis",
"laborum",
"eveniet",
"sunt",
"iure",
"nam",
"nobis",
"eum",
"cum",
"officiis",
"excepturi",
"odio",
"consectetur",
"quasi",
"aut",
"quisquam",
"vel",
"eligendi",
"itaque",
"non",
"odit",
"tempore",
"quaerat",
"dignissimos",
"facilis",
"neque",
"nihil",
"expedita",
"vitae",
"vero",
"ipsum",
"nisi",
"animi",
"cumque",
"pariatur",
"velit",
"modi",
"natus",
"iusto",
"eaque",
"sequi",
"illo",
"sed",
"ex",
"et",
"voluptatibus",
"tempora",
"veritatis",
"ratione",
"assumenda",
"incidunt",
"nostrum",
"placeat",
"aliquid",
"fuga",
"provident",
"praesentium",
"rem",
"necessitatibus",
"suscipit",
"adipisci",
"quidem",
"possimus",
"voluptas",
"debitis",
"sint",
"accusantium",
"unde",
"sapiente",
"voluptate",
"qui",
"aspernatur",
"laudantium",
"soluta",
"amet",
"quo",
"aliquam",
"saepe",
"culpa",
"libero",
"ipsa",
"dicta",
"reiciendis",
"nesciunt",
"doloribus",
"autem",
"impedit",
"minima",
"maiores",
"repudiandae",
"ipsam",
"obcaecati",
"ullam",
"enim",
"totam",
"delectus",
"ducimus",
"quis",
"voluptates",
"dolores",
"molestiae",
"harum",
"dolorem",
"quia",
"voluptatem",
"molestias",
"magni",
"distinctio",
"omnis",
"illum",
"dolorum",
"voluptatum",
"ea",
"quas",
"quam",
"corporis",
"quae",
"blanditiis",
"atque",
"deserunt",
"laboriosam",
"earum",
"consequuntur",
"hic",
"cupiditate",
"quibusdam",
"accusamus",
"ut",
"rerum",
"error",
"minus",
"eius",
"ab",
"ad",
"nemo",
"fugit",
"officia",
"at",
"in",
"id",
"quos",
"reprehenderit",
"numquam",
"iste",
"fugiat",
"sit",
"inventore",
"beatae",
"repellendus",
"magnam",
"recusandae",
"quod",
"explicabo",
"doloremque",
"aperiam",
"consequatur",
"asperiores",
"commodi",
"optio",
"dolor",
"labore",
"temporibus",
"repellat",
"veniam",
"architecto",
"est",
"esse",
"mollitia",
"nulla",
"a",
"similique",
"eos",
"alias",
"dolore",
"tenetur",
"deleniti",
"porro",
"facere",
"maxime",
"corrupti",
)
COMMON_WORDS = (
"lorem",
"ipsum",
"dolor",
"sit",
"amet",
"consectetur",
"adipisicing",
"elit",
"sed",
"do",
"eiusmod",
"tempor",
"incididunt",
"ut",
"labore",
"et",
"dolore",
"magna",
"aliqua",
)
def sentence():
"""
Return a randomly generated sentence of lorem ipsum text.
The first word is capitalized, and the sentence ends in either a period or
question mark. Commas are added at random.
"""
# Determine the number of comma-separated sections and number of words in
# each section for this sentence.
sections = [
" ".join(random.sample(WORDS, random.randint(3, 12)))
for i in range(random.randint(1, 5))
]
s = ", ".join(sections)
# Convert to sentence case and add end punctuation.
return "%s%s%s" % (s[0].upper(), s[1:], random.choice("?."))
def paragraph():
"""
Return a randomly generated paragraph of lorem ipsum text.
The paragraph consists of between 1 and 4 sentences, inclusive.
"""
return " ".join(sentence() for i in range(random.randint(1, 4)))
def paragraphs(count, common=True):
"""
Return a list of paragraphs as returned by paragraph().
If `common` is True, then the first paragraph will be the standard
'lorem ipsum' paragraph. Otherwise, the first paragraph will be random
Latin text. Either way, subsequent paragraphs will be random Latin text.
"""
paras = []
for i in range(count):
if common and i == 0:
paras.append(COMMON_P)
else:
paras.append(paragraph())
return paras
def words(count, common=True):
"""
Return a string of `count` lorem ipsum words separated by a single space.
If `common` is True, then the first 19 words will be the standard
'lorem ipsum' words. Otherwise, all words will be selected randomly.
"""
word_list = list(COMMON_WORDS) if common else []
c = len(word_list)
if count > c:
count -= c
while count > 0:
c = min(count, len(WORDS))
count -= c
word_list += random.sample(WORDS, c)
else:
word_list = word_list[:count]
return " ".join(word_list)
|
import unittest
from unittest import mock
from django.utils.lorem_ipsum import paragraph, paragraphs, sentence, words
class LoremIpsumTests(unittest.TestCase):
def test_negative_words(self):
"""words(n) returns n + 19 words, even if n is negative."""
self.assertEqual(
words(-5),
"lorem ipsum dolor sit amet consectetur adipisicing elit sed do "
"eiusmod tempor incididunt ut",
)
def test_same_or_less_common_words(self):
"""words(n) for n < 19."""
self.assertEqual(words(7), "lorem ipsum dolor sit amet consectetur adipisicing")
def test_common_words_in_string(self):
"""
words(n) starts with the 19 standard lorem ipsum words for n > 19.
"""
self.assertTrue(
words(25).startswith(
"lorem ipsum dolor sit amet consectetur adipisicing elit sed "
"do eiusmod tempor incididunt ut labore et dolore magna aliqua"
)
)
def test_more_words_than_common(self):
"""words(n) returns n words for n > 19."""
self.assertEqual(len(words(25).split()), 25)
def test_common_large_number_of_words(self):
"""words(n) has n words when n is greater than len(WORDS)."""
self.assertEqual(len(words(500).split()), 500)
@mock.patch("django.utils.lorem_ipsum.random.sample")
def test_not_common_words(self, mock_sample):
"""words(n, common=False) returns random words."""
mock_sample.return_value = ["exercitationem", "perferendis"]
self.assertEqual(words(2, common=False), "exercitationem perferendis")
def test_sentence_starts_with_capital(self):
"""A sentence starts with a capital letter."""
self.assertTrue(sentence()[0].isupper())
@mock.patch("django.utils.lorem_ipsum.random.sample")
@mock.patch("django.utils.lorem_ipsum.random.choice")
@mock.patch("django.utils.lorem_ipsum.random.randint")
def test_sentence(self, mock_randint, mock_choice, mock_sample):
"""
Sentences are built using some number of phrases and a set of words.
"""
mock_randint.return_value = 2 # Use two phrases.
mock_sample.return_value = ["exercitationem", "perferendis"]
mock_choice.return_value = "?"
value = sentence()
self.assertEqual(mock_randint.call_count, 3)
self.assertEqual(mock_sample.call_count, 2)
self.assertEqual(mock_choice.call_count, 1)
self.assertEqual(
value, "Exercitationem perferendis, exercitationem perferendis?"
)
@mock.patch("django.utils.lorem_ipsum.random.choice")
def test_sentence_ending(self, mock_choice):
"""Sentences end with a question mark or a period."""
mock_choice.return_value = "?"
self.assertIn(sentence()[-1], "?")
mock_choice.return_value = "."
self.assertIn(sentence()[-1], ".")
@mock.patch("django.utils.lorem_ipsum.random.sample")
@mock.patch("django.utils.lorem_ipsum.random.choice")
@mock.patch("django.utils.lorem_ipsum.random.randint")
def test_paragraph(self, mock_paragraph_randint, mock_choice, mock_sample):
"""paragraph() generates a single paragraph."""
# Make creating 2 sentences use 2 phrases.
mock_paragraph_randint.return_value = 2
mock_sample.return_value = ["exercitationem", "perferendis"]
mock_choice.return_value = "."
value = paragraph()
self.assertEqual(mock_paragraph_randint.call_count, 7)
self.assertEqual(
value,
(
"Exercitationem perferendis, exercitationem perferendis. "
"Exercitationem perferendis, exercitationem perferendis."
),
)
@mock.patch("django.utils.lorem_ipsum.random.sample")
@mock.patch("django.utils.lorem_ipsum.random.choice")
@mock.patch("django.utils.lorem_ipsum.random.randint")
def test_paragraphs_not_common(self, mock_randint, mock_choice, mock_sample):
"""
paragraphs(1, common=False) generating one paragraph that's not the
COMMON_P paragraph.
"""
# Make creating 2 sentences use 2 phrases.
mock_randint.return_value = 2
mock_sample.return_value = ["exercitationem", "perferendis"]
mock_choice.return_value = "."
self.assertEqual(
paragraphs(1, common=False),
[
"Exercitationem perferendis, exercitationem perferendis. "
"Exercitationem perferendis, exercitationem perferendis."
],
)
self.assertEqual(mock_randint.call_count, 7)
def test_paragraphs(self):
"""paragraphs(1) uses the COMMON_P paragraph."""
self.assertEqual(
paragraphs(1),
[
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, "
"sed do eiusmod tempor incididunt ut labore et dolore magna "
"aliqua. Ut enim ad minim veniam, quis nostrud exercitation "
"ullamco laboris nisi ut aliquip ex ea commodo consequat. "
"Duis aute irure dolor in reprehenderit in voluptate velit "
"esse cillum dolore eu fugiat nulla pariatur. Excepteur sint "
"occaecat cupidatat non proident, sunt in culpa qui officia "
"deserunt mollit anim id est laborum."
],
)
|
./temp_repos/django/django/utils/lorem_ipsum.py
|
./temp_repos/django/tests/utils_tests/test_lorem_ipsum.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: random
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
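The lorem_ipsum module defines no class named 'Unknown' -- it exposes only
module-level functions -- so one hedged interpretation is to exercise the
public API while mocking the module's 'random' dependency. A minimal sketch
under that assumption (class and test names are illustrative):

import unittest
from unittest import mock

from django.utils.lorem_ipsum import words


class UnknownClassFallbackTests(unittest.TestCase):
    @mock.patch("django.utils.lorem_ipsum.random.sample")
    def test_words_success(self, mock_sample):
        # Arrange
        mock_sample.return_value = ["alpha", "beta"]
        # Act
        result = words(2, common=False)
        # Assert
        self.assertEqual(result, "alpha beta")

    def test_words_failure_on_non_numeric_count(self):
        # Arrange / Act / Assert: comparing a str count against an int
        # inside words() raises TypeError.
        with self.assertRaises(TypeError):
            words("not-a-number")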
|
Unknown
|
python
|
from datetime import date, datetime
from django.conf.urls.i18n import i18n_patterns
from django.contrib.sitemaps import GenericSitemap, Sitemap, views
from django.http import HttpResponse
from django.urls import path
from django.utils import timezone
from django.views.decorators.cache import cache_page
from ..models import I18nTestModel, TestModel
class SimpleSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
lastmod = date.today()
def items(self):
return [object()]
class SimplePagedSitemap(Sitemap):
lastmod = date.today()
def items(self):
return [object() for x in range(Sitemap.limit + 1)]
class SimpleI18nSitemap(Sitemap):
changefreq = "never"
priority = 0.5
i18n = True
def items(self):
return I18nTestModel.objects.order_by("pk").all()
class AlternatesI18nSitemap(SimpleI18nSitemap):
alternates = True
class LimitedI18nSitemap(AlternatesI18nSitemap):
languages = ["en", "es"]
class XDefaultI18nSitemap(AlternatesI18nSitemap):
x_default = True
class ItemByLangSitemap(SimpleI18nSitemap):
def get_languages_for_item(self, item):
if item.name == "Only for PT":
return ["pt"]
return super().get_languages_for_item(item)
class ItemByLangAlternatesSitemap(AlternatesI18nSitemap):
x_default = True
def get_languages_for_item(self, item):
if item.name == "Only for PT":
return ["pt"]
return super().get_languages_for_item(item)
class EmptySitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
class FixedLastmodSitemap(SimpleSitemap):
lastmod = datetime(2013, 3, 13, 10, 0, 0)
class FixedLastmodMixedSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
loop = 0
def items(self):
o1 = TestModel()
o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
o2 = TestModel()
return [o1, o2]
class FixedNewerLastmodSitemap(SimpleSitemap):
lastmod = datetime(2013, 4, 20, 5, 0, 0)
class DateSiteMap(SimpleSitemap):
lastmod = date(2013, 3, 13)
class TimezoneSiteMap(SimpleSitemap):
lastmod = datetime(2013, 3, 13, 10, 0, 0, tzinfo=timezone.get_fixed_timezone(-300))
class CallableLastmodPartialSitemap(Sitemap):
"""Not all items have `lastmod`."""
location = "/location/"
def items(self):
o1 = TestModel()
o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
o2 = TestModel()
return [o1, o2]
def lastmod(self, obj):
return obj.lastmod
class CallableLastmodFullSitemap(Sitemap):
"""All items have `lastmod`."""
location = "/location/"
def items(self):
o1 = TestModel()
o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
o2 = TestModel()
o2.lastmod = datetime(2014, 3, 13, 10, 0, 0)
return [o1, o2]
def lastmod(self, obj):
return obj.lastmod
class CallableLastmodNoItemsSitemap(Sitemap):
location = "/location/"
def items(self):
return []
def lastmod(self, obj):
return obj.lastmod
class GetLatestLastmodNoneSiteMap(Sitemap):
changefreq = "never"
priority = 0.5
location = "/location/"
def items(self):
return [object()]
def lastmod(self, obj):
return datetime(2013, 3, 13, 10, 0, 0)
def get_latest_lastmod(self):
return None
class GetLatestLastmodSiteMap(SimpleSitemap):
def get_latest_lastmod(self):
return datetime(2013, 3, 13, 10, 0, 0)
def testmodelview(request, id):
return HttpResponse()
simple_sitemaps = {
"simple": SimpleSitemap,
}
simple_i18n_sitemaps = {
"i18n": SimpleI18nSitemap,
}
alternates_i18n_sitemaps = {
"i18n-alternates": AlternatesI18nSitemap,
}
limited_i18n_sitemaps = {
"i18n-limited": LimitedI18nSitemap,
}
xdefault_i18n_sitemaps = {
"i18n-xdefault": XDefaultI18nSitemap,
}
item_by_lang_i18n_sitemaps = {
"i18n-item-by-lang": ItemByLangSitemap,
}
item_by_lang_alternates_i18n_sitemaps = {
"i18n-item-by-lang-alternates": ItemByLangAlternatesSitemap,
}
simple_sitemaps_not_callable = {
"simple": SimpleSitemap(),
}
simple_sitemaps_paged = {
"simple": SimplePagedSitemap,
}
empty_sitemaps = {
"empty": EmptySitemap,
}
fixed_lastmod_sitemaps = {
"fixed-lastmod": FixedLastmodSitemap,
}
fixed_lastmod_mixed_sitemaps = {
"fixed-lastmod-mixed": FixedLastmodMixedSitemap,
}
sitemaps_lastmod_mixed_ascending = {
"no-lastmod": EmptySitemap,
"lastmod": FixedLastmodSitemap,
}
sitemaps_lastmod_mixed_descending = {
"lastmod": FixedLastmodSitemap,
"no-lastmod": EmptySitemap,
}
sitemaps_lastmod_ascending = {
"date": DateSiteMap,
"datetime": FixedLastmodSitemap,
"datetime-newer": FixedNewerLastmodSitemap,
}
sitemaps_lastmod_descending = {
"datetime-newer": FixedNewerLastmodSitemap,
"datetime": FixedLastmodSitemap,
"date": DateSiteMap,
}
generic_sitemaps = {
"generic": GenericSitemap({"queryset": TestModel.objects.order_by("pk").all()}),
}
get_latest_lastmod_none_sitemaps = {
"get-latest-lastmod-none": GetLatestLastmodNoneSiteMap,
}
get_latest_lastmod_sitemaps = {
"get-latest-lastmod": GetLatestLastmodSiteMap,
}
latest_lastmod_timezone_sitemaps = {
"latest-lastmod-timezone": TimezoneSiteMap,
}
generic_sitemaps_lastmod = {
"generic": GenericSitemap(
{
"queryset": TestModel.objects.order_by("pk").all(),
"date_field": "lastmod",
}
),
}
callable_lastmod_partial_sitemap = {
"callable-lastmod": CallableLastmodPartialSitemap,
}
callable_lastmod_full_sitemap = {
"callable-lastmod": CallableLastmodFullSitemap,
}
callable_lastmod_no_items_sitemap = {
"callable-lastmod": CallableLastmodNoItemsSitemap,
}
urlpatterns = [
path("simple/index.xml", views.index, {"sitemaps": simple_sitemaps}),
path("simple-paged/index.xml", views.index, {"sitemaps": simple_sitemaps_paged}),
path(
"simple-not-callable/index.xml",
views.index,
{"sitemaps": simple_sitemaps_not_callable},
),
path(
"simple/custom-lastmod-index.xml",
views.index,
{
"sitemaps": simple_sitemaps,
"template_name": "custom_sitemap_lastmod_index.xml",
},
),
path(
"simple/sitemap-<section>.xml",
views.sitemap,
{"sitemaps": simple_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"simple/sitemap.xml",
views.sitemap,
{"sitemaps": simple_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"simple/i18n.xml",
views.sitemap,
{"sitemaps": simple_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"alternates/i18n.xml",
views.sitemap,
{"sitemaps": alternates_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"limited/i18n.xml",
views.sitemap,
{"sitemaps": limited_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"x-default/i18n.xml",
views.sitemap,
{"sitemaps": xdefault_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"simple/custom-sitemap.xml",
views.sitemap,
{"sitemaps": simple_sitemaps, "template_name": "custom_sitemap.xml"},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"empty/sitemap.xml",
views.sitemap,
{"sitemaps": empty_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/sitemap.xml",
views.sitemap,
{"sitemaps": fixed_lastmod_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-mixed/sitemap.xml",
views.sitemap,
{"sitemaps": fixed_lastmod_mixed_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/date-sitemap.xml",
views.sitemap,
{"sitemaps": {"date-sitemap": DateSiteMap}},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/tz-sitemap.xml",
views.sitemap,
{"sitemaps": {"tz-sitemap": TimezoneSiteMap}},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/mixed-ascending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_mixed_ascending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/mixed-descending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_mixed_descending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/ascending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_ascending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"item-by-lang/i18n.xml",
views.sitemap,
{"sitemaps": item_by_lang_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"item-by-lang-alternates/i18n.xml",
views.sitemap,
{"sitemaps": item_by_lang_alternates_i18n_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod-sitemaps/descending.xml",
views.sitemap,
{"sitemaps": sitemaps_lastmod_descending},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"lastmod/get-latest-lastmod-none-sitemap.xml",
views.index,
{"sitemaps": get_latest_lastmod_none_sitemaps},
name="django.contrib.sitemaps.views.index",
),
path(
"lastmod/get-latest-lastmod-sitemap.xml",
views.index,
{"sitemaps": get_latest_lastmod_sitemaps},
name="django.contrib.sitemaps.views.index",
),
path(
"lastmod/latest-lastmod-timezone-sitemap.xml",
views.index,
{"sitemaps": latest_lastmod_timezone_sitemaps},
name="django.contrib.sitemaps.views.index",
),
path(
"generic/sitemap.xml",
views.sitemap,
{"sitemaps": generic_sitemaps},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"generic-lastmod/sitemap.xml",
views.sitemap,
{"sitemaps": generic_sitemaps_lastmod},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"cached/index.xml",
cache_page(1)(views.index),
{"sitemaps": simple_sitemaps, "sitemap_url_name": "cached_sitemap"},
),
path(
"cached/sitemap-<section>.xml",
cache_page(1)(views.sitemap),
{"sitemaps": simple_sitemaps},
name="cached_sitemap",
),
path(
"sitemap-without-entries/sitemap.xml",
views.sitemap,
{"sitemaps": {}},
name="django.contrib.sitemaps.views.sitemap",
),
path(
"callable-lastmod-partial/index.xml",
views.index,
{"sitemaps": callable_lastmod_partial_sitemap},
),
path(
"callable-lastmod-partial/sitemap.xml",
views.sitemap,
{"sitemaps": callable_lastmod_partial_sitemap},
),
path(
"callable-lastmod-full/index.xml",
views.index,
{"sitemaps": callable_lastmod_full_sitemap},
),
path(
"callable-lastmod-full/sitemap.xml",
views.sitemap,
{"sitemaps": callable_lastmod_full_sitemap},
),
path(
"callable-lastmod-no-items/index.xml",
views.index,
{"sitemaps": callable_lastmod_no_items_sitemap},
),
path(
"generic-lastmod/index.xml",
views.index,
{"sitemaps": generic_sitemaps_lastmod},
name="django.contrib.sitemaps.views.index",
),
]
urlpatterns += i18n_patterns(
path("i18n/testmodel/<int:id>/", testmodelview, name="i18n_testmodel"),
)
|
import platform
import unittest
from datetime import UTC, datetime
from unittest import mock
from django.test import SimpleTestCase
from django.utils.datastructures import MultiValueDict
from django.utils.http import (
MAX_HEADER_LENGTH,
MAX_URL_LENGTH,
base36_to_int,
content_disposition_header,
escape_leading_slashes,
http_date,
int_to_base36,
is_same_domain,
parse_etags,
parse_header_parameters,
parse_http_date,
quote_etag,
url_has_allowed_host_and_scheme,
urlencode,
urlsafe_base64_decode,
urlsafe_base64_encode,
)
class URLEncodeTests(SimpleTestCase):
cannot_encode_none_msg = (
"Cannot encode None for key 'a' in a query string. Did you mean to "
"pass an empty string or omit the value?"
)
def test_tuples(self):
self.assertEqual(urlencode((("a", 1), ("b", 2), ("c", 3))), "a=1&b=2&c=3")
def test_dict(self):
result = urlencode({"a": 1, "b": 2, "c": 3})
self.assertEqual(result, "a=1&b=2&c=3")
def test_dict_containing_sequence_not_doseq(self):
self.assertEqual(urlencode({"a": [1, 2]}, doseq=False), "a=%5B1%2C+2%5D")
def test_dict_containing_tuple_not_doseq(self):
self.assertEqual(urlencode({"a": (1, 2)}, doseq=False), "a=%281%2C+2%29")
def test_custom_iterable_not_doseq(self):
class IterableWithStr:
def __str__(self):
return "custom"
def __iter__(self):
yield from range(0, 3)
self.assertEqual(urlencode({"a": IterableWithStr()}, doseq=False), "a=custom")
def test_dict_containing_sequence_doseq(self):
self.assertEqual(urlencode({"a": [1, 2]}, doseq=True), "a=1&a=2")
def test_dict_containing_empty_sequence_doseq(self):
self.assertEqual(urlencode({"a": []}, doseq=True), "")
def test_multivaluedict(self):
result = urlencode(
MultiValueDict(
{
"name": ["Adrian", "Simon"],
"position": ["Developer"],
}
),
doseq=True,
)
self.assertEqual(result, "name=Adrian&name=Simon&position=Developer")
def test_dict_with_bytes_values(self):
self.assertEqual(urlencode({"a": b"abc"}, doseq=True), "a=abc")
def test_dict_with_sequence_of_bytes(self):
self.assertEqual(
urlencode({"a": [b"spam", b"eggs", b"bacon"]}, doseq=True),
"a=spam&a=eggs&a=bacon",
)
def test_dict_with_bytearray(self):
self.assertEqual(urlencode({"a": bytearray(range(2))}, doseq=True), "a=0&a=1")
def test_generator(self):
self.assertEqual(urlencode({"a": range(2)}, doseq=True), "a=0&a=1")
self.assertEqual(urlencode({"a": range(2)}, doseq=False), "a=range%280%2C+2%29")
def test_none(self):
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({"a": None})
def test_none_in_sequence(self):
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({"a": [None]}, doseq=True)
def test_none_in_generator(self):
def gen():
yield None
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({"a": gen()}, doseq=True)
class Base36IntTests(SimpleTestCase):
def test_roundtrip(self):
for n in [0, 1, 1000, 1000000]:
self.assertEqual(n, base36_to_int(int_to_base36(n)))
def test_negative_input(self):
with self.assertRaisesMessage(ValueError, "Negative base36 conversion input."):
int_to_base36(-1)
def test_to_base36_errors(self):
for n in ["1", "foo", {1: 2}, (1, 2, 3), 3.141]:
with self.assertRaises(TypeError):
int_to_base36(n)
def test_invalid_literal(self):
for n in ["#", " "]:
with self.assertRaisesMessage(
ValueError, "invalid literal for int() with base 36: '%s'" % n
):
base36_to_int(n)
def test_input_too_large(self):
with self.assertRaisesMessage(ValueError, "Base36 input too large"):
base36_to_int("1" * 14)
def test_to_int_errors(self):
for n in [123, {1: 2}, (1, 2, 3), 3.141]:
with self.assertRaises(TypeError):
base36_to_int(n)
def test_values(self):
for n, b36 in [(0, "0"), (1, "1"), (42, "16"), (818469960, "django")]:
self.assertEqual(int_to_base36(n), b36)
self.assertEqual(base36_to_int(b36), n)
class URLHasAllowedHostAndSchemeTests(unittest.TestCase):
def test_bad_urls(self):
bad_urls = (
"http://example.com",
"http:///example.com",
"https://example.com",
"ftp://example.com",
r"\\example.com",
r"\\\example.com",
r"/\\/example.com",
r"\\\example.com",
r"\\example.com",
r"\\//example.com",
r"/\/example.com",
r"\/example.com",
r"/\example.com",
"http:///example.com",
r"http:/\//example.com",
r"http:\/example.com",
r"http:/\example.com",
'javascript:alert("XSS")',
"\njavascript:alert(x)",
"java\nscript:alert(x)",
"\x08//example.com",
r"http://otherserver\@example.com",
r"http:\\testserver\@example.com",
r"http://testserver\me:[email protected]",
r"http://testserver\@example.com",
r"http:\\testserver\confirm\[email protected]",
"http:999999999",
"ftp:9999999999",
"\n",
"http://[2001:cdba:0000:0000:0000:0000:3257:9652/",
"http://2001:cdba:0000:0000:0000:0000:3257:9652]/",
)
for bad_url in bad_urls:
with self.subTest(url=bad_url):
self.assertIs(
url_has_allowed_host_and_scheme(
bad_url, allowed_hosts={"testserver", "testserver2"}
),
False,
)
def test_good_urls(self):
good_urls = (
"/view/?param=http://example.com",
"/view/?param=https://example.com",
"/view?param=ftp://example.com",
"view/?param=//example.com",
"https://testserver/",
"HTTPS://testserver/",
"//testserver/",
"http://testserver/[email protected]",
"/url%20with%20spaces/",
"path/http:2222222222",
)
for good_url in good_urls:
with self.subTest(url=good_url):
self.assertIs(
url_has_allowed_host_and_scheme(
good_url, allowed_hosts={"otherserver", "testserver"}
),
True,
)
def test_basic_auth(self):
# Valid basic auth credentials are allowed.
self.assertIs(
url_has_allowed_host_and_scheme(
r"http://user:pass@testserver/", allowed_hosts={"user:pass@testserver"}
),
True,
)
def test_no_allowed_hosts(self):
# A path without host is allowed.
self.assertIs(
url_has_allowed_host_and_scheme(
"/confirm/[email protected]", allowed_hosts=None
),
True,
)
# Basic auth without host is not allowed.
self.assertIs(
url_has_allowed_host_and_scheme(
r"http://testserver\@example.com", allowed_hosts=None
),
False,
)
def test_allowed_hosts_str(self):
self.assertIs(
url_has_allowed_host_and_scheme(
"http://good.com/good", allowed_hosts="good.com"
),
True,
)
self.assertIs(
url_has_allowed_host_and_scheme(
"http://good.co/evil", allowed_hosts="good.com"
),
False,
)
def test_secure_param_https_urls(self):
secure_urls = (
"https://example.com/p",
"HTTPS://example.com/p",
"/view/?param=http://example.com",
)
for url in secure_urls:
with self.subTest(url=url):
self.assertIs(
url_has_allowed_host_and_scheme(
url, allowed_hosts={"example.com"}, require_https=True
),
True,
)
def test_secure_param_non_https_urls(self):
insecure_urls = (
"http://example.com/p",
"ftp://example.com/p",
"//example.com/p",
)
for url in insecure_urls:
with self.subTest(url=url):
self.assertIs(
url_has_allowed_host_and_scheme(
url, allowed_hosts={"example.com"}, require_https=True
),
False,
)
def test_max_url_length(self):
allowed_host = "example.com"
max_extra_characters = "é" * (MAX_URL_LENGTH - len(allowed_host) - 1)
max_length_boundary_url = f"{allowed_host}/{max_extra_characters}"
cases = [
(max_length_boundary_url, True),
(max_length_boundary_url + "ú", False),
]
for url, expected in cases:
with self.subTest(url=url):
self.assertIs(
url_has_allowed_host_and_scheme(url, allowed_hosts={allowed_host}),
expected,
)
class URLSafeBase64Tests(unittest.TestCase):
def test_roundtrip(self):
bytestring = b"foo"
encoded = urlsafe_base64_encode(bytestring)
decoded = urlsafe_base64_decode(encoded)
self.assertEqual(bytestring, decoded)
class IsSameDomainTests(unittest.TestCase):
def test_good(self):
for pair in (
("example.com", "example.com"),
("example.com", ".example.com"),
("foo.example.com", ".example.com"),
("example.com:8888", "example.com:8888"),
("example.com:8888", ".example.com:8888"),
("foo.example.com:8888", ".example.com:8888"),
):
self.assertIs(is_same_domain(*pair), True)
def test_bad(self):
for pair in (
("example2.com", "example.com"),
("foo.example.com", "example.com"),
("example.com:9999", "example.com:8888"),
("foo.example.com:8888", ""),
):
self.assertIs(is_same_domain(*pair), False)
class ETagProcessingTests(unittest.TestCase):
def test_parsing(self):
self.assertEqual(
parse_etags(r'"" , "etag", "e\\tag", W/"weak"'),
['""', '"etag"', r'"e\\tag"', 'W/"weak"'],
)
self.assertEqual(parse_etags("*"), ["*"])
# Ignore RFC 2616 ETags that are invalid according to RFC 9110.
self.assertEqual(parse_etags(r'"etag", "e\"t\"ag"'), ['"etag"'])
def test_quoting(self):
self.assertEqual(quote_etag("etag"), '"etag"') # unquoted
self.assertEqual(quote_etag('"etag"'), '"etag"') # quoted
self.assertEqual(quote_etag('W/"etag"'), 'W/"etag"') # quoted, weak
class HttpDateProcessingTests(unittest.TestCase):
def test_http_date(self):
t = 1167616461.0
self.assertEqual(http_date(t), "Mon, 01 Jan 2007 01:54:21 GMT")
def test_parsing_rfc1123(self):
parsed = parse_http_date("Sun, 06 Nov 1994 08:49:37 GMT")
self.assertEqual(
datetime.fromtimestamp(parsed, UTC),
datetime(1994, 11, 6, 8, 49, 37, tzinfo=UTC),
)
@unittest.skipIf(platform.architecture()[0] == "32bit", "The Year 2038 problem.")
@mock.patch("django.utils.http.datetime")
def test_parsing_rfc850(self, mocked_datetime):
mocked_datetime.side_effect = datetime
now_1 = datetime(2019, 11, 6, 8, 49, 37, tzinfo=UTC)
now_2 = datetime(2020, 11, 6, 8, 49, 37, tzinfo=UTC)
now_3 = datetime(2048, 11, 6, 8, 49, 37, tzinfo=UTC)
tests = (
(
now_1,
"Tuesday, 31-Dec-69 08:49:37 GMT",
datetime(2069, 12, 31, 8, 49, 37, tzinfo=UTC),
),
(
now_1,
"Tuesday, 10-Nov-70 08:49:37 GMT",
datetime(1970, 11, 10, 8, 49, 37, tzinfo=UTC),
),
(
now_1,
"Sunday, 06-Nov-94 08:49:37 GMT",
datetime(1994, 11, 6, 8, 49, 37, tzinfo=UTC),
),
(
now_2,
"Wednesday, 31-Dec-70 08:49:37 GMT",
datetime(2070, 12, 31, 8, 49, 37, tzinfo=UTC),
),
(
now_2,
"Friday, 31-Dec-71 08:49:37 GMT",
datetime(1971, 12, 31, 8, 49, 37, tzinfo=UTC),
),
(
now_3,
"Sunday, 31-Dec-00 08:49:37 GMT",
datetime(2000, 12, 31, 8, 49, 37, tzinfo=UTC),
),
(
now_3,
"Friday, 31-Dec-99 08:49:37 GMT",
datetime(1999, 12, 31, 8, 49, 37, tzinfo=UTC),
),
)
for now, rfc850str, expected_date in tests:
with self.subTest(rfc850str=rfc850str):
mocked_datetime.now.return_value = now
parsed = parse_http_date(rfc850str)
mocked_datetime.now.assert_called_once_with(tz=UTC)
self.assertEqual(
datetime.fromtimestamp(parsed, UTC),
expected_date,
)
mocked_datetime.reset_mock()
def test_parsing_asctime(self):
parsed = parse_http_date("Sun Nov 6 08:49:37 1994")
self.assertEqual(
datetime.fromtimestamp(parsed, UTC),
datetime(1994, 11, 6, 8, 49, 37, tzinfo=UTC),
)
def test_parsing_asctime_nonascii_digits(self):
"""Non-ASCII unicode decimals raise an error."""
        # Arabic-Indic digits are non-ASCII decimals, so parsing must fail.
        with self.assertRaises(ValueError):
            parse_http_date("Sun Nov ٦ 08:49:37 1994")
        with self.assertRaises(ValueError):
            parse_http_date("Sun Nov ١٢ 08:49:37 1994")
def test_parsing_year_less_than_70(self):
parsed = parse_http_date("Sun Nov 6 08:49:37 0037")
self.assertEqual(
datetime.fromtimestamp(parsed, UTC),
datetime(2037, 11, 6, 8, 49, 37, tzinfo=UTC),
)
class EscapeLeadingSlashesTests(unittest.TestCase):
def test(self):
tests = (
("//example.com", "/%2Fexample.com"),
("//", "/%2F"),
)
for url, expected in tests:
with self.subTest(url=url):
self.assertEqual(escape_leading_slashes(url), expected)
class ParseHeaderParameterTests(unittest.TestCase):
def test_basic(self):
tests = [
("", ("", {})),
(None, ("", {})),
("text/plain", ("text/plain", {})),
("text/vnd.just.made.this.up ; ", ("text/vnd.just.made.this.up", {})),
("text/plain;charset=us-ascii", ("text/plain", {"charset": "us-ascii"})),
(
'text/plain ; charset="us-ascii"',
("text/plain", {"charset": "us-ascii"}),
),
(
'text/plain ; charset="us-ascii"; another=opt',
("text/plain", {"charset": "us-ascii", "another": "opt"}),
),
(
'attachment; filename="silly.txt"',
("attachment", {"filename": "silly.txt"}),
),
(
'attachment; filename="strange;name"',
("attachment", {"filename": "strange;name"}),
),
(
'attachment; filename="strange;name";size=123;',
("attachment", {"filename": "strange;name", "size": "123"}),
),
(
'attachment; filename="strange;name";;;;size=123;;;',
("attachment", {"filename": "strange;name", "size": "123"}),
),
(
'form-data; name="files"; filename="fo\\"o;bar"',
("form-data", {"name": "files", "filename": 'fo"o;bar'}),
),
(
'form-data; name="files"; filename="\\"fo\\"o;b\\\\ar\\""',
("form-data", {"name": "files", "filename": '"fo"o;b\\ar"'}),
),
]
for header, expected in tests:
with self.subTest(header=header):
self.assertEqual(parse_header_parameters(header), expected)
def test_rfc2231_parsing(self):
test_data = (
(
"Content-Type: application/x-stuff; "
"title*=us-ascii'en-us'This%20is%20%2A%2A%2Afun%2A%2A%2A",
"This is ***fun***",
),
(
"Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html",
"foo-ä.html",
),
(
"Content-Type: application/x-stuff; title*=iso-8859-1''foo-%E4.html",
"foo-ä.html",
),
)
for raw_line, expected_title in test_data:
parsed = parse_header_parameters(raw_line)
self.assertEqual(parsed[1]["title"], expected_title)
def test_rfc2231_wrong_title(self):
"""
Test wrongly formatted RFC 2231 headers (missing double single quotes).
Parsing should not crash (#24209).
"""
test_data = (
(
"Content-Type: application/x-stuff; "
"title*='This%20is%20%2A%2A%2Afun%2A%2A%2A",
"'This%20is%20%2A%2A%2Afun%2A%2A%2A",
),
("Content-Type: application/x-stuff; title*='foo.html", "'foo.html"),
("Content-Type: application/x-stuff; title*=bar.html", "bar.html"),
)
for raw_line, expected_title in test_data:
parsed = parse_header_parameters(raw_line)
self.assertEqual(parsed[1]["title"], expected_title)
def test_header_max_length(self):
base_header = "Content-Type: application/x-stuff; title*="
base_header_len = len(base_header)
test_data = [
(MAX_HEADER_LENGTH, {}),
(MAX_HEADER_LENGTH, {"max_length": None}),
(MAX_HEADER_LENGTH + 1, {"max_length": None}),
(100, {"max_length": 100}),
]
for line_length, kwargs in test_data:
with self.subTest(line_length=line_length, kwargs=kwargs):
title = "x" * (line_length - base_header_len)
line = base_header + title
assert len(line) == line_length
parsed = parse_header_parameters(line, **kwargs)
expected = ("content-type: application/x-stuff", {"title": title})
self.assertEqual(parsed, expected)
def test_header_too_long(self):
test_data = [
("x" * (MAX_HEADER_LENGTH + 1), {}),
("x" * 101, {"max_length": 100}),
]
for line, kwargs in test_data:
with self.subTest(line_length=len(line), kwargs=kwargs):
with self.assertRaises(ValueError):
parse_header_parameters(line, **kwargs)
class ContentDispositionHeaderTests(unittest.TestCase):
def test_basic(self):
tests = (
((False, None), None),
((False, "example"), 'inline; filename="example"'),
((True, None), "attachment"),
((True, "example"), 'attachment; filename="example"'),
(
(True, '"example" file\\name'),
'attachment; filename="\\"example\\" file\\\\name"',
),
((True, "espécimen"), "attachment; filename*=utf-8''esp%C3%A9cimen"),
(
(True, '"espécimen" filename'),
"attachment; filename*=utf-8''%22esp%C3%A9cimen%22%20filename",
),
((True, "some\nfile"), "attachment; filename*=utf-8''some%0Afile"),
)
for (is_attachment, filename), expected in tests:
with self.subTest(is_attachment=is_attachment, filename=filename):
self.assertEqual(
content_disposition_header(is_attachment, filename), expected
)
|
./temp_repos/django/tests/sitemaps_tests/urls/http.py
|
./temp_repos/django/tests/utils_tests/test_http.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'SimpleSitemap'.
Context:
- Class Name: SimpleSitemap
- Dependencies to Mock: None detected
- Key Imports: django.http, django.contrib.sitemaps, django.urls, django.utils, django.views.decorators.cache, django.conf.urls.i18n, models, datetime
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
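A minimal sketch, assuming SimpleSitemap is imported from the sitemaps test
URLconf above (the exact import path depends on the test-suite layout) and
that Django settings are configured by the test runner:

import unittest
from datetime import date
from unittest import mock

# Hypothetical import path; adjust to wherever the URLconf module lives.
from tests.sitemaps_tests.urls.http import SimpleSitemap


class SimpleSitemapTests(unittest.TestCase):
    def test_items_returns_single_object(self):
        # Arrange
        sitemap = SimpleSitemap()
        # Act
        items = sitemap.items()
        # Assert
        self.assertEqual(len(items), 1)

    def test_static_attributes(self):
        # Arrange
        sitemap = SimpleSitemap()
        # Act / Assert
        self.assertEqual(sitemap.location, "/location/")
        self.assertEqual(sitemap.changefreq, "never")
        self.assertEqual(sitemap.priority, 0.5)
        # lastmod is evaluated at class-definition time, so only its type
        # is asserted here.
        self.assertIsInstance(sitemap.lastmod, date)

    def test_items_failure_propagates(self):
        # Arrange: force items() to fail to exercise the error path.
        with mock.patch.object(SimpleSitemap, "items", side_effect=RuntimeError):
            sitemap = SimpleSitemap()
            # Act / Assert
            with self.assertRaises(RuntimeError):
                sitemap.items()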
|
SimpleSitemap
|
python
|
import itertools
import logging
import os
import signal
import subprocess
import sys
import threading
import time
import traceback
import weakref
from collections import defaultdict
from functools import lru_cache, wraps
from pathlib import Path
from types import ModuleType
from zipimport import zipimporter
import django
from django.apps import apps
from django.core.signals import request_finished
from django.dispatch import Signal
from django.utils.functional import cached_property
from django.utils.version import get_version_tuple
autoreload_started = Signal()
file_changed = Signal()
DJANGO_AUTORELOAD_ENV = "RUN_MAIN"
logger = logging.getLogger("django.utils.autoreload")
# If an error is raised while importing a file, it's not placed in sys.modules.
# This means that any future modifications aren't caught. Keep a list of these
# file paths to allow watching them in the future.
_error_files = []
_exception = None
try:
import termios
except ImportError:
termios = None
try:
import pywatchman
except ImportError:
pywatchman = None
def is_django_module(module):
"""Return True if the given module is nested under Django."""
return module.__name__.startswith("django.")
def is_django_path(path):
"""Return True if the given file path is nested under Django."""
return Path(django.__file__).parent in Path(path).parents
def check_errors(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
global _exception
try:
fn(*args, **kwargs)
except Exception:
_exception = sys.exc_info()
et, ev, tb = _exception
if getattr(ev, "filename", None) is None:
# get the filename from the last item in the stack
filename = traceback.extract_tb(tb)[-1][0]
else:
filename = ev.filename
if filename not in _error_files:
_error_files.append(filename)
raise
return wrapper
def raise_last_exception():
if _exception is not None:
raise _exception[1]
def ensure_echo_on():
"""
    Ensure that echo mode is enabled. Some tools such as PDB disable it,
    which causes usability issues after a reload.
"""
if not termios or not sys.stdin.isatty():
return
attr_list = termios.tcgetattr(sys.stdin)
if not attr_list[3] & termios.ECHO:
attr_list[3] |= termios.ECHO
if hasattr(signal, "SIGTTOU"):
old_handler = signal.signal(signal.SIGTTOU, signal.SIG_IGN)
else:
old_handler = None
termios.tcsetattr(sys.stdin, termios.TCSANOW, attr_list)
if old_handler is not None:
signal.signal(signal.SIGTTOU, old_handler)
def iter_all_python_module_files():
# This is a hot path during reloading. Create a stable sorted list of
# modules based on the module name and pass it to iter_modules_and_files().
# This ensures cached results are returned in the usual case that modules
# aren't loaded on the fly.
keys = sorted(sys.modules)
modules = tuple(
m
for m in map(sys.modules.__getitem__, keys)
if not isinstance(m, weakref.ProxyTypes)
)
return iter_modules_and_files(modules, frozenset(_error_files))
@lru_cache(maxsize=1)
def iter_modules_and_files(modules, extra_files):
"""Iterate through all modules needed to be watched."""
sys_file_paths = []
for module in modules:
        # During debugging (with PyDev) the 'typing.io' and 'typing.re'
        # objects are added to sys.modules; however, they are types, not
        # modules, and so cause issues here.
if not isinstance(module, ModuleType):
continue
if module.__name__ in ("__main__", "__mp_main__"):
# __main__ (usually manage.py) doesn't always have a __spec__ set.
# Handle this by falling back to using __file__, resolved below.
# See https://docs.python.org/reference/import.html#main-spec
            # __file__ may not exist, e.g. when running the ipdb debugger.
if hasattr(module, "__file__"):
sys_file_paths.append(module.__file__)
continue
if getattr(module, "__spec__", None) is None:
continue
spec = module.__spec__
# Modules could be loaded from places without a concrete location. If
# this is the case, skip them.
if spec.has_location:
origin = (
spec.loader.archive
if isinstance(spec.loader, zipimporter)
else spec.origin
)
sys_file_paths.append(origin)
results = set()
for filename in itertools.chain(sys_file_paths, extra_files):
if not filename:
continue
path = Path(filename)
try:
if not path.exists():
# The module could have been removed, don't fail loudly if this
# is the case.
continue
except ValueError as e:
# Network filesystems may return null bytes in file paths.
logger.debug('"%s" raised when resolving path: "%s"', e, path)
continue
resolved_path = path.resolve().absolute()
results.add(resolved_path)
return frozenset(results)
@lru_cache(maxsize=1)
def common_roots(paths):
"""
Return a tuple of common roots that are shared between the given paths.
File system watchers operate on directories and aren't cheap to create.
Try to find the minimum set of directories to watch that encompass all of
the files that need to be watched.
"""
# Inspired from Werkzeug:
# https://github.com/pallets/werkzeug/blob/7477be2853df70a022d9613e765581b9411c3c39/werkzeug/_reloader.py
# Create a sorted list of the path components, longest first.
path_parts = sorted([x.parts for x in paths], key=len, reverse=True)
tree = {}
for chunks in path_parts:
node = tree
# Add each part of the path to the tree.
for chunk in chunks:
node = node.setdefault(chunk, {})
# Clear the last leaf in the tree.
node.clear()
# Turn the tree into a list of Path instances.
def _walk(node, path):
for prefix, child in node.items():
yield from _walk(child, [*path, prefix])
if not node:
yield Path(*path)
return tuple(_walk(tree, ()))
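# Illustrative only (not part of the original module); assumes POSIX paths.
# The argument must be hashable (e.g. a frozenset) because of @lru_cache:
#   common_roots(frozenset({Path("/a/b/c"), Path("/a/b"), Path("/x/y")}))
#   -> (Path("/a/b"), Path("/x/y")), since /a/b/c collapses into /a/b.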
def sys_path_directories():
"""
Yield absolute directories from sys.path, ignoring entries that don't
exist.
"""
for path in sys.path:
path = Path(path)
if not path.exists():
continue
resolved_path = path.resolve().absolute()
# If the path is a file (like a zip file), watch the parent directory.
if resolved_path.is_file():
yield resolved_path.parent
else:
yield resolved_path
def get_child_arguments():
"""
    Return the arguments used to relaunch the current process. Contains a
    workaround for Windows when the executable is reported without the .exe
    extension, which can cause bugs on reloading.
"""
import __main__
py_script = Path(sys.argv[0])
exe_entrypoint = py_script.with_suffix(".exe")
args = [sys.executable] + ["-W%s" % o for o in sys.warnoptions]
if sys.implementation.name in ("cpython", "pypy"):
args.extend(
f"-X{key}" if value is True else f"-X{key}={value}"
for key, value in sys._xoptions.items()
)
# __spec__ is set when the server was started with the `-m` option,
# see https://docs.python.org/3/reference/import.html#main-spec
# __spec__ may not exist, e.g. when running in a Conda env.
if getattr(__main__, "__spec__", None) is not None and not exe_entrypoint.exists():
spec = __main__.__spec__
if (spec.name == "__main__" or spec.name.endswith(".__main__")) and spec.parent:
name = spec.parent
else:
name = spec.name
args += ["-m", name]
args += sys.argv[1:]
elif not py_script.exists():
# sys.argv[0] may not exist for several reasons on Windows.
# It may exist with a .exe extension or have a -script.py suffix.
if exe_entrypoint.exists():
# Should be executed directly, ignoring sys.executable.
return [exe_entrypoint, *sys.argv[1:]]
script_entrypoint = py_script.with_name("%s-script.py" % py_script.name)
if script_entrypoint.exists():
# Should be executed as usual.
return [*args, script_entrypoint, *sys.argv[1:]]
raise RuntimeError("Script %s does not exist." % py_script)
else:
args += sys.argv
return args
def trigger_reload(filename):
logger.info("%s changed, reloading.", filename)
sys.exit(3)
def restart_with_reloader():
new_environ = {**os.environ, DJANGO_AUTORELOAD_ENV: "true"}
orig = getattr(sys, "orig_argv", ())
if any(
(arg == "-u")
or (
arg.startswith("-")
and not arg.startswith(("--", "-X", "-W"))
and len(arg) > 2
and arg[1:].isalpha()
and "u" in arg
)
for arg in orig[1:]
):
new_environ.setdefault("PYTHONUNBUFFERED", "1")
args = get_child_arguments()
while True:
p = subprocess.run(args, env=new_environ, close_fds=False)
if p.returncode != 3:
return p.returncode
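# Illustrative summary (not part of the original module): trigger_reload()
# exits the child process with status 3, which the loop above treats as
# "restart"; any other status ends the loop and is returned to the caller.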
class BaseReloader:
def __init__(self):
self.extra_files = set()
self.directory_globs = defaultdict(set)
self._stop_condition = threading.Event()
def watch_dir(self, path, glob):
path = Path(path)
try:
path = path.absolute()
except FileNotFoundError:
logger.debug(
"Unable to watch directory %s as it cannot be resolved.",
path,
exc_info=True,
)
return
logger.debug("Watching dir %s with glob %s.", path, glob)
self.directory_globs[path].add(glob)
def watched_files(self, include_globs=True):
"""
Yield all files that need to be watched, including module files and
files within globs.
"""
yield from iter_all_python_module_files()
yield from self.extra_files
if include_globs:
for directory, patterns in self.directory_globs.items():
for pattern in patterns:
yield from directory.glob(pattern)
def wait_for_apps_ready(self, app_reg, django_main_thread):
"""
Wait until Django reports that the apps have been loaded. If the given
thread has terminated before the apps are ready, then a SyntaxError or
other non-recoverable error has been raised. In that case, stop waiting
for the apps_ready event and continue processing.
Return True if the thread is alive and the ready event has been
triggered, or False if the thread is terminated while waiting for the
event.
"""
while django_main_thread.is_alive():
if app_reg.ready_event.wait(timeout=0.1):
return True
else:
logger.debug("Main Django thread has terminated before apps are ready.")
return False
def run(self, django_main_thread):
logger.debug("Waiting for apps ready_event.")
self.wait_for_apps_ready(apps, django_main_thread)
from django.urls import get_resolver
# Prevent a race condition where URL modules aren't loaded when the
# reloader starts by accessing the urlconf_module property.
try:
get_resolver().urlconf_module
except Exception:
# Loading the urlconf can result in errors during development.
# If this occurs then swallow the error and continue.
pass
logger.debug("Apps ready_event triggered. Sending autoreload_started signal.")
autoreload_started.send(sender=self)
self.run_loop()
def run_loop(self):
ticker = self.tick()
while not self.should_stop:
try:
next(ticker)
except StopIteration:
break
self.stop()
def tick(self):
"""
This generator is called in a loop from run_loop. It's important that
the method takes care of pausing or otherwise waiting for a period of
time. This split between run_loop() and tick() is to improve the
testability of the reloader implementations by decoupling the work they
do from the loop.
"""
raise NotImplementedError("subclasses must implement tick().")
@classmethod
def check_availability(cls):
raise NotImplementedError("subclasses must implement check_availability().")
def notify_file_changed(self, path):
results = file_changed.send(sender=self, file_path=path)
logger.debug("%s notified as changed. Signal results: %s.", path, results)
if not any(res[1] for res in results):
trigger_reload(path)
# These are primarily used for testing.
@property
def should_stop(self):
return self._stop_condition.is_set()
def stop(self):
self._stop_condition.set()
class StatReloader(BaseReloader):
SLEEP_TIME = 1 # Check for changes once per second.
def tick(self):
mtimes = {}
while True:
for filepath, mtime in self.snapshot_files():
old_time = mtimes.get(filepath)
mtimes[filepath] = mtime
if old_time is None:
logger.debug("File %s first seen with mtime %s", filepath, mtime)
continue
elif mtime > old_time:
logger.debug(
"File %s previous mtime: %s, current mtime: %s",
filepath,
old_time,
mtime,
)
self.notify_file_changed(filepath)
time.sleep(self.SLEEP_TIME)
yield
def snapshot_files(self):
# watched_files may produce duplicate paths if globs overlap.
seen_files = set()
for file in self.watched_files():
if file in seen_files:
continue
try:
mtime = file.stat().st_mtime
except OSError:
# This is thrown when the file does not exist.
continue
seen_files.add(file)
yield file, mtime
@classmethod
def check_availability(cls):
return True
class WatchmanUnavailable(RuntimeError):
pass
class WatchmanReloader(BaseReloader):
def __init__(self):
self.roots = defaultdict(set)
self.processed_request = threading.Event()
self.client_timeout = int(os.environ.get("DJANGO_WATCHMAN_TIMEOUT", 5))
super().__init__()
@cached_property
def client(self):
return pywatchman.client(timeout=self.client_timeout)
def _watch_root(self, root):
        # In practice this shouldn't occur; however, it's possible that a
        # directory that doesn't exist yet is being watched. If it's outside
        # of sys.path then this will end up as a new root. How to handle this
        # isn't clear: not adding the root will likely break when subscribing
        # to the changes; however, as this is currently an internal API, no
        # files outside of sys.path will be watched. Fixing this by checking
        # inside watch_glob() and watch_dir() is expensive; instead, this
        # could fall back to the StatReloader if this case is detected. For
        # now, watching the parent directory, if possible, is sufficient.
if not root.exists():
if not root.parent.exists():
logger.warning(
"Unable to watch root dir %s as neither it or its parent exist.",
root,
)
return
root = root.parent
result = self.client.query("watch-project", str(root.absolute()))
if "warning" in result:
logger.warning("Watchman warning: %s", result["warning"])
logger.debug("Watchman watch-project result: %s", result)
return result["watch"], result.get("relative_path")
@lru_cache
def _get_clock(self, root):
return self.client.query("clock", root)["clock"]
def _subscribe(self, directory, name, expression):
root, rel_path = self._watch_root(directory)
# Only receive notifications of files changing, filtering out other
# types like special files:
# https://facebook.github.io/watchman/docs/type
only_files_expression = [
"allof",
["anyof", ["type", "f"], ["type", "l"]],
expression,
]
query = {
"expression": only_files_expression,
"fields": ["name"],
"since": self._get_clock(root),
"dedup_results": True,
}
if rel_path:
query["relative_root"] = rel_path
logger.debug(
"Issuing watchman subscription %s, for root %s. Query: %s",
name,
root,
query,
)
self.client.query("subscribe", root, name, query)
def _subscribe_dir(self, directory, filenames):
if not directory.exists():
if not directory.parent.exists():
logger.warning(
"Unable to watch directory %s as neither it or its parent exist.",
directory,
)
return
prefix = "files-parent-%s" % directory.name
filenames = ["%s/%s" % (directory.name, filename) for filename in filenames]
directory = directory.parent
expression = ["name", filenames, "wholename"]
else:
prefix = "files"
expression = ["name", filenames]
self._subscribe(directory, "%s:%s" % (prefix, directory), expression)
def _watch_glob(self, directory, patterns):
"""
Watch a directory with a specific glob. If the directory doesn't yet
exist, attempt to watch the parent directory and amend the patterns to
        include this. It's important this method isn't called more than once per
directory when updating all subscriptions. Subsequent calls will
overwrite the named subscription, so it must include all possible glob
expressions.
"""
prefix = "glob"
if not directory.exists():
if not directory.parent.exists():
logger.warning(
"Unable to watch directory %s as neither it or its parent exist.",
directory,
)
return
prefix = "glob-parent-%s" % directory.name
patterns = ["%s/%s" % (directory.name, pattern) for pattern in patterns]
directory = directory.parent
expression = ["anyof"]
for pattern in patterns:
expression.append(["match", pattern, "wholename"])
self._subscribe(directory, "%s:%s" % (prefix, directory), expression)
def watched_roots(self, watched_files):
extra_directories = self.directory_globs.keys()
watched_file_dirs = [f.parent for f in watched_files]
sys_paths = list(sys_path_directories())
return frozenset((*extra_directories, *watched_file_dirs, *sys_paths))
def _update_watches(self):
watched_files = list(self.watched_files(include_globs=False))
found_roots = common_roots(self.watched_roots(watched_files))
logger.debug("Watching %s files", len(watched_files))
logger.debug("Found common roots: %s", found_roots)
# Setup initial roots for performance, shortest roots first.
for root in sorted(found_roots):
self._watch_root(root)
for directory, patterns in self.directory_globs.items():
self._watch_glob(directory, patterns)
# Group sorted watched_files by their parent directory.
sorted_files = sorted(watched_files, key=lambda p: p.parent)
for directory, group in itertools.groupby(sorted_files, key=lambda p: p.parent):
# These paths need to be relative to the parent directory.
self._subscribe_dir(
directory, [str(p.relative_to(directory)) for p in group]
)
def update_watches(self):
try:
self._update_watches()
except Exception as ex:
# If the service is still available, raise the original exception.
if self.check_server_status(ex):
raise
def _check_subscription(self, sub):
subscription = self.client.getSubscription(sub)
if not subscription:
return
logger.debug("Watchman subscription %s has results.", sub)
for result in subscription:
# When using watch-project, it's not simple to get the relative
# directory without storing some specific state. Store the full
# path to the directory in the subscription name, prefixed by its
# type (glob, files).
root_directory = Path(result["subscription"].split(":", 1)[1])
logger.debug("Found root directory %s", root_directory)
for file in result.get("files", []):
self.notify_file_changed(root_directory / file)
def request_processed(self, **kwargs):
logger.debug("Request processed. Setting update_watches event.")
self.processed_request.set()
def tick(self):
request_finished.connect(self.request_processed)
self.update_watches()
while True:
if self.processed_request.is_set():
self.update_watches()
self.processed_request.clear()
try:
self.client.receive()
except pywatchman.SocketTimeout:
pass
except pywatchman.WatchmanError as ex:
logger.debug("Watchman error: %s, checking server status.", ex)
self.check_server_status(ex)
else:
for sub in list(self.client.subs.keys()):
self._check_subscription(sub)
yield
# Protect against busy loops.
time.sleep(0.1)
def stop(self):
self.client.close()
super().stop()
def check_server_status(self, inner_ex=None):
"""Return True if the server is available."""
try:
self.client.query("version")
except Exception:
raise WatchmanUnavailable(str(inner_ex)) from inner_ex
return True
@classmethod
def check_availability(cls):
if not pywatchman:
raise WatchmanUnavailable("pywatchman not installed.")
client = pywatchman.client(timeout=0.1)
try:
result = client.capabilityCheck()
except Exception:
# The service is down?
raise WatchmanUnavailable("Cannot connect to the watchman service.")
version = get_version_tuple(result["version"])
# Watchman 4.9 includes multiple improvements to watching project
# directories as well as case insensitive filesystems.
logger.debug("Watchman version %s", version)
if version < (4, 9):
raise WatchmanUnavailable("Watchman 4.9 or later is required.")
def get_reloader():
"""Return the most suitable reloader for this environment."""
try:
WatchmanReloader.check_availability()
except WatchmanUnavailable:
return StatReloader()
return WatchmanReloader()
def start_django(reloader, main_func, *args, **kwargs):
ensure_echo_on()
main_func = check_errors(main_func)
django_main_thread = threading.Thread(
target=main_func, args=args, kwargs=kwargs, name="django-main-thread"
)
django_main_thread.daemon = True
django_main_thread.start()
while not reloader.should_stop:
reloader.run(django_main_thread)
def run_with_reloader(main_func, *args, **kwargs):
signal.signal(signal.SIGTERM, lambda *args: sys.exit(0))
try:
if os.environ.get(DJANGO_AUTORELOAD_ENV) == "true":
reloader = get_reloader()
logger.info(
"Watching for file changes with %s", reloader.__class__.__name__
)
start_django(reloader, main_func, *args, **kwargs)
else:
exit_code = restart_with_reloader()
sys.exit(exit_code)
except KeyboardInterrupt:
pass
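# Illustrative usage sketch (assumed caller, not part of this module): a
# management command such as runserver hands its serving function to
# run_with_reloader(). The first invocation has DJANGO_AUTORELOAD_ENV unset,
# so restart_with_reloader() re-executes the process as a child with the
# variable set to "true"; the child then picks a reloader and runs the
# function in the "django-main-thread" thread.
#
#     from django.utils import autoreload
#
#     def inner_run():
#         ...  # start serving
#
#     autoreload.run_with_reloader(inner_run)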
|
import contextlib
import os
import py_compile
import shutil
import sys
import tempfile
import threading
import time
import types
import weakref
import zipfile
import zoneinfo
from importlib import import_module
from pathlib import Path
from subprocess import CompletedProcess
from unittest import mock, skip, skipIf
import django.__main__
from django.apps.registry import Apps
from django.test import SimpleTestCase
from django.test.utils import extend_sys_path
from django.utils import autoreload
from django.utils.autoreload import WatchmanUnavailable
from .test_module import __main__ as test_main
from .test_module import main_module as test_main_module
from .utils import on_macos_with_hfs
class TestIterModulesAndFiles(SimpleTestCase):
def import_and_cleanup(self, name):
import_module(name)
self.addCleanup(lambda: sys.path_importer_cache.clear())
self.addCleanup(lambda: sys.modules.pop(name, None))
def clear_autoreload_caches(self):
autoreload.iter_modules_and_files.cache_clear()
def assertFileFound(self, filename):
# Some temp directories are symlinks. Python resolves these fully while
# importing.
resolved_filename = filename.resolve(strict=True)
self.clear_autoreload_caches()
# Test uncached access
self.assertIn(
resolved_filename, list(autoreload.iter_all_python_module_files())
)
# Test cached access
self.assertIn(
resolved_filename, list(autoreload.iter_all_python_module_files())
)
self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1)
def assertFileNotFound(self, filename):
resolved_filename = filename.resolve(strict=True)
self.clear_autoreload_caches()
# Test uncached access
self.assertNotIn(
resolved_filename, list(autoreload.iter_all_python_module_files())
)
# Test cached access
self.assertNotIn(
resolved_filename, list(autoreload.iter_all_python_module_files())
)
self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1)
def temporary_file(self, filename):
dirname = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, dirname)
return Path(dirname) / filename
def test_paths_are_pathlib_instances(self):
for filename in autoreload.iter_all_python_module_files():
self.assertIsInstance(filename, Path)
def test_file_added(self):
"""
When a file is added, it's returned by iter_all_python_module_files().
"""
filename = self.temporary_file("test_deleted_removed_module.py")
filename.touch()
with extend_sys_path(str(filename.parent)):
self.import_and_cleanup("test_deleted_removed_module")
self.assertFileFound(filename.absolute())
def test_check_errors(self):
"""
When a file containing an error is imported in a function wrapped by
check_errors(), gen_filenames() returns it.
"""
filename = self.temporary_file("test_syntax_error.py")
filename.write_text("Ceci n'est pas du Python.")
with extend_sys_path(str(filename.parent)):
try:
with self.assertRaises(SyntaxError):
autoreload.check_errors(import_module)("test_syntax_error")
finally:
autoreload._exception = None
self.assertFileFound(filename)
def test_check_errors_catches_all_exceptions(self):
"""
Since Python may raise arbitrary exceptions when importing code,
check_errors() must catch Exception, not just some subclasses.
"""
filename = self.temporary_file("test_exception.py")
filename.write_text("raise Exception")
with extend_sys_path(str(filename.parent)):
try:
with self.assertRaises(Exception):
autoreload.check_errors(import_module)("test_exception")
finally:
autoreload._exception = None
self.assertFileFound(filename)
def test_zip_reload(self):
"""
Modules imported from zipped files have their archive location included
in the result.
"""
zip_file = self.temporary_file("zip_import.zip")
with zipfile.ZipFile(str(zip_file), "w", zipfile.ZIP_DEFLATED) as zipf:
zipf.writestr("test_zipped_file.py", "")
with extend_sys_path(str(zip_file)):
self.import_and_cleanup("test_zipped_file")
self.assertFileFound(zip_file)
def test_bytecode_conversion_to_source(self):
""".pyc and .pyo files are included in the files list."""
filename = self.temporary_file("test_compiled.py")
filename.touch()
compiled_file = Path(
py_compile.compile(str(filename), str(filename.with_suffix(".pyc")))
)
filename.unlink()
with extend_sys_path(str(compiled_file.parent)):
self.import_and_cleanup("test_compiled")
self.assertFileFound(compiled_file)
def test_weakref_in_sys_module(self):
"""iter_all_python_module_file() ignores weakref modules."""
time_proxy = weakref.proxy(time)
sys.modules["time_proxy"] = time_proxy
self.addCleanup(lambda: sys.modules.pop("time_proxy", None))
list(autoreload.iter_all_python_module_files()) # No crash.
def test_module_without_spec(self):
module = types.ModuleType("test_module")
del module.__spec__
self.assertEqual(
autoreload.iter_modules_and_files((module,), frozenset()), frozenset()
)
def test_main_module_is_resolved(self):
main_module = sys.modules["__main__"]
self.assertFileFound(Path(main_module.__file__))
def test_main_module_without_file_is_not_resolved(self):
fake_main = types.ModuleType("__main__")
self.assertEqual(
autoreload.iter_modules_and_files((fake_main,), frozenset()), frozenset()
)
def test_path_with_embedded_null_bytes(self):
for path in (
"embedded_null_byte\x00.py",
"di\x00rectory/embedded_null_byte.py",
):
with self.subTest(path=path):
self.assertEqual(
autoreload.iter_modules_and_files((), frozenset([path])),
frozenset(),
)
class TestChildArguments(SimpleTestCase):
@mock.patch.dict(sys.modules, {"__main__": django.__main__})
@mock.patch("sys.argv", [django.__main__.__file__, "runserver"])
@mock.patch("sys.warnoptions", [])
@mock.patch("sys._xoptions", {})
def test_run_as_module(self):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, "-m", "django", "runserver"],
)
@mock.patch.dict(sys.modules, {"__main__": test_main})
@mock.patch("sys.argv", [test_main.__file__, "runserver"])
@mock.patch("sys.warnoptions", [])
@mock.patch("sys._xoptions", {})
def test_run_as_non_django_module(self):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, "-m", "utils_tests.test_module", "runserver"],
)
@mock.patch.dict(sys.modules, {"__main__": test_main_module})
@mock.patch("sys.argv", [test_main.__file__, "runserver"])
@mock.patch("sys.warnoptions", [])
@mock.patch("sys._xoptions", {})
def test_run_as_non_django_module_non_package(self):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, "-m", "utils_tests.test_module.main_module", "runserver"],
)
@mock.patch("__main__.__spec__", None)
@mock.patch("sys.argv", [__file__, "runserver"])
@mock.patch("sys.warnoptions", ["error"])
@mock.patch("sys._xoptions", {})
def test_warnoptions(self):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, "-Werror", __file__, "runserver"],
)
@mock.patch("sys.argv", [__file__, "runserver"])
@mock.patch("sys.warnoptions", [])
@mock.patch("sys._xoptions", {"utf8": True, "a": "b"})
def test_xoptions(self):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, "-Xutf8", "-Xa=b", __file__, "runserver"],
)
@mock.patch("__main__.__spec__", None)
@mock.patch("sys.warnoptions", [])
def test_exe_fallback(self):
with tempfile.TemporaryDirectory() as tmpdir:
exe_path = Path(tmpdir) / "django-admin.exe"
exe_path.touch()
with mock.patch("sys.argv", [exe_path.with_suffix(""), "runserver"]):
self.assertEqual(
autoreload.get_child_arguments(), [exe_path, "runserver"]
)
@mock.patch("sys.warnoptions", [])
@mock.patch.dict(sys.modules, {"__main__": django.__main__})
def test_use_exe_when_main_spec(self):
with tempfile.TemporaryDirectory() as tmpdir:
exe_path = Path(tmpdir) / "django-admin.exe"
exe_path.touch()
with mock.patch("sys.argv", [exe_path.with_suffix(""), "runserver"]):
self.assertEqual(
autoreload.get_child_arguments(), [exe_path, "runserver"]
)
@mock.patch("__main__.__spec__", None)
@mock.patch("sys.warnoptions", [])
@mock.patch("sys._xoptions", {})
def test_entrypoint_fallback(self):
with tempfile.TemporaryDirectory() as tmpdir:
script_path = Path(tmpdir) / "django-admin-script.py"
script_path.touch()
with mock.patch(
"sys.argv", [script_path.with_name("django-admin"), "runserver"]
):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, script_path, "runserver"],
)
@mock.patch("__main__.__spec__", None)
@mock.patch("sys.argv", ["does-not-exist", "runserver"])
@mock.patch("sys.warnoptions", [])
def test_raises_runtimeerror(self):
msg = "Script does-not-exist does not exist."
with self.assertRaisesMessage(RuntimeError, msg):
autoreload.get_child_arguments()
@mock.patch("sys.argv", [__file__, "runserver"])
@mock.patch("sys.warnoptions", [])
@mock.patch("sys._xoptions", {})
def test_module_no_spec(self):
module = types.ModuleType("test_module")
del module.__spec__
with mock.patch.dict(sys.modules, {"__main__": module}):
self.assertEqual(
autoreload.get_child_arguments(),
[sys.executable, __file__, "runserver"],
)
class TestUtilities(SimpleTestCase):
def test_is_django_module(self):
for module, expected in ((zoneinfo, False), (sys, False), (autoreload, True)):
with self.subTest(module=module):
self.assertIs(autoreload.is_django_module(module), expected)
def test_is_django_path(self):
for module, expected in (
(zoneinfo.__file__, False),
(contextlib.__file__, False),
(autoreload.__file__, True),
):
with self.subTest(module=module):
self.assertIs(autoreload.is_django_path(module), expected)
class TestCommonRoots(SimpleTestCase):
def test_common_roots(self):
paths = (
Path("/first/second"),
Path("/first/second/third"),
Path("/first/"),
Path("/root/first/"),
)
results = autoreload.common_roots(paths)
self.assertCountEqual(results, [Path("/first/"), Path("/root/first/")])
class TestSysPathDirectories(SimpleTestCase):
def setUp(self):
_directory = tempfile.TemporaryDirectory()
self.addCleanup(_directory.cleanup)
self.directory = Path(_directory.name).resolve(strict=True).absolute()
self.file = self.directory / "test"
self.file.touch()
def test_sys_paths_with_directories(self):
with extend_sys_path(str(self.file)):
paths = list(autoreload.sys_path_directories())
self.assertIn(self.file.parent, paths)
def test_sys_paths_non_existing(self):
nonexistent_file = self.directory / "does_not_exist"
with extend_sys_path(str(nonexistent_file)):
paths = list(autoreload.sys_path_directories())
self.assertNotIn(nonexistent_file, paths)
self.assertNotIn(nonexistent_file.parent, paths)
def test_sys_paths_absolute(self):
paths = list(autoreload.sys_path_directories())
self.assertTrue(all(p.is_absolute() for p in paths))
def test_sys_paths_directories(self):
with extend_sys_path(str(self.directory)):
paths = list(autoreload.sys_path_directories())
self.assertIn(self.directory, paths)
class GetReloaderTests(SimpleTestCase):
@mock.patch("django.utils.autoreload.WatchmanReloader")
def test_watchman_unavailable(self, mocked_watchman):
mocked_watchman.check_availability.side_effect = WatchmanUnavailable
self.assertIsInstance(autoreload.get_reloader(), autoreload.StatReloader)
@mock.patch.object(autoreload.WatchmanReloader, "check_availability")
def test_watchman_available(self, mocked_available):
# If WatchmanUnavailable isn't raised, Watchman will be chosen.
mocked_available.return_value = None
result = autoreload.get_reloader()
self.assertIsInstance(result, autoreload.WatchmanReloader)
class RunWithReloaderTests(SimpleTestCase):
@mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: "true"})
@mock.patch("django.utils.autoreload.get_reloader")
def test_swallows_keyboard_interrupt(self, mocked_get_reloader):
mocked_get_reloader.side_effect = KeyboardInterrupt()
autoreload.run_with_reloader(lambda: None) # No exception
@mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: "false"})
@mock.patch("django.utils.autoreload.restart_with_reloader")
def test_calls_sys_exit(self, mocked_restart_reloader):
mocked_restart_reloader.return_value = 1
with self.assertRaises(SystemExit) as exc:
autoreload.run_with_reloader(lambda: None)
self.assertEqual(exc.exception.code, 1)
@mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: "true"})
@mock.patch("django.utils.autoreload.start_django")
@mock.patch("django.utils.autoreload.get_reloader")
def test_calls_start_django(self, mocked_reloader, mocked_start_django):
mocked_reloader.return_value = mock.sentinel.RELOADER
autoreload.run_with_reloader(mock.sentinel.METHOD)
self.assertEqual(mocked_start_django.call_count, 1)
self.assertSequenceEqual(
mocked_start_django.call_args[0],
[mock.sentinel.RELOADER, mock.sentinel.METHOD],
)
class StartDjangoTests(SimpleTestCase):
@mock.patch("django.utils.autoreload.ensure_echo_on")
def test_echo_on_called(self, mocked_echo):
fake_reloader = mock.MagicMock()
autoreload.start_django(fake_reloader, lambda: None)
self.assertEqual(mocked_echo.call_count, 1)
@mock.patch("django.utils.autoreload.check_errors")
def test_check_errors_called(self, mocked_check_errors):
fake_method = mock.MagicMock(return_value=None)
fake_reloader = mock.MagicMock()
autoreload.start_django(fake_reloader, fake_method)
self.assertCountEqual(mocked_check_errors.call_args[0], [fake_method])
@mock.patch("threading.Thread")
@mock.patch("django.utils.autoreload.check_errors")
def test_starts_thread_with_args(self, mocked_check_errors, mocked_thread):
fake_reloader = mock.MagicMock()
fake_main_func = mock.MagicMock()
fake_thread = mock.MagicMock()
mocked_check_errors.return_value = fake_main_func
mocked_thread.return_value = fake_thread
autoreload.start_django(fake_reloader, fake_main_func, 123, abc=123)
self.assertEqual(mocked_thread.call_count, 1)
self.assertEqual(
mocked_thread.call_args[1],
{
"target": fake_main_func,
"args": (123,),
"kwargs": {"abc": 123},
"name": "django-main-thread",
},
)
self.assertIs(fake_thread.daemon, True)
self.assertTrue(fake_thread.start.called)
class TestCheckErrors(SimpleTestCase):
def test_mutates_error_files(self):
fake_method = mock.MagicMock(side_effect=RuntimeError())
wrapped = autoreload.check_errors(fake_method)
with mock.patch.object(autoreload, "_error_files") as mocked_error_files:
try:
with self.assertRaises(RuntimeError):
wrapped()
finally:
autoreload._exception = None
self.assertEqual(mocked_error_files.append.call_count, 1)
class TestRaiseLastException(SimpleTestCase):
@mock.patch("django.utils.autoreload._exception", None)
def test_no_exception(self):
# Should raise no exception if _exception is None
autoreload.raise_last_exception()
def test_raises_exception(self):
class MyException(Exception):
pass
# Create an exception
try:
raise MyException("Test Message")
except MyException:
exc_info = sys.exc_info()
with mock.patch("django.utils.autoreload._exception", exc_info):
with self.assertRaisesMessage(MyException, "Test Message"):
autoreload.raise_last_exception()
def test_raises_custom_exception(self):
class MyException(Exception):
def __init__(self, msg, extra_context):
super().__init__(msg)
self.extra_context = extra_context
# Create an exception.
try:
raise MyException("Test Message", "extra context")
except MyException:
exc_info = sys.exc_info()
with mock.patch("django.utils.autoreload._exception", exc_info):
with self.assertRaisesMessage(MyException, "Test Message"):
autoreload.raise_last_exception()
def test_raises_exception_with_context(self):
try:
raise Exception(2)
except Exception as e:
try:
raise Exception(1) from e
except Exception:
exc_info = sys.exc_info()
with mock.patch("django.utils.autoreload._exception", exc_info):
with self.assertRaises(Exception) as cm:
autoreload.raise_last_exception()
self.assertEqual(cm.exception.args[0], 1)
self.assertEqual(cm.exception.__cause__.args[0], 2)
class RestartWithReloaderTests(SimpleTestCase):
executable = "/usr/bin/python"
def patch_autoreload(self, argv):
patch_call = mock.patch(
"django.utils.autoreload.subprocess.run",
return_value=CompletedProcess(argv, 0),
)
patches = [
mock.patch("django.utils.autoreload.sys.argv", argv),
mock.patch("django.utils.autoreload.sys.executable", self.executable),
mock.patch("django.utils.autoreload.sys.warnoptions", ["all"]),
mock.patch("django.utils.autoreload.sys._xoptions", {}),
]
for p in patches:
p.start()
self.addCleanup(p.stop)
mock_call = patch_call.start()
self.addCleanup(patch_call.stop)
return mock_call
def test_manage_py(self):
with tempfile.TemporaryDirectory() as temp_dir:
script = Path(temp_dir) / "manage.py"
script.touch()
argv = [str(script), "runserver"]
mock_call = self.patch_autoreload(argv)
with mock.patch("__main__.__spec__", None):
autoreload.restart_with_reloader()
self.assertEqual(mock_call.call_count, 1)
self.assertEqual(
mock_call.call_args[0][0],
[self.executable, "-Wall"] + argv,
)
def test_python_m_django(self):
main = "/usr/lib/pythonX.Y/site-packages/django/__main__.py"
argv = [main, "runserver"]
mock_call = self.patch_autoreload(argv)
with mock.patch("django.__main__.__file__", main):
with mock.patch.dict(sys.modules, {"__main__": django.__main__}):
autoreload.restart_with_reloader()
self.assertEqual(mock_call.call_count, 1)
self.assertEqual(
mock_call.call_args[0][0],
[self.executable, "-Wall", "-m", "django"] + argv[1:],
)
def test_propagates_unbuffered_from_parent(self):
for args in ("-u", "-Iuv"):
with self.subTest(args=args):
with mock.patch.dict(os.environ, {}, clear=True):
with tempfile.TemporaryDirectory() as d:
script = Path(d) / "manage.py"
script.touch()
mock_call = self.patch_autoreload([str(script), "runserver"])
with (
mock.patch("__main__.__spec__", None),
mock.patch.object(
autoreload.sys,
"orig_argv",
[self.executable, args, str(script), "runserver"],
),
):
autoreload.restart_with_reloader()
env = mock_call.call_args.kwargs["env"]
self.assertEqual(env.get("PYTHONUNBUFFERED"), "1")
def test_does_not_propagate_unbuffered_from_parent(self):
for args in (
"-Xdev",
"-Xfaulthandler",
"--user",
"-Wall",
"-Wdefault",
"-Wignore::UserWarning",
):
with self.subTest(args=args):
with mock.patch.dict(os.environ, {}, clear=True):
with tempfile.TemporaryDirectory() as d:
script = Path(d) / "manage.py"
script.touch()
mock_call = self.patch_autoreload([str(script), "runserver"])
with (
mock.patch("__main__.__spec__", None),
mock.patch.object(
autoreload.sys,
"orig_argv",
[self.executable, args, str(script), "runserver"],
),
):
autoreload.restart_with_reloader()
env = mock_call.call_args.kwargs["env"]
self.assertIsNone(env.get("PYTHONUNBUFFERED"))
class ReloaderTests(SimpleTestCase):
RELOADER_CLS = None
def setUp(self):
_tempdir = tempfile.TemporaryDirectory()
self.tempdir = Path(_tempdir.name).resolve(strict=True).absolute()
self.existing_file = self.ensure_file(self.tempdir / "test.py")
self.nonexistent_file = (self.tempdir / "does_not_exist.py").absolute()
self.reloader = self.RELOADER_CLS()
self.addCleanup(self.reloader.stop)
self.addCleanup(_tempdir.cleanup)
def ensure_file(self, path):
path.parent.mkdir(exist_ok=True, parents=True)
path.touch()
# On Linux and Windows updating the mtime of a file using touch() will
# set a timestamp value that is in the past, as the time value for the
# last kernel tick is used rather than getting the correct absolute
# time.
# To make testing simpler, set the mtime to the observed time when
# this function is called.
self.set_mtime(path, time.time())
return path.absolute()
def set_mtime(self, fp, value):
os.utime(str(fp), (value, value))
def increment_mtime(self, fp, by=1):
current_time = time.time()
self.set_mtime(fp, current_time + by)
@contextlib.contextmanager
def tick_twice(self):
ticker = self.reloader.tick()
next(ticker)
yield
next(ticker)
class IntegrationTests:
@mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed")
@mock.patch(
"django.utils.autoreload.iter_all_python_module_files", return_value=frozenset()
)
def test_glob(self, mocked_modules, notify_mock):
non_py_file = self.ensure_file(self.tempdir / "non_py_file")
self.reloader.watch_dir(self.tempdir, "*.py")
with self.tick_twice():
self.increment_mtime(non_py_file)
self.increment_mtime(self.existing_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
@mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed")
@mock.patch(
"django.utils.autoreload.iter_all_python_module_files", return_value=frozenset()
)
def test_multiple_globs(self, mocked_modules, notify_mock):
self.ensure_file(self.tempdir / "x.test")
self.reloader.watch_dir(self.tempdir, "*.py")
self.reloader.watch_dir(self.tempdir, "*.test")
with self.tick_twice():
self.increment_mtime(self.existing_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
@mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed")
@mock.patch(
"django.utils.autoreload.iter_all_python_module_files", return_value=frozenset()
)
def test_overlapping_globs(self, mocked_modules, notify_mock):
self.reloader.watch_dir(self.tempdir, "*.py")
self.reloader.watch_dir(self.tempdir, "*.p*")
with self.tick_twice():
self.increment_mtime(self.existing_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
@mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed")
@mock.patch(
"django.utils.autoreload.iter_all_python_module_files", return_value=frozenset()
)
def test_glob_recursive(self, mocked_modules, notify_mock):
non_py_file = self.ensure_file(self.tempdir / "dir" / "non_py_file")
py_file = self.ensure_file(self.tempdir / "dir" / "file.py")
self.reloader.watch_dir(self.tempdir, "**/*.py")
with self.tick_twice():
self.increment_mtime(non_py_file)
self.increment_mtime(py_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [py_file])
@mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed")
@mock.patch(
"django.utils.autoreload.iter_all_python_module_files", return_value=frozenset()
)
def test_multiple_recursive_globs(self, mocked_modules, notify_mock):
non_py_file = self.ensure_file(self.tempdir / "dir" / "test.txt")
py_file = self.ensure_file(self.tempdir / "dir" / "file.py")
self.reloader.watch_dir(self.tempdir, "**/*.txt")
self.reloader.watch_dir(self.tempdir, "**/*.py")
with self.tick_twice():
self.increment_mtime(non_py_file)
self.increment_mtime(py_file)
self.assertEqual(notify_mock.call_count, 2)
self.assertCountEqual(
notify_mock.call_args_list, [mock.call(py_file), mock.call(non_py_file)]
)
@mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed")
@mock.patch(
"django.utils.autoreload.iter_all_python_module_files", return_value=frozenset()
)
def test_nested_glob_recursive(self, mocked_modules, notify_mock):
inner_py_file = self.ensure_file(self.tempdir / "dir" / "file.py")
self.reloader.watch_dir(self.tempdir, "**/*.py")
self.reloader.watch_dir(inner_py_file.parent, "**/*.py")
with self.tick_twice():
self.increment_mtime(inner_py_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [inner_py_file])
@mock.patch("django.utils.autoreload.BaseReloader.notify_file_changed")
@mock.patch(
"django.utils.autoreload.iter_all_python_module_files", return_value=frozenset()
)
def test_overlapping_glob_recursive(self, mocked_modules, notify_mock):
py_file = self.ensure_file(self.tempdir / "dir" / "file.py")
self.reloader.watch_dir(self.tempdir, "**/*.p*")
self.reloader.watch_dir(self.tempdir, "**/*.py*")
with self.tick_twice():
self.increment_mtime(py_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [py_file])
class BaseReloaderTests(ReloaderTests):
RELOADER_CLS = autoreload.BaseReloader
def test_watch_dir_with_unresolvable_path(self):
path = Path("unresolvable_directory")
with mock.patch.object(Path, "absolute", side_effect=FileNotFoundError):
self.reloader.watch_dir(path, "**/*.mo")
self.assertEqual(list(self.reloader.directory_globs), [])
def test_watch_with_glob(self):
self.reloader.watch_dir(self.tempdir, "*.py")
watched_files = list(self.reloader.watched_files())
self.assertIn(self.existing_file, watched_files)
def test_watch_files_with_recursive_glob(self):
inner_file = self.ensure_file(self.tempdir / "test" / "test.py")
self.reloader.watch_dir(self.tempdir, "**/*.py")
watched_files = list(self.reloader.watched_files())
self.assertIn(self.existing_file, watched_files)
self.assertIn(inner_file, watched_files)
def test_run_loop_catches_stopiteration(self):
def mocked_tick():
yield
with mock.patch.object(self.reloader, "tick", side_effect=mocked_tick) as tick:
self.reloader.run_loop()
self.assertEqual(tick.call_count, 1)
def test_run_loop_stop_and_return(self):
def mocked_tick(*args):
yield
self.reloader.stop()
return # Raises StopIteration
with mock.patch.object(self.reloader, "tick", side_effect=mocked_tick) as tick:
self.reloader.run_loop()
self.assertEqual(tick.call_count, 1)
def test_wait_for_apps_ready_checks_for_exception(self):
app_reg = Apps()
app_reg.ready_event.set()
# thread.is_alive() is False if it's not started.
dead_thread = threading.Thread()
self.assertFalse(self.reloader.wait_for_apps_ready(app_reg, dead_thread))
def test_wait_for_apps_ready_without_exception(self):
app_reg = Apps()
app_reg.ready_event.set()
thread = mock.MagicMock()
thread.is_alive.return_value = True
self.assertTrue(self.reloader.wait_for_apps_ready(app_reg, thread))
def skip_unless_watchman_available():
try:
autoreload.WatchmanReloader.check_availability()
except WatchmanUnavailable as e:
return skip("Watchman unavailable: %s" % e)
return lambda func: func
@skip_unless_watchman_available()
class WatchmanReloaderTests(ReloaderTests, IntegrationTests):
RELOADER_CLS = autoreload.WatchmanReloader
def setUp(self):
super().setUp()
# Shorten the timeout to speed up tests.
self.reloader.client_timeout = int(os.environ.get("DJANGO_WATCHMAN_TIMEOUT", 2))
def test_watch_glob_ignores_non_existing_directories_two_levels(self):
with mock.patch.object(self.reloader, "_subscribe") as mocked_subscribe:
self.reloader._watch_glob(self.tempdir / "does_not_exist" / "more", ["*"])
self.assertFalse(mocked_subscribe.called)
def test_watch_glob_uses_existing_parent_directories(self):
with mock.patch.object(self.reloader, "_subscribe") as mocked_subscribe:
self.reloader._watch_glob(self.tempdir / "does_not_exist", ["*"])
self.assertSequenceEqual(
mocked_subscribe.call_args[0],
[
self.tempdir,
"glob-parent-does_not_exist:%s" % self.tempdir,
["anyof", ["match", "does_not_exist/*", "wholename"]],
],
)
def test_watch_glob_multiple_patterns(self):
with mock.patch.object(self.reloader, "_subscribe") as mocked_subscribe:
self.reloader._watch_glob(self.tempdir, ["*", "*.py"])
self.assertSequenceEqual(
mocked_subscribe.call_args[0],
[
self.tempdir,
"glob:%s" % self.tempdir,
["anyof", ["match", "*", "wholename"], ["match", "*.py", "wholename"]],
],
)
def test_watched_roots_contains_files(self):
paths = self.reloader.watched_roots([self.existing_file])
self.assertIn(self.existing_file.parent, paths)
def test_watched_roots_contains_directory_globs(self):
self.reloader.watch_dir(self.tempdir, "*.py")
paths = self.reloader.watched_roots([])
self.assertIn(self.tempdir, paths)
def test_watched_roots_contains_sys_path(self):
with extend_sys_path(str(self.tempdir)):
paths = self.reloader.watched_roots([])
self.assertIn(self.tempdir, paths)
def test_check_server_status(self):
self.assertTrue(self.reloader.check_server_status())
def test_check_server_status_raises_error(self):
with mock.patch.object(self.reloader.client, "query") as mocked_query:
mocked_query.side_effect = Exception()
with self.assertRaises(autoreload.WatchmanUnavailable):
self.reloader.check_server_status()
@mock.patch("pywatchman.client")
def test_check_availability(self, mocked_client):
mocked_client().capabilityCheck.side_effect = Exception()
with self.assertRaisesMessage(
WatchmanUnavailable, "Cannot connect to the watchman service"
):
self.RELOADER_CLS.check_availability()
@mock.patch("pywatchman.client")
def test_check_availability_lower_version(self, mocked_client):
mocked_client().capabilityCheck.return_value = {"version": "4.8.10"}
with self.assertRaisesMessage(
WatchmanUnavailable, "Watchman 4.9 or later is required."
):
self.RELOADER_CLS.check_availability()
def test_pywatchman_not_available(self):
with mock.patch.object(autoreload, "pywatchman") as mocked:
mocked.__bool__.return_value = False
with self.assertRaisesMessage(
WatchmanUnavailable, "pywatchman not installed."
):
self.RELOADER_CLS.check_availability()
def test_update_watches_raises_exceptions(self):
class TestException(Exception):
pass
with mock.patch.object(self.reloader, "_update_watches") as mocked_watches:
with mock.patch.object(
self.reloader, "check_server_status"
) as mocked_server_status:
mocked_watches.side_effect = TestException()
mocked_server_status.return_value = True
with self.assertRaises(TestException):
self.reloader.update_watches()
self.assertIsInstance(
mocked_server_status.call_args[0][0], TestException
)
@mock.patch.dict(os.environ, {"DJANGO_WATCHMAN_TIMEOUT": "10"})
def test_setting_timeout_from_environment_variable(self):
self.assertEqual(self.RELOADER_CLS().client_timeout, 10)
@skipIf(on_macos_with_hfs(), "These tests do not work with HFS+ as a filesystem")
class StatReloaderTests(ReloaderTests, IntegrationTests):
RELOADER_CLS = autoreload.StatReloader
def setUp(self):
super().setUp()
# Shorten the sleep time to speed up tests.
self.reloader.SLEEP_TIME = 0.01
@mock.patch("django.utils.autoreload.StatReloader.notify_file_changed")
def test_tick_does_not_trigger_twice(self, mock_notify_file_changed):
with mock.patch.object(
self.reloader, "watched_files", return_value=[self.existing_file]
):
ticker = self.reloader.tick()
next(ticker)
self.increment_mtime(self.existing_file)
next(ticker)
next(ticker)
self.assertEqual(mock_notify_file_changed.call_count, 1)
def test_snapshot_files_ignores_missing_files(self):
with mock.patch.object(
self.reloader, "watched_files", return_value=[self.nonexistent_file]
):
self.assertEqual(dict(self.reloader.snapshot_files()), {})
def test_snapshot_files_updates(self):
with mock.patch.object(
self.reloader, "watched_files", return_value=[self.existing_file]
):
snapshot1 = dict(self.reloader.snapshot_files())
self.assertIn(self.existing_file, snapshot1)
self.increment_mtime(self.existing_file)
snapshot2 = dict(self.reloader.snapshot_files())
self.assertNotEqual(
snapshot1[self.existing_file], snapshot2[self.existing_file]
)
def test_snapshot_files_with_duplicates(self):
with mock.patch.object(
self.reloader,
"watched_files",
return_value=[self.existing_file, self.existing_file],
):
snapshot = list(self.reloader.snapshot_files())
self.assertEqual(len(snapshot), 1)
self.assertEqual(snapshot[0][0], self.existing_file)
|
./temp_repos/django/django/utils/autoreload.py
|
./temp_repos/django/tests/utils_tests/test_autoreload.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'BaseReloader'.
Context:
- Class Name: BaseReloader
- Dependencies to Mock: None detected
- Key Imports: collections, django.dispatch, django, pathlib, django.utils.version, django.utils.functional, os, logging, subprocess, threading
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock any external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
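A minimal sketch satisfying these requirements might look like the following (illustrative only; it relies on `BaseReloader` from `django.utils.autoreload` as shown above, uses mock only for the failure path since no external dependencies were detected, and the class name `BaseReloaderSketchTests` is invented for the example):

from pathlib import Path
from unittest import TestCase, mock

from django.utils.autoreload import BaseReloader


class BaseReloaderSketchTests(TestCase):
    def test_watch_dir_success(self):
        # Arrange
        reloader = BaseReloader()
        path = Path("/tmp")
        # Act
        reloader.watch_dir(path, "*.py")
        # Assert: the glob is registered under the absolute directory path.
        self.assertIn("*.py", reloader.directory_globs[path.absolute()])

    def test_watch_dir_failure_unresolvable_path(self):
        # Arrange
        reloader = BaseReloader()
        # Act: if Path.absolute() fails, the directory cannot be watched.
        with mock.patch.object(Path, "absolute", side_effect=FileNotFoundError):
            reloader.watch_dir(Path("unresolvable"), "*.py")
        # Assert: nothing was registered.
        self.assertEqual(list(reloader.directory_globs), [])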
|
BaseReloader
|
python
|
import secrets
from enum import StrEnum
from django.utils.functional import SimpleLazyObject, empty
class CSP(StrEnum):
"""
Content Security Policy constants for directive values and special tokens.
These constants represent:
1. Standard quoted string values from the CSP spec (e.g., 'self',
'unsafe-inline')
2. Special placeholder tokens (NONCE) that get replaced by the middleware
Using this enum instead of raw strings provides better type checking,
autocompletion, and protection against common mistakes like:
- Typos (e.g., 'noone' instead of 'none')
- Missing quotes (e.g., ["self"] instead of ["'self'"])
- Inconsistent quote styles (e.g., ["'self'", "\"unsafe-inline\""])
Example usage in Django settings:
SECURE_CSP = {
"default-src": [CSP.NONE],
"script-src": [CSP.SELF, CSP.NONCE],
}
"""
# HTTP Headers.
HEADER_ENFORCE = "Content-Security-Policy"
HEADER_REPORT_ONLY = "Content-Security-Policy-Report-Only"
# Standard CSP directive values.
NONE = "'none'"
REPORT_SAMPLE = "'report-sample'"
SELF = "'self'"
STRICT_DYNAMIC = "'strict-dynamic'"
UNSAFE_EVAL = "'unsafe-eval'"
UNSAFE_HASHES = "'unsafe-hashes'"
UNSAFE_INLINE = "'unsafe-inline'"
WASM_UNSAFE_EVAL = "'wasm-unsafe-eval'"
# Special placeholder that gets replaced by the middleware.
# The value itself is arbitrary and should not be mistaken for a real
# nonce.
NONCE = "<CSP_NONCE_SENTINEL>"
class LazyNonce(SimpleLazyObject):
"""
Lazily generates a cryptographically secure nonce string, for use in CSP
headers.
The nonce is only generated when first accessed (e.g., via string
interpolation or inside a template).
The nonce will evaluate as `True` if it has been generated, and `False` if
it has not. This is useful for third-party Django libraries that want to
support CSP without requiring it.
Example Django template usage with context processors enabled:
<script{% if csp_nonce %} nonce="{{ csp_nonce }}"...{% endif %}>
The `{% if %}` block will only render if the nonce has been evaluated
elsewhere.
"""
def __init__(self):
super().__init__(self._generate)
def _generate(self):
return secrets.token_urlsafe(16)
def __bool__(self):
return self._wrapped is not empty
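# Illustrative only: the nonce is created on first string use and then
# reused for the lifetime of the object.
#     nonce = LazyNonce()
#     bool(nonce)         # False: nothing generated yet.
#     value = str(nonce)  # Generates the token (22 chars for 16 bytes).
#     bool(nonce)         # True: str(nonce) now always returns `value`.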
def build_policy(config, nonce=None):
policy = []
for directive, values in config.items():
if values in (None, False):
continue
if values is True:
rendered_value = ""
else:
if isinstance(values, set):
# Sort values for consistency, preventing cache invalidation
# between requests and ensuring reliable browser caching.
values = sorted(values)
elif not isinstance(values, list | tuple):
values = [values]
# Replace the nonce sentinel with the actual nonce value, if the
# sentinel is found and a nonce is provided. Otherwise, remove it.
if (has_sentinel := CSP.NONCE in values) and nonce:
values = [f"'nonce-{nonce}'" if v == CSP.NONCE else v for v in values]
elif has_sentinel:
values = [v for v in values if v != CSP.NONCE]
if not values:
continue
rendered_value = " ".join(values)
policy.append(f"{directive} {rendered_value}".rstrip())
return "; ".join(policy)
|
from secrets import token_urlsafe
from unittest.mock import patch
from django.test import SimpleTestCase
from django.utils.csp import CSP, LazyNonce, build_policy
from django.utils.functional import empty
basic_config = {
"default-src": [CSP.SELF],
}
alt_config = {
"default-src": [CSP.SELF, CSP.UNSAFE_INLINE],
}
basic_policy = "default-src 'self'"
class CSPConstantsTests(SimpleTestCase):
def test_constants(self):
self.assertEqual(CSP.NONE, "'none'")
self.assertEqual(CSP.REPORT_SAMPLE, "'report-sample'")
self.assertEqual(CSP.SELF, "'self'")
self.assertEqual(CSP.STRICT_DYNAMIC, "'strict-dynamic'")
self.assertEqual(CSP.UNSAFE_EVAL, "'unsafe-eval'")
self.assertEqual(CSP.UNSAFE_HASHES, "'unsafe-hashes'")
self.assertEqual(CSP.UNSAFE_INLINE, "'unsafe-inline'")
self.assertEqual(CSP.WASM_UNSAFE_EVAL, "'wasm-unsafe-eval'")
self.assertEqual(CSP.NONCE, "<CSP_NONCE_SENTINEL>")
class CSPBuildPolicyTest(SimpleTestCase):
def assertPolicyEqual(self, a, b):
parts_a = sorted(a.split("; ")) if a is not None else None
parts_b = sorted(b.split("; ")) if b is not None else None
self.assertEqual(parts_a, parts_b, f"Policies not equal: {a!r} != {b!r}")
def test_config_empty(self):
self.assertPolicyEqual(build_policy({}), "")
def test_config_basic(self):
self.assertPolicyEqual(build_policy(basic_config), basic_policy)
def test_config_multiple_directives(self):
policy = {
"default-src": [CSP.SELF],
"script-src": [CSP.NONE],
}
self.assertPolicyEqual(
build_policy(policy), "default-src 'self'; script-src 'none'"
)
def test_config_value_as_string(self):
"""
Test that a single value can be passed as a string.
"""
policy = {"default-src": CSP.SELF}
self.assertPolicyEqual(build_policy(policy), "default-src 'self'")
def test_config_value_as_tuple(self):
"""
Test that a tuple can be passed as a value.
"""
policy = {"default-src": (CSP.SELF, "foo.com")}
self.assertPolicyEqual(build_policy(policy), "default-src 'self' foo.com")
def test_config_value_as_set(self):
"""
Test that a set can be passed as a value.
Sets are often used in Django settings to ensure uniqueness; however,
sets are unordered. The middleware ensures consistency via sorting if a
set is passed.
"""
policy = {"default-src": {CSP.SELF, "foo.com", "bar.com"}}
self.assertPolicyEqual(
build_policy(policy), "default-src 'self' bar.com foo.com"
)
def test_config_value_none(self):
"""
Test that `None` removes the directive from the policy.
Useful in cases where the CSP config is scripted in some way, or when
a directive explicitly should not be set.
"""
policy = {"default-src": [CSP.SELF], "script-src": None}
self.assertPolicyEqual(build_policy(policy), basic_policy)
def test_config_value_boolean_true(self):
policy = {"default-src": [CSP.SELF], "block-all-mixed-content": True}
self.assertPolicyEqual(
build_policy(policy), "default-src 'self'; block-all-mixed-content"
)
def test_config_value_boolean_false(self):
policy = {"default-src": [CSP.SELF], "block-all-mixed-content": False}
self.assertPolicyEqual(build_policy(policy), basic_policy)
def test_config_value_multiple_boolean(self):
policy = {
"default-src": [CSP.SELF],
"block-all-mixed-content": True,
"upgrade-insecure-requests": True,
}
self.assertPolicyEqual(
build_policy(policy),
"default-src 'self'; block-all-mixed-content; upgrade-insecure-requests",
)
def test_config_with_nonce_arg(self):
"""
Test that when `CSP.NONCE` is not in the defined policy, the nonce
argument has no effect.
"""
self.assertPolicyEqual(build_policy(basic_config, nonce="abc123"), basic_policy)
def test_config_with_nonce(self):
policy = {"default-src": [CSP.SELF, CSP.NONCE]}
self.assertPolicyEqual(
build_policy(policy, nonce="abc123"),
"default-src 'self' 'nonce-abc123'",
)
def test_config_with_multiple_nonces(self):
policy = {
"default-src": [CSP.SELF, CSP.NONCE],
"script-src": [CSP.SELF, CSP.NONCE],
}
self.assertPolicyEqual(
build_policy(policy, nonce="abc123"),
"default-src 'self' 'nonce-abc123'; script-src 'self' 'nonce-abc123'",
)
def test_config_with_empty_directive(self):
policy = {"default-src": []}
self.assertPolicyEqual(build_policy(policy), "")
class LazyNonceTests(SimpleTestCase):
def test_generates_on_usage(self):
generated_tokens = []
nonce = LazyNonce()
self.assertFalse(nonce)
self.assertIs(nonce._wrapped, empty)
def memento_token_urlsafe(size):
generated_tokens.append(result := token_urlsafe(size))
return result
with patch("django.utils.csp.secrets.token_urlsafe", memento_token_urlsafe):
# Force usage, similar to template rendering, to generate the
# nonce.
val = str(nonce)
self.assertTrue(nonce)
self.assertEqual(nonce, val)
self.assertIsInstance(nonce, str)
self.assertEqual(len(val), 22) # Based on secrets.token_urlsafe of 16 bytes.
self.assertEqual(generated_tokens, [nonce])
# Also test the wrapped value.
self.assertEqual(nonce._wrapped, val)
def test_returns_same_value(self):
nonce = LazyNonce()
first = str(nonce)
second = str(nonce)
self.assertEqual(first, second)
|
./temp_repos/django/django/utils/csp.py
|
./temp_repos/django/tests/utils_tests/test_csp.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'CSP'.
Context:
- Class Name: CSP
- Dependencies to Mock: None detected
- Key Imports: django.utils.functional, secrets, enum
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
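A minimal sketch along these lines (illustrative only; it exercises the `build_policy` helper from `django.utils.csp` shown above, since the `CSP` enum itself carries no behavior beyond its values, so nothing needs to be mocked; the class name `CSPSketchTests` is invented):

from unittest import TestCase

from django.utils.csp import CSP, build_policy


class CSPSketchTests(TestCase):
    def test_nonce_sentinel_replaced(self):
        # Arrange
        config = {"script-src": [CSP.SELF, CSP.NONCE]}
        # Act
        policy = build_policy(config, nonce="abc123")
        # Assert
        self.assertEqual(policy, "script-src 'self' 'nonce-abc123'")

    def test_nonce_sentinel_dropped_without_nonce(self):
        # Arrange
        config = {"script-src": [CSP.NONCE]}
        # Act
        policy = build_policy(config)
        # Assert: the placeholder must never leak into a real header.
        self.assertEqual(policy, "")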
|
CSP
|
python
|
"""
Syndication feed generation library -- used for generating RSS, etc.
Sample usage:
>>> from django.utils import feedgenerator
>>> feed = feedgenerator.Rss201rev2Feed(
... title="Poynter E-Media Tidbits",
... link="http://www.poynter.org/column.asp?id=31",
... description="A group blog by the sharpest minds in online journalism.",
... language="en",
... )
>>> feed.add_item(
... title="Hello",
... link="http://www.holovaty.com/test/",
... description="Testing."
... )
>>> with open('test.rss', 'w') as fp:
... feed.write(fp, 'utf-8')
For definitions of the different versions of RSS, see:
https://web.archive.org/web/20110718035220/http://diveintomark.org/archives/2004/02/04/incompatible-rss
"""
import datetime
import email
import mimetypes
from io import StringIO
from urllib.parse import urlparse
from django.forms.utils import flatatt
from django.utils.encoding import iri_to_uri
from django.utils.xmlutils import SimplerXMLGenerator
def rfc2822_date(date):
if not isinstance(date, datetime.datetime):
date = datetime.datetime.combine(date, datetime.time())
return email.utils.format_datetime(date)
def rfc3339_date(date):
if not isinstance(date, datetime.datetime):
date = datetime.datetime.combine(date, datetime.time())
return date.isoformat() + ("Z" if date.utcoffset() is None else "")
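# Illustrative outputs (naive datetimes render with "-0000" / "Z"):
#     rfc2822_date(datetime.datetime(2008, 11, 14, 13, 37))
#     -> "Fri, 14 Nov 2008 13:37:00 -0000"
#     rfc3339_date(datetime.date(2008, 11, 14))
#     -> "2008-11-14T00:00:00Z"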
def get_tag_uri(url, date):
"""
Create a TagURI.
See
https://web.archive.org/web/20110514113830/http://diveintomark.org/archives/2004/05/28/howto-atom-id
"""
bits = urlparse(url)
d = ""
if date is not None:
d = ",%s" % date.strftime("%Y-%m-%d")
return "tag:%s%s:%s/%s" % (bits.hostname, d, bits.path, bits.fragment)
def _guess_stylesheet_mimetype(url):
"""
Return the given stylesheet's mimetype tuple, using a slightly custom
version of Python's mimetypes.guess_type().
"""
mimetypedb = mimetypes.MimeTypes()
# The official mimetype for XSLT files is technically
# `application/xslt+xml` but as of 2024 almost no browser supports that
# (they all expect text/xsl). On top of that, Windows seems to assume that
# the type for xsl is text/xml.
mimetypedb.readfp(StringIO("text/xsl\txsl\ntext/xsl\txslt"))
return mimetypedb.guess_type(url)
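# Illustrative: with the overrides above, guess_type() returns a
# (mimetype, encoding) tuple:
#     _guess_stylesheet_mimetype("style.xsl")  -> ("text/xsl", None)
#     _guess_stylesheet_mimetype("style.css")  -> ("text/css", None)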
class Stylesheet:
"""An RSS stylesheet"""
def __init__(self, url, mimetype="", media="screen"):
self._url = url
self._mimetype = mimetype
self.media = media
# Using a property to delay the evaluation of self._url as late as possible
# in case of a lazy object (like reverse_lazy(...) for example).
@property
def url(self):
return iri_to_uri(self._url)
@property
def mimetype(self):
if self._mimetype == "":
return _guess_stylesheet_mimetype(self.url)[0]
return self._mimetype
def __str__(self):
attrs = {
"href": iri_to_uri(self._url),
"type": self.mimetype,
"media": self.media,
}
return flatatt(attrs).strip()
def __repr__(self):
return repr((self.url, self.mimetype, self.media))
class SyndicationFeed:
"Base class for all syndication feeds. Subclasses should provide write()"
def __init__(
self,
title,
link,
description,
language=None,
author_email=None,
author_name=None,
author_link=None,
subtitle=None,
categories=None,
feed_url=None,
feed_copyright=None,
feed_guid=None,
ttl=None,
stylesheets=None,
**kwargs,
):
def to_str(s):
return str(s) if s is not None else s
def to_stylesheet(s):
return s if isinstance(s, Stylesheet) else Stylesheet(s)
categories = categories and [str(c) for c in categories]
if stylesheets is not None:
if isinstance(stylesheets, (Stylesheet, str)):
raise TypeError(
f"stylesheets should be a list, not {stylesheets.__class__}"
)
stylesheets = [to_stylesheet(s) for s in stylesheets]
self.feed = {
"title": to_str(title),
"link": iri_to_uri(link),
"description": to_str(description),
"language": to_str(language),
"author_email": to_str(author_email),
"author_name": to_str(author_name),
"author_link": iri_to_uri(author_link),
"subtitle": to_str(subtitle),
"categories": categories or (),
"feed_url": iri_to_uri(feed_url),
"feed_copyright": to_str(feed_copyright),
"id": feed_guid or link,
"ttl": to_str(ttl),
"stylesheets": stylesheets,
**kwargs,
}
self.items = []
def add_item(
self,
title,
link,
description,
author_email=None,
author_name=None,
author_link=None,
pubdate=None,
comments=None,
unique_id=None,
unique_id_is_permalink=None,
categories=(),
item_copyright=None,
ttl=None,
updateddate=None,
enclosures=None,
**kwargs,
):
"""
Add an item to the feed. All args are expected to be strings except
pubdate and updateddate, which are datetime.datetime objects, and
enclosures, which is an iterable of instances of the Enclosure class.
"""
def to_str(s):
return str(s) if s is not None else s
categories = categories and [to_str(c) for c in categories]
self.items.append(
{
"title": to_str(title),
"link": iri_to_uri(link),
"description": to_str(description),
"author_email": to_str(author_email),
"author_name": to_str(author_name),
"author_link": iri_to_uri(author_link),
"pubdate": pubdate,
"updateddate": updateddate,
"comments": to_str(comments),
"unique_id": to_str(unique_id),
"unique_id_is_permalink": unique_id_is_permalink,
"enclosures": enclosures or (),
"categories": categories or (),
"item_copyright": to_str(item_copyright),
"ttl": to_str(ttl),
**kwargs,
}
)
def num_items(self):
return len(self.items)
def root_attributes(self):
"""
Return extra attributes to place on the root (i.e. feed/channel)
element. Called from write().
"""
return {}
def add_root_elements(self, handler):
"""
Add elements in the root (i.e. feed/channel) element. Called
from write().
"""
pass
def add_stylesheets(self, handler):
"""
Add stylesheet(s) to the feed. Called from write().
"""
pass
def item_attributes(self, item):
"""
Return extra attributes to place on each item (i.e. item/entry)
element.
"""
return {}
def add_item_elements(self, handler, item):
"""
Add elements on each item (i.e. item/entry) element.
"""
pass
def write(self, outfile, encoding):
"""
Output the feed in the given encoding to outfile, which is a file-like
object. Subclasses should override this.
"""
raise NotImplementedError(
"subclasses of SyndicationFeed must provide a write() method"
)
def writeString(self, encoding):
"""
Return the feed in the given encoding as a string.
"""
s = StringIO()
self.write(s, encoding)
return s.getvalue()
def latest_post_date(self):
"""
Return the latest item's pubdate or updateddate. If no items
have either of these attributes, this returns the current UTC date/time.
"""
latest_date = None
date_keys = ("updateddate", "pubdate")
for item in self.items:
for date_key in date_keys:
item_date = item.get(date_key)
if item_date:
if latest_date is None or item_date > latest_date:
latest_date = item_date
return latest_date or datetime.datetime.now(tz=datetime.UTC)
class Enclosure:
"""An RSS enclosure"""
def __init__(self, url, length, mime_type):
"All args are expected to be strings"
self.length, self.mime_type = length, mime_type
self.url = iri_to_uri(url)
class RssFeed(SyndicationFeed):
content_type = "application/rss+xml; charset=utf-8"
def write(self, outfile, encoding):
handler = SimplerXMLGenerator(outfile, encoding, short_empty_elements=True)
handler.startDocument()
# Any stylesheet must come after the start of the document but before
# any tag. https://www.w3.org/Style/styling-XML.en.html
self.add_stylesheets(handler)
handler.startElement("rss", self.rss_attributes())
handler.startElement("channel", self.root_attributes())
self.add_root_elements(handler)
self.write_items(handler)
self.endChannelElement(handler)
handler.endElement("rss")
def rss_attributes(self):
return {
"version": self._version,
"xmlns:atom": "http://www.w3.org/2005/Atom",
}
def write_items(self, handler):
for item in self.items:
handler.startElement("item", self.item_attributes(item))
self.add_item_elements(handler, item)
handler.endElement("item")
def add_stylesheets(self, handler):
for stylesheet in self.feed["stylesheets"] or []:
handler.processingInstruction("xml-stylesheet", stylesheet)
def add_root_elements(self, handler):
handler.addQuickElement("title", self.feed["title"])
handler.addQuickElement("link", self.feed["link"])
handler.addQuickElement("description", self.feed["description"])
if self.feed["feed_url"] is not None:
handler.addQuickElement(
"atom:link", None, {"rel": "self", "href": self.feed["feed_url"]}
)
if self.feed["language"] is not None:
handler.addQuickElement("language", self.feed["language"])
for cat in self.feed["categories"]:
handler.addQuickElement("category", cat)
if self.feed["feed_copyright"] is not None:
handler.addQuickElement("copyright", self.feed["feed_copyright"])
handler.addQuickElement("lastBuildDate", rfc2822_date(self.latest_post_date()))
if self.feed["ttl"] is not None:
handler.addQuickElement("ttl", self.feed["ttl"])
def endChannelElement(self, handler):
handler.endElement("channel")
class RssUserland091Feed(RssFeed):
_version = "0.91"
def add_item_elements(self, handler, item):
handler.addQuickElement("title", item["title"])
handler.addQuickElement("link", item["link"])
if item["description"] is not None:
handler.addQuickElement("description", item["description"])
class Rss201rev2Feed(RssFeed):
# Spec: https://cyber.harvard.edu/rss/rss.html
_version = "2.0"
def add_item_elements(self, handler, item):
handler.addQuickElement("title", item["title"])
handler.addQuickElement("link", item["link"])
if item["description"] is not None:
handler.addQuickElement("description", item["description"])
# Author information.
if item["author_name"] and item["author_email"]:
handler.addQuickElement(
"author", "%s (%s)" % (item["author_email"], item["author_name"])
)
elif item["author_email"]:
handler.addQuickElement("author", item["author_email"])
elif item["author_name"]:
handler.addQuickElement(
"dc:creator",
item["author_name"],
{"xmlns:dc": "http://purl.org/dc/elements/1.1/"},
)
if item["pubdate"] is not None:
handler.addQuickElement("pubDate", rfc2822_date(item["pubdate"]))
if item["comments"] is not None:
handler.addQuickElement("comments", item["comments"])
if item["unique_id"] is not None:
guid_attrs = {}
if isinstance(item.get("unique_id_is_permalink"), bool):
guid_attrs["isPermaLink"] = str(item["unique_id_is_permalink"]).lower()
handler.addQuickElement("guid", item["unique_id"], guid_attrs)
if item["ttl"] is not None:
handler.addQuickElement("ttl", item["ttl"])
# Enclosure.
if item["enclosures"]:
enclosures = list(item["enclosures"])
if len(enclosures) > 1:
raise ValueError(
"RSS feed items may only have one enclosure, see "
"http://www.rssboard.org/rss-profile#element-channel-item-enclosure"
)
enclosure = enclosures[0]
handler.addQuickElement(
"enclosure",
"",
{
"url": enclosure.url,
"length": enclosure.length,
"type": enclosure.mime_type,
},
)
# Categories.
for cat in item["categories"]:
handler.addQuickElement("category", cat)
class Atom1Feed(SyndicationFeed):
# Spec: https://tools.ietf.org/html/rfc4287
content_type = "application/atom+xml; charset=utf-8"
ns = "http://www.w3.org/2005/Atom"
def write(self, outfile, encoding):
handler = SimplerXMLGenerator(outfile, encoding, short_empty_elements=True)
handler.startDocument()
handler.startElement("feed", self.root_attributes())
self.add_root_elements(handler)
self.write_items(handler)
handler.endElement("feed")
def root_attributes(self):
if self.feed["language"] is not None:
return {"xmlns": self.ns, "xml:lang": self.feed["language"]}
else:
return {"xmlns": self.ns}
def add_root_elements(self, handler):
handler.addQuickElement("title", self.feed["title"])
handler.addQuickElement(
"link", "", {"rel": "alternate", "href": self.feed["link"]}
)
if self.feed["feed_url"] is not None:
handler.addQuickElement(
"link", "", {"rel": "self", "href": self.feed["feed_url"]}
)
handler.addQuickElement("id", self.feed["id"])
handler.addQuickElement("updated", rfc3339_date(self.latest_post_date()))
if self.feed["author_name"] is not None:
handler.startElement("author", {})
handler.addQuickElement("name", self.feed["author_name"])
if self.feed["author_email"] is not None:
handler.addQuickElement("email", self.feed["author_email"])
if self.feed["author_link"] is not None:
handler.addQuickElement("uri", self.feed["author_link"])
handler.endElement("author")
if self.feed["subtitle"] is not None:
handler.addQuickElement("subtitle", self.feed["subtitle"])
for cat in self.feed["categories"]:
handler.addQuickElement("category", "", {"term": cat})
if self.feed["feed_copyright"] is not None:
handler.addQuickElement("rights", self.feed["feed_copyright"])
def write_items(self, handler):
for item in self.items:
handler.startElement("entry", self.item_attributes(item))
self.add_item_elements(handler, item)
handler.endElement("entry")
def add_item_elements(self, handler, item):
handler.addQuickElement("title", item["title"])
handler.addQuickElement("link", "", {"href": item["link"], "rel": "alternate"})
if item["pubdate"] is not None:
handler.addQuickElement("published", rfc3339_date(item["pubdate"]))
if item["updateddate"] is not None:
handler.addQuickElement("updated", rfc3339_date(item["updateddate"]))
# Author information.
if item["author_name"] is not None:
handler.startElement("author", {})
handler.addQuickElement("name", item["author_name"])
if item["author_email"] is not None:
handler.addQuickElement("email", item["author_email"])
if item["author_link"] is not None:
handler.addQuickElement("uri", item["author_link"])
handler.endElement("author")
# Unique ID.
if item["unique_id"] is not None:
unique_id = item["unique_id"]
else:
unique_id = get_tag_uri(item["link"], item["pubdate"])
handler.addQuickElement("id", unique_id)
# Summary.
if item["description"] is not None:
handler.addQuickElement("summary", item["description"], {"type": "html"})
# Enclosures.
for enclosure in item["enclosures"]:
handler.addQuickElement(
"link",
"",
{
"rel": "enclosure",
"href": enclosure.url,
"length": enclosure.length,
"type": enclosure.mime_type,
},
)
# Categories.
for cat in item["categories"]:
handler.addQuickElement("category", "", {"term": cat})
# Rights.
if item["item_copyright"] is not None:
handler.addQuickElement("rights", item["item_copyright"])
# This isolates the decision of what the system default is, so calling code can
# do "feedgenerator.DefaultFeed" instead of "feedgenerator.Rss201rev2Feed".
DefaultFeed = Rss201rev2Feed
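# --- Editorial sketch (not part of the module above; assumes only the public
# API exercised by the tests that follow). Calling code can stay agnostic
# about the concrete feed format by going through the DefaultFeed alias:
if __name__ == "__main__":
    demo_feed = DefaultFeed("My blog", "/blog/", "Latest entries")
    demo_feed.add_item("First post", "/blog/first/", "Hello, world.")
    print(demo_feed.writeString("utf-8"))  # Serialized RSS document.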
|
import datetime
from unittest import mock
from django.test import SimpleTestCase
from django.utils import feedgenerator
from django.utils.functional import SimpleLazyObject
from django.utils.timezone import get_fixed_timezone
class FeedgeneratorTests(SimpleTestCase):
"""
Tests for the low-level syndication feed framework.
"""
def test_get_tag_uri(self):
"""
get_tag_uri() correctly generates TagURIs.
"""
self.assertEqual(
feedgenerator.get_tag_uri(
"http://example.org/foo/bar#headline", datetime.date(2004, 10, 25)
),
"tag:example.org,2004-10-25:/foo/bar/headline",
)
def test_get_tag_uri_with_port(self):
"""
get_tag_uri() correctly generates TagURIs from URLs with port numbers.
"""
self.assertEqual(
feedgenerator.get_tag_uri(
"http://www.example.org:8000/2008/11/14/django#headline",
datetime.datetime(2008, 11, 14, 13, 37, 0),
),
"tag:www.example.org,2008-11-14:/2008/11/14/django/headline",
)
def test_rfc2822_date(self):
"""
rfc2822_date() correctly formats datetime objects.
"""
self.assertEqual(
feedgenerator.rfc2822_date(datetime.datetime(2008, 11, 14, 13, 37, 0)),
"Fri, 14 Nov 2008 13:37:00 -0000",
)
def test_rfc2822_date_with_timezone(self):
"""
rfc2822_date() correctly formats datetime objects with tzinfo.
"""
self.assertEqual(
feedgenerator.rfc2822_date(
datetime.datetime(
2008, 11, 14, 13, 37, 0, tzinfo=get_fixed_timezone(60)
)
),
"Fri, 14 Nov 2008 13:37:00 +0100",
)
def test_rfc2822_date_without_time(self):
"""
rfc2822_date() correctly formats date objects.
"""
self.assertEqual(
feedgenerator.rfc2822_date(datetime.date(2008, 11, 14)),
"Fri, 14 Nov 2008 00:00:00 -0000",
)
def test_rfc3339_date(self):
"""
rfc3339_date() correctly formats datetime objects.
"""
self.assertEqual(
feedgenerator.rfc3339_date(datetime.datetime(2008, 11, 14, 13, 37, 0)),
"2008-11-14T13:37:00Z",
)
def test_rfc3339_date_with_timezone(self):
"""
rfc3339_date() correctly formats datetime objects with tzinfo.
"""
self.assertEqual(
feedgenerator.rfc3339_date(
datetime.datetime(
2008, 11, 14, 13, 37, 0, tzinfo=get_fixed_timezone(120)
)
),
"2008-11-14T13:37:00+02:00",
)
def test_rfc3339_date_without_time(self):
"""
rfc3339_date() correctly formats date objects.
"""
self.assertEqual(
feedgenerator.rfc3339_date(datetime.date(2008, 11, 14)),
"2008-11-14T00:00:00Z",
)
def test_atom1_mime_type(self):
"""
Atom MIME type has the UTF-8 charset parameter set
"""
atom_feed = feedgenerator.Atom1Feed("title", "link", "description")
self.assertEqual(atom_feed.content_type, "application/atom+xml; charset=utf-8")
def test_rss_mime_type(self):
"""
RSS MIME type has the UTF-8 charset parameter set
"""
rss_feed = feedgenerator.Rss201rev2Feed("title", "link", "description")
self.assertEqual(rss_feed.content_type, "application/rss+xml; charset=utf-8")
# Two regression tests for #14202
def test_feed_without_feed_url_gets_rendered_without_atom_link(self):
feed = feedgenerator.Rss201rev2Feed("title", "/link/", "descr")
self.assertIsNone(feed.feed["feed_url"])
feed_content = feed.writeString("utf-8")
self.assertNotIn("<atom:link", feed_content)
self.assertNotIn('href="/feed/"', feed_content)
self.assertNotIn('rel="self"', feed_content)
def test_feed_with_feed_url_gets_rendered_with_atom_link(self):
feed = feedgenerator.Rss201rev2Feed(
"title", "/link/", "descr", feed_url="/feed/"
)
self.assertEqual(feed.feed["feed_url"], "/feed/")
feed_content = feed.writeString("utf-8")
self.assertIn("<atom:link", feed_content)
self.assertIn('href="/feed/"', feed_content)
self.assertIn('rel="self"', feed_content)
def test_atom_add_item(self):
# Not providing any optional arguments to Atom1Feed.add_item()
feed = feedgenerator.Atom1Feed("title", "/link/", "descr")
feed.add_item("item_title", "item_link", "item_description")
feed.writeString("utf-8")
def test_deterministic_attribute_order(self):
feed = feedgenerator.Atom1Feed("title", "/link/", "desc")
feed_content = feed.writeString("utf-8")
self.assertIn('href="/link/" rel="alternate"', feed_content)
def test_latest_post_date_returns_utc_time(self):
for use_tz in (True, False):
with self.settings(USE_TZ=use_tz):
rss_feed = feedgenerator.Rss201rev2Feed("title", "link", "description")
self.assertEqual(
rss_feed.latest_post_date().tzinfo,
datetime.UTC,
)
def test_stylesheet_keeps_lazy_urls(self):
m = mock.Mock(return_value="test.css")
stylesheet = feedgenerator.Stylesheet(SimpleLazyObject(m))
m.assert_not_called()
self.assertEqual(
str(stylesheet), 'href="test.css" media="screen" type="text/css"'
)
m.assert_called_once()
def test_stylesheet_attribute_escaping(self):
style = feedgenerator.Stylesheet(
url='http://example.com/style.css?foo="bar"&baz=<>',
mimetype='text/css; charset="utf-8"',
media='screen and (max-width: "600px")',
)
self.assertEqual(
str(style),
'href="http://example.com/style.css?foo=%22bar%22&baz=%3C%3E" '
'media="screen and (max-width: "600px")" '
'type="text/css; charset="utf-8""',
)
|
./temp_repos/django/django/utils/feedgenerator.py
|
./temp_repos/django/tests/utils_tests/test_feedgenerator.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Stylesheet'.
Context:
- Class Name: Stylesheet
- Dependencies to Mock: stylesheets, mime_type, link, author_link, feed_copyright, author_email, description, url, ttl, media, subtitle, author_name, title, mimetype, categories, feed_guid, language, feed_url, length
- Key Imports: mimetypes, io, django.utils.xmlutils, email, django.utils.encoding, urllib.parse, django.forms.utils, datetime
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
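A minimal sketch of such a test follows, grounded only in the Stylesheet
behavior exercised by the Django tests above (keyword constructor arguments,
lazy URL evaluation, and the href/media/type string form); the test class and
method names are illustrative:
import unittest
from unittest import mock
from django.utils.feedgenerator import Stylesheet
from django.utils.functional import SimpleLazyObject
class StylesheetSketchTests(unittest.TestCase):
    def test_str_renders_href_media_and_type(self):
        # Arrange
        stylesheet = Stylesheet(url="style.css", mimetype="text/css", media="screen")
        # Act
        rendered = str(stylesheet)
        # Assert
        self.assertEqual(rendered, 'href="style.css" media="screen" type="text/css"')
    def test_lazy_url_is_not_evaluated_eagerly(self):
        # Arrange: mock the URL factory so premature evaluation is detectable.
        url_factory = mock.Mock(return_value="style.css")
        stylesheet = Stylesheet(SimpleLazyObject(url_factory))
        # Assert the failure scenario first: an eager call would trip this.
        url_factory.assert_not_called()
        # Act
        rendered = str(stylesheet)
        # Assert
        url_factory.assert_called_once()
        self.assertIn('href="style.css"', rendered)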
|
Stylesheet
|
python
|
import codecs
import datetime
import locale
from decimal import Decimal
from types import NoneType
from urllib.parse import quote
from django.utils.functional import Promise
class DjangoUnicodeDecodeError(UnicodeDecodeError):
def __str__(self):
return "%s. You passed in %r (%s)" % (
super().__str__(),
self.object,
type(self.object),
)
def smart_str(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Return a string representing 's'. Treat bytestrings using the 'encoding'
codec.
If strings_only is True, don't convert (some) non-string-like objects.
"""
if isinstance(s, Promise):
# The input is the result of a gettext_lazy() call.
return s
return force_str(s, encoding, strings_only, errors)
_PROTECTED_TYPES = (
NoneType,
int,
float,
Decimal,
datetime.datetime,
datetime.date,
datetime.time,
)
def is_protected_type(obj):
"""Determine if the object instance is of a protected type.
Objects of protected types are preserved as-is when passed to
force_str(strings_only=True).
"""
return isinstance(obj, _PROTECTED_TYPES)
def force_str(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Similar to smart_str(), except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
# Handle the common case first for performance reasons.
if issubclass(type(s), str):
return s
if strings_only and is_protected_type(s):
return s
try:
if isinstance(s, bytes):
s = str(s, encoding, errors)
else:
s = str(s)
except UnicodeDecodeError as e:
raise DjangoUnicodeDecodeError(*e.args) from None
return s
def smart_bytes(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Return a bytestring version of 's', encoded as specified in 'encoding'.
If strings_only is True, don't convert (some) non-string-like objects.
"""
if isinstance(s, Promise):
# The input is the result of a gettext_lazy() call.
return s
return force_bytes(s, encoding, strings_only, errors)
def force_bytes(s, encoding="utf-8", strings_only=False, errors="strict"):
"""
Similar to smart_bytes, except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
# Handle the common case first for performance reasons.
if isinstance(s, bytes):
if encoding == "utf-8":
return s
else:
return s.decode("utf-8", errors).encode(encoding, errors)
if strings_only and is_protected_type(s):
return s
if isinstance(s, memoryview):
return bytes(s)
return str(s).encode(encoding, errors)
def iri_to_uri(iri):
"""
Convert an Internationalized Resource Identifier (IRI) portion to a URI
portion that is suitable for inclusion in a URL.
This is the algorithm from RFC 3987 Section 3.1, slightly simplified since
the input is assumed to be a string rather than an arbitrary byte stream.
Take an IRI (string or UTF-8 bytes, e.g. '/I ♥ Django/' or
b'/I \xe2\x99\xa5 Django/') and return a string containing the encoded
result with ASCII chars only (e.g. '/I%20%E2%99%A5%20Django/').
"""
# The list of safe characters here is constructed from the "reserved" and
# "unreserved" characters specified in RFC 3986 Sections 2.2 and 2.3:
# reserved = gen-delims / sub-delims
# gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
# sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
# / "*" / "+" / "," / ";" / "="
# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
# Of the unreserved characters, urllib.parse.quote() already considers all
# but the ~ safe.
# The % character is also added to the list of safe characters here, as the
# end of RFC 3987 Section 3.1 specifically mentions that % must not be
# converted.
if iri is None:
return iri
elif isinstance(iri, Promise):
iri = str(iri)
return quote(iri, safe="/#%[]=:;$&()+,!?*@'~")
# List of byte values that uri_to_iri() decodes from percent encoding.
# First, the unreserved characters from RFC 3986:
_ascii_ranges = [[45, 46, 95, 126], range(65, 91), range(97, 123)]
_hextobyte = {
(fmt % char).encode(): bytes((char,))
for ascii_range in _ascii_ranges
for char in ascii_range
for fmt in ["%02x", "%02X"]
}
# And then everything above 128, because bytes ≥ 128 are part of multibyte
# Unicode characters.
_hexdig = "0123456789ABCDEFabcdef"
_hextobyte.update(
{(a + b).encode(): bytes.fromhex(a + b) for a in _hexdig[8:] for b in _hexdig}
)
def uri_to_iri(uri):
"""
Convert a Uniform Resource Identifier (URI) into an Internationalized
Resource Identifier (IRI).
This is the algorithm from RFC 3987 Section 3.2, excluding step 4.
Take a URI in ASCII bytes (e.g. '/I%20%E2%99%A5%20Django/') and return
a string containing the encoded result (e.g. '/I%20♥%20Django/').
"""
if uri is None:
return uri
uri = force_bytes(uri)
# Fast selective unquote: First, split on '%' and then starting with the
# second block, decode the first 2 bytes if they represent a hex code to
# decode. The rest of the block is the part after '%AB', not containing
# any '%'. Add that to the output without further processing.
bits = uri.split(b"%")
if len(bits) == 1:
iri = uri
else:
parts = [bits[0]]
append = parts.append
hextobyte = _hextobyte
for item in bits[1:]:
hex = item[:2]
if hex in hextobyte:
append(hextobyte[item[:2]])
append(item[2:])
else:
append(b"%")
append(item)
iri = b"".join(parts)
return repercent_broken_unicode(iri).decode()
def escape_uri_path(path):
"""
Escape the unsafe characters from the path portion of a Uniform Resource
Identifier (URI).
"""
# These are the "reserved" and "unreserved" characters specified in RFC
# 3986 Sections 2.2 and 2.3:
# reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | ","
# unreserved = alphanum | mark
# mark = "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")"
# The list of safe characters here is constructed subtracting ";", "=",
# and "?" according to RFC 3986 Section 3.3.
# The reason for not subtracting and escaping "/" is that we are escaping
# the entire path, not a path segment.
return quote(path, safe="/:@&+$,-_.!~*'()")
def punycode(domain):
"""Return the Punycode of the given domain if it's non-ASCII."""
return domain.encode("idna").decode("ascii")
def repercent_broken_unicode(path):
"""
As per RFC 3987 Section 3.2, step three of converting a URI into an IRI,
repercent-encode any octet produced that is not part of a strictly legal
UTF-8 octet sequence.
"""
changed_parts = []
while True:
try:
path.decode()
except UnicodeDecodeError as e:
# CVE-2019-14235: A recursion shouldn't be used since the exception
# handling uses massive amounts of memory
repercent = quote(path[e.start : e.end], safe=b"/#%[]=:;$&()+,!?*@'~")
changed_parts.append(path[: e.start] + repercent.encode())
path = path[e.end :]
else:
return b"".join(changed_parts) + path
def filepath_to_uri(path):
"""Convert a file system path to a URI portion that is suitable for
inclusion in a URL.
Encode certain chars that would normally be recognized as special chars
for URIs. Do not encode the ' character, as it is a valid character
within URIs. See the encodeURIComponent() JavaScript function for details.
"""
if path is None:
return path
# I know about `os.sep` and `os.altsep` but I want to leave
# some flexibility for hardcoding separators.
return quote(str(path).replace("\\", "/"), safe="/~!*()'")
def get_system_encoding():
"""
The encoding for the character type functions. Fall back to 'ascii' if the
encoding is unsupported by Python or could not be determined. See tickets
#10335 and #5846.
"""
try:
encoding = locale.getlocale()[1] or "ascii"
codecs.lookup(encoding)
except Exception:
encoding = "ascii"
return encoding
DEFAULT_LOCALE_ENCODING = get_system_encoding()
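# --- Editorial sketch (not part of the module above): the conversions the
# docstrings promise, shown as executable assertions.
if __name__ == "__main__":
    assert force_str(b"I \xe2\x99\xa5 Django") == "I ♥ Django"
    assert force_bytes("I ♥ Django") == b"I \xe2\x99\xa5 Django"
    # Percent-encoding per iri_to_uri()'s docstring example.
    assert iri_to_uri("/I ♥ Django/") == "/I%20%E2%99%A5%20Django/"
    # uri_to_iri() decodes only unreserved octets, so %20 stays escaped.
    assert uri_to_iri("/I%20%E2%99%A5%20Django/") == "/I%20♥%20Django/"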
|
import datetime
import inspect
import sys
import unittest
from pathlib import Path
from unittest import mock
from urllib.parse import quote, quote_plus
from django.test import SimpleTestCase
from django.utils.encoding import (
DjangoUnicodeDecodeError,
escape_uri_path,
filepath_to_uri,
force_bytes,
force_str,
get_system_encoding,
iri_to_uri,
repercent_broken_unicode,
smart_bytes,
smart_str,
uri_to_iri,
)
from django.utils.functional import SimpleLazyObject
from django.utils.translation import gettext_lazy
from django.utils.version import PYPY
class TestEncodingUtils(SimpleTestCase):
def test_force_str_exception(self):
"""
Broken __str__ actually raises an error.
"""
class MyString:
def __str__(self):
return b"\xc3\xb6\xc3\xa4\xc3\xbc"
# str(s) raises a TypeError if the result is not a text type.
with self.assertRaises(TypeError):
force_str(MyString())
def test_force_str_lazy(self):
s = SimpleLazyObject(lambda: "x")
self.assertIs(type(force_str(s)), str)
def test_force_str_DjangoUnicodeDecodeError(self):
reason = "unexpected end of data" if PYPY else "invalid start byte"
msg = (
f"'utf-8' codec can't decode byte 0xff in position 0: {reason}. "
"You passed in b'\\xff' (<class 'bytes'>)"
)
with self.assertRaisesMessage(DjangoUnicodeDecodeError, msg):
force_str(b"\xff")
def test_force_bytes_exception(self):
"""
force_bytes knows how to convert to bytes an exception
containing non-ASCII characters in its args.
"""
error_msg = "This is an exception, voilà"
exc = ValueError(error_msg)
self.assertEqual(force_bytes(exc), error_msg.encode())
self.assertEqual(
force_bytes(exc, encoding="ascii", errors="ignore"),
b"This is an exception, voil",
)
def test_force_bytes_strings_only(self):
today = datetime.date.today()
self.assertEqual(force_bytes(today, strings_only=True), today)
def test_force_bytes_encoding(self):
error_msg = "This is an exception, voilà".encode()
result = force_bytes(error_msg, encoding="ascii", errors="ignore")
self.assertEqual(result, b"This is an exception, voil")
def test_force_bytes_memory_view(self):
data = b"abc"
result = force_bytes(memoryview(data))
# Type check is needed because memoryview(bytes) == bytes.
self.assertIs(type(result), bytes)
self.assertEqual(result, data)
def test_smart_bytes(self):
class Test:
def __str__(self):
return "ŠĐĆŽćžšđ"
lazy_func = gettext_lazy("x")
self.assertIs(smart_bytes(lazy_func), lazy_func)
self.assertEqual(
smart_bytes(Test()),
b"\xc5\xa0\xc4\x90\xc4\x86\xc5\xbd\xc4\x87\xc5\xbe\xc5\xa1\xc4\x91",
)
self.assertEqual(smart_bytes(1), b"1")
self.assertEqual(smart_bytes("foo"), b"foo")
def test_smart_str(self):
class Test:
def __str__(self):
return "ŠĐĆŽćžšđ"
lazy_func = gettext_lazy("x")
self.assertIs(smart_str(lazy_func), lazy_func)
self.assertEqual(
smart_str(Test()), "\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111"
)
self.assertEqual(smart_str(1), "1")
self.assertEqual(smart_str("foo"), "foo")
def test_get_default_encoding(self):
with mock.patch("locale.getlocale", side_effect=Exception):
self.assertEqual(get_system_encoding(), "ascii")
def test_repercent_broken_unicode_recursion_error(self):
# Prepare a string long enough to force a recursion error if the tested
# function uses recursion.
data = b"\xfc" * sys.getrecursionlimit()
try:
self.assertEqual(
repercent_broken_unicode(data), b"%FC" * sys.getrecursionlimit()
)
except RecursionError:
self.fail("Unexpected RecursionError raised.")
def test_repercent_broken_unicode_small_fragments(self):
data = b"test\xfctest\xfctest\xfc"
decoded_paths = []
def mock_quote(*args, **kwargs):
# The second frame is the call to repercent_broken_unicode().
decoded_paths.append(inspect.currentframe().f_back.f_locals["path"])
return quote(*args, **kwargs)
with mock.patch("django.utils.encoding.quote", mock_quote):
self.assertEqual(repercent_broken_unicode(data), b"test%FCtest%FCtest%FC")
# decode() is called on smaller fragment of the path each time.
self.assertEqual(
decoded_paths,
[b"test\xfctest\xfctest\xfc", b"test\xfctest\xfc", b"test\xfc"],
)
class TestRFC3987IEncodingUtils(unittest.TestCase):
def test_filepath_to_uri(self):
self.assertIsNone(filepath_to_uri(None))
self.assertEqual(
filepath_to_uri("upload\\чубака.mp4"),
"upload/%D1%87%D1%83%D0%B1%D0%B0%D0%BA%D0%B0.mp4",
)
self.assertEqual(filepath_to_uri(Path("upload/test.png")), "upload/test.png")
self.assertEqual(filepath_to_uri(Path("upload\\test.png")), "upload/test.png")
def test_iri_to_uri(self):
cases = [
# Valid UTF-8 sequences are encoded.
("red%09rosé#red", "red%09ros%C3%A9#red"),
("/blog/for/Jürgen Münster/", "/blog/for/J%C3%BCrgen%20M%C3%BCnster/"),
(
"locations/%s" % quote_plus("Paris & Orléans"),
"locations/Paris+%26+Orl%C3%A9ans",
),
# Reserved chars remain unescaped.
("%&", "%&"),
("red&♥ros%#red", "red&%E2%99%A5ros%#red"),
(gettext_lazy("red&♥ros%#red"), "red&%E2%99%A5ros%#red"),
]
for iri, uri in cases:
with self.subTest(iri):
self.assertEqual(iri_to_uri(iri), uri)
# Test idempotency.
self.assertEqual(iri_to_uri(iri_to_uri(iri)), uri)
def test_uri_to_iri(self):
cases = [
(None, None),
# Valid UTF-8 sequences are decoded.
("/%e2%89%Ab%E2%99%a5%E2%89%aB/", "/≫♥≫/"),
("/%E2%99%A5%E2%99%A5/?utf8=%E2%9C%93", "/♥♥/?utf8=✓"),
("/%41%5a%6B/", "/AZk/"),
# Reserved and non-URL valid ASCII chars are not decoded.
("/%25%20%02%41%7b/", "/%25%20%02A%7b/"),
# Broken UTF-8 sequences remain escaped.
("/%AAd%AAj%AAa%AAn%AAg%AAo%AA/", "/%AAd%AAj%AAa%AAn%AAg%AAo%AA/"),
("/%E2%99%A5%E2%E2%99%A5/", "/♥%E2♥/"),
("/%E2%99%A5%E2%99%E2%99%A5/", "/♥%E2%99♥/"),
("/%E2%E2%99%A5%E2%99%A5%99/", "/%E2♥♥%99/"),
(
"/%E2%99%A5%E2%99%A5/?utf8=%9C%93%E2%9C%93%9C%93",
"/♥♥/?utf8=%9C%93✓%9C%93",
),
]
for uri, iri in cases:
with self.subTest(uri):
self.assertEqual(uri_to_iri(uri), iri)
# Test idempotency.
self.assertEqual(uri_to_iri(uri_to_iri(uri)), iri)
def test_complementarity(self):
cases = [
(
"/blog/for/J%C3%BCrgen%20M%C3%BCnster/",
"/blog/for/J\xfcrgen%20M\xfcnster/",
),
("%&", "%&"),
("red&%E2%99%A5ros%#red", "red&♥ros%#red"),
("/%E2%99%A5%E2%99%A5/", "/♥♥/"),
("/%E2%99%A5%E2%99%A5/?utf8=%E2%9C%93", "/♥♥/?utf8=✓"),
("/%25%20%02%7b/", "/%25%20%02%7b/"),
("/%AAd%AAj%AAa%AAn%AAg%AAo%AA/", "/%AAd%AAj%AAa%AAn%AAg%AAo%AA/"),
("/%E2%99%A5%E2%E2%99%A5/", "/♥%E2♥/"),
("/%E2%99%A5%E2%99%E2%99%A5/", "/♥%E2%99♥/"),
("/%E2%E2%99%A5%E2%99%A5%99/", "/%E2♥♥%99/"),
(
"/%E2%99%A5%E2%99%A5/?utf8=%9C%93%E2%9C%93%9C%93",
"/♥♥/?utf8=%9C%93✓%9C%93",
),
]
for uri, iri in cases:
with self.subTest(uri):
self.assertEqual(iri_to_uri(uri_to_iri(uri)), uri)
self.assertEqual(uri_to_iri(iri_to_uri(iri)), iri)
def test_escape_uri_path(self):
cases = [
(
"/;some/=awful/?path/:with/@lots/&of/+awful/chars",
"/%3Bsome/%3Dawful/%3Fpath/:with/@lots/&of/+awful/chars",
),
("/foo#bar", "/foo%23bar"),
("/foo?bar", "/foo%3Fbar"),
]
for uri, expected in cases:
with self.subTest(uri):
self.assertEqual(escape_uri_path(uri), expected)
|
./temp_repos/django/django/utils/encoding.py
|
./temp_repos/django/tests/utils_tests/test_encoding.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DjangoUnicodeDecodeError'.
Context:
- Class Name: DjangoUnicodeDecodeError
- Dependencies to Mock: None detected
- Key Imports: decimal, codecs, locale, urllib.parse, django.utils.functional, datetime, types
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
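A minimal sketch follows. Since the context above detects no dependencies to
mock, there is nothing meaningful to patch; the sketch therefore exercises
DjangoUnicodeDecodeError directly through its inherited UnicodeDecodeError
constructor signature and through force_str(), with illustrative test names:
import unittest
from django.utils.encoding import DjangoUnicodeDecodeError, force_str
class DjangoUnicodeDecodeErrorSketchTests(unittest.TestCase):
    def test_str_appends_offending_object_and_type(self):
        # Arrange: UnicodeDecodeError args are (encoding, object, start, end, reason).
        exc = DjangoUnicodeDecodeError("utf-8", b"\xff", 0, 1, "invalid start byte")
        # Act
        message = str(exc)
        # Assert
        self.assertIn("You passed in b'\\xff'", message)
        self.assertIn("<class 'bytes'>", message)
    def test_force_str_raises_on_invalid_bytes(self):
        # Arrange / Act / Assert (failure scenario).
        with self.assertRaises(DjangoUnicodeDecodeError):
            force_str(b"\xff")
    def test_force_str_decodes_valid_bytes(self):
        # Arrange / Act / Assert (success scenario).
        self.assertEqual(force_str(b"abc"), "abc")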
|
DjangoUnicodeDecodeError
|
python
|
from collections.abc import Callable, Iterable, Iterator, Mapping
from itertools import islice, tee, zip_longest
from django.utils.functional import Promise
__all__ = [
"BaseChoiceIterator",
"BlankChoiceIterator",
"CallableChoiceIterator",
"flatten_choices",
"normalize_choices",
]
class BaseChoiceIterator:
"""Base class for lazy iterators for choices."""
def __eq__(self, other):
if isinstance(other, Iterable):
return all(a == b for a, b in zip_longest(self, other, fillvalue=object()))
return super().__eq__(other)
def __getitem__(self, index):
if isinstance(index, slice) or index < 0:
# Suboptimally consume whole iterator to handle slices and negative
# indexes.
return list(self)[index]
try:
return next(islice(self, index, index + 1))
except StopIteration:
raise IndexError("index out of range") from None
def __iter__(self):
raise NotImplementedError(
"BaseChoiceIterator subclasses must implement __iter__()."
)
class BlankChoiceIterator(BaseChoiceIterator):
"""Iterator to lazily inject a blank choice."""
def __init__(self, choices, blank_choice):
self.choices = choices
self.blank_choice = blank_choice
def __iter__(self):
choices, other = tee(self.choices)
if not any(value in ("", None) for value, _ in flatten_choices(other)):
yield from self.blank_choice
yield from choices
class CallableChoiceIterator(BaseChoiceIterator):
"""Iterator to lazily normalize choices generated by a callable."""
def __init__(self, func):
self.func = func
def __iter__(self):
yield from normalize_choices(self.func())
def flatten_choices(choices):
"""Flatten choices by removing nested values."""
for value_or_group, label_or_nested in choices or ():
if isinstance(label_or_nested, (list, tuple)):
yield from label_or_nested
else:
yield value_or_group, label_or_nested
def normalize_choices(value, *, depth=0):
"""Normalize choices values consistently for fields and widgets."""
# Avoid circular import when importing django.forms.
from django.db.models.enums import ChoicesType
match value:
case BaseChoiceIterator() | Promise() | bytes() | str():
# Avoid prematurely normalizing iterators that should be lazy.
# Because string-like types are iterable, return early to avoid
# iterating over them in the guard for the Iterable case below.
return value
case ChoicesType():
# Choices enumeration helpers already output in canonical form.
return value.choices
case Mapping() if depth < 2:
value = value.items()
case Iterator() if depth < 2:
# Although Iterator would be handled by the Iterable case below,
# the iterator would be consumed prematurely while checking that
# its elements are not string-like in the guard, so we handle it
# separately.
pass
case Iterable() if depth < 2 and not any(
isinstance(x, (Promise, bytes, str)) for x in value
):
# String-like types are iterable, so the guard above ensures that
# they're handled by the default case below.
pass
case Callable() if depth == 0:
# If at the top level, wrap callables to be evaluated lazily.
return CallableChoiceIterator(value)
case Callable() if depth < 2:
value = value()
case _:
return value
try:
# Recursive call to convert any nested values to a list of 2-tuples.
return [(k, normalize_choices(v, depth=depth + 1)) for k, v in value]
except (TypeError, ValueError):
# Return original value for the system check to raise if it has items
# that are not iterable or not 2-tuples:
# - TypeError: cannot unpack non-iterable <type> object
# - ValueError: <not enough / too many> values to unpack
return value
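# --- Editorial sketch (not part of the module above): callables handed to
# normalize_choices() are wrapped lazily, as the tests that follow verify.
if __name__ == "__main__":
    calls = []
    def get_choices():
        calls.append(1)
        return {"C": "Club", "D": "Diamond"}
    lazy_choices = normalize_choices(get_choices)
    assert isinstance(lazy_choices, CallableChoiceIterator)
    assert not calls  # The callable has not been evaluated yet.
    assert list(lazy_choices) == [("C", "Club"), ("D", "Diamond")]
    assert calls  # Evaluated only on iteration.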
|
import collections.abc
from unittest import mock
from django.db.models import TextChoices
from django.test import SimpleTestCase
from django.utils.choices import (
BaseChoiceIterator,
CallableChoiceIterator,
flatten_choices,
normalize_choices,
)
from django.utils.translation import gettext_lazy as _
class SimpleChoiceIterator(BaseChoiceIterator):
def __iter__(self):
return ((i, f"Item #{i}") for i in range(1, 4))
class ChoiceIteratorTests(SimpleTestCase):
def test_not_implemented_error_on_missing_iter(self):
class InvalidChoiceIterator(BaseChoiceIterator):
pass # Not overriding __iter__().
msg = "BaseChoiceIterator subclasses must implement __iter__()."
with self.assertRaisesMessage(NotImplementedError, msg):
iter(InvalidChoiceIterator())
def test_eq(self):
unrolled = [(1, "Item #1"), (2, "Item #2"), (3, "Item #3")]
self.assertEqual(SimpleChoiceIterator(), unrolled)
self.assertEqual(unrolled, SimpleChoiceIterator())
def test_eq_instances(self):
self.assertEqual(SimpleChoiceIterator(), SimpleChoiceIterator())
def test_not_equal_subset(self):
self.assertNotEqual(SimpleChoiceIterator(), [(1, "Item #1"), (2, "Item #2")])
def test_not_equal_superset(self):
self.assertNotEqual(
SimpleChoiceIterator(),
[(1, "Item #1"), (2, "Item #2"), (3, "Item #3"), None],
)
def test_getitem(self):
choices = SimpleChoiceIterator()
for i, expected in [(0, (1, "Item #1")), (-1, (3, "Item #3"))]:
with self.subTest(index=i):
self.assertEqual(choices[i], expected)
def test_getitem_indexerror(self):
choices = SimpleChoiceIterator()
for i in (4, -4):
with self.subTest(index=i):
with self.assertRaises(IndexError) as ctx:
choices[i]
self.assertTrue(str(ctx.exception).endswith("index out of range"))
class FlattenChoicesTests(SimpleTestCase):
def test_empty(self):
def generator():
yield from ()
for choices in ({}, [], (), set(), frozenset(), generator(), None, ""):
with self.subTest(choices=choices):
result = flatten_choices(choices)
self.assertIsInstance(result, collections.abc.Generator)
self.assertEqual(list(result), [])
def test_non_empty(self):
choices = [
("C", _("Club")),
("D", _("Diamond")),
("H", _("Heart")),
("S", _("Spade")),
]
result = flatten_choices(choices)
self.assertIsInstance(result, collections.abc.Generator)
self.assertEqual(list(result), choices)
def test_nested_choices(self):
choices = [
("Audio", [("vinyl", _("Vinyl")), ("cd", _("CD"))]),
("Video", [("vhs", _("VHS Tape")), ("dvd", _("DVD"))]),
("unknown", _("Unknown")),
]
expected = [
("vinyl", _("Vinyl")),
("cd", _("CD")),
("vhs", _("VHS Tape")),
("dvd", _("DVD")),
("unknown", _("Unknown")),
]
result = flatten_choices(choices)
self.assertIsInstance(result, collections.abc.Generator)
self.assertEqual(list(result), expected)
class NormalizeFieldChoicesTests(SimpleTestCase):
expected = [
("C", _("Club")),
("D", _("Diamond")),
("H", _("Heart")),
("S", _("Spade")),
]
expected_nested = [
("Audio", [("vinyl", _("Vinyl")), ("cd", _("CD"))]),
("Video", [("vhs", _("VHS Tape")), ("dvd", _("DVD"))]),
("unknown", _("Unknown")),
]
invalid = [
1j,
123,
123.45,
"invalid",
b"invalid",
_("invalid"),
object(),
None,
True,
False,
]
invalid_iterable = [
# Special cases of string-likes which would unpack incorrectly.
["ab"],
[b"ab"],
[_("ab")],
# Non-iterable items or iterable items with incorrect number of
# elements that cannot be unpacked.
[123],
[("value",)],
[("value", "label", "other")],
]
invalid_nested = [
# Nested choices can only be two-levels deep, so return callables,
# mappings, iterables, etc. at deeper levels unmodified.
[("Group", [("Value", lambda: "Label")])],
[("Group", [("Value", {"Label 1?": "Label 2?"})])],
[("Group", [("Value", [("Label 1?", "Label 2?")])])],
]
def test_empty(self):
def generator():
yield from ()
for choices in ({}, [], (), set(), frozenset(), generator()):
with self.subTest(choices=choices):
self.assertEqual(normalize_choices(choices), [])
def test_choices(self):
class Medal(TextChoices):
GOLD = "GOLD", _("Gold")
SILVER = "SILVER", _("Silver")
BRONZE = "BRONZE", _("Bronze")
expected = [
("GOLD", _("Gold")),
("SILVER", _("Silver")),
("BRONZE", _("Bronze")),
]
self.assertEqual(normalize_choices(Medal), expected)
def test_callable(self):
def get_choices():
return {
"C": _("Club"),
"D": _("Diamond"),
"H": _("Heart"),
"S": _("Spade"),
}
get_choices_spy = mock.Mock(wraps=get_choices)
output = normalize_choices(get_choices_spy)
get_choices_spy.assert_not_called()
self.assertIsInstance(output, CallableChoiceIterator)
self.assertEqual(output, self.expected)
get_choices_spy.assert_called_once()
def test_mapping(self):
choices = {
"C": _("Club"),
"D": _("Diamond"),
"H": _("Heart"),
"S": _("Spade"),
}
self.assertEqual(normalize_choices(choices), self.expected)
def test_iterable(self):
choices = [
("C", _("Club")),
("D", _("Diamond")),
("H", _("Heart")),
("S", _("Spade")),
]
self.assertEqual(normalize_choices(choices), self.expected)
def test_iterator(self):
def generator():
yield "C", _("Club")
yield "D", _("Diamond")
yield "H", _("Heart")
yield "S", _("Spade")
choices = generator()
self.assertEqual(normalize_choices(choices), self.expected)
def test_nested_callable(self):
def get_audio_choices():
return [("vinyl", _("Vinyl")), ("cd", _("CD"))]
def get_video_choices():
return [("vhs", _("VHS Tape")), ("dvd", _("DVD"))]
def get_media_choices():
return [
("Audio", get_audio_choices),
("Video", get_video_choices),
("unknown", _("Unknown")),
]
get_media_choices_spy = mock.Mock(wraps=get_media_choices)
output = normalize_choices(get_media_choices_spy)
get_media_choices_spy.assert_not_called()
self.assertIsInstance(output, CallableChoiceIterator)
self.assertEqual(output, self.expected_nested)
get_media_choices_spy.assert_called_once()
def test_nested_mapping(self):
choices = {
"Audio": {"vinyl": _("Vinyl"), "cd": _("CD")},
"Video": {"vhs": _("VHS Tape"), "dvd": _("DVD")},
"unknown": _("Unknown"),
}
self.assertEqual(normalize_choices(choices), self.expected_nested)
def test_nested_iterable(self):
choices = [
("Audio", [("vinyl", _("Vinyl")), ("cd", _("CD"))]),
("Video", [("vhs", _("VHS Tape")), ("dvd", _("DVD"))]),
("unknown", _("Unknown")),
]
self.assertEqual(normalize_choices(choices), self.expected_nested)
def test_nested_iterator(self):
def generate_audio_choices():
yield "vinyl", _("Vinyl")
yield "cd", _("CD")
def generate_video_choices():
yield "vhs", _("VHS Tape")
yield "dvd", _("DVD")
def generate_media_choices():
yield "Audio", generate_audio_choices()
yield "Video", generate_video_choices()
yield "unknown", _("Unknown")
choices = generate_media_choices()
self.assertEqual(normalize_choices(choices), self.expected_nested)
def test_callable_non_canonical(self):
# Canonical form is list of 2-tuple, but nested lists should work.
def get_choices():
return [
["C", _("Club")],
["D", _("Diamond")],
["H", _("Heart")],
["S", _("Spade")],
]
get_choices_spy = mock.Mock(wraps=get_choices)
output = normalize_choices(get_choices_spy)
get_choices_spy.assert_not_called()
self.assertIsInstance(output, CallableChoiceIterator)
self.assertEqual(output, self.expected)
get_choices_spy.assert_called_once()
def test_iterable_non_canonical(self):
# Canonical form is list of 2-tuple, but nested lists should work.
choices = [
["C", _("Club")],
["D", _("Diamond")],
["H", _("Heart")],
["S", _("Spade")],
]
self.assertEqual(normalize_choices(choices), self.expected)
def test_iterator_non_canonical(self):
# Canonical form is list of 2-tuple, but nested lists should work.
def generator():
yield ["C", _("Club")]
yield ["D", _("Diamond")]
yield ["H", _("Heart")]
yield ["S", _("Spade")]
choices = generator()
self.assertEqual(normalize_choices(choices), self.expected)
def test_nested_callable_non_canonical(self):
# Canonical form is list of 2-tuple, but nested lists should work.
def get_audio_choices():
return [["vinyl", _("Vinyl")], ["cd", _("CD")]]
def get_video_choices():
return [["vhs", _("VHS Tape")], ["dvd", _("DVD")]]
def get_media_choices():
return [
["Audio", get_audio_choices],
["Video", get_video_choices],
["unknown", _("Unknown")],
]
get_media_choices_spy = mock.Mock(wraps=get_media_choices)
output = normalize_choices(get_media_choices_spy)
get_media_choices_spy.assert_not_called()
self.assertIsInstance(output, CallableChoiceIterator)
self.assertEqual(output, self.expected_nested)
get_media_choices_spy.assert_called_once()
def test_nested_iterable_non_canonical(self):
# Canonical form is list of 2-tuple, but nested lists should work.
choices = [
["Audio", [["vinyl", _("Vinyl")], ["cd", _("CD")]]],
["Video", [["vhs", _("VHS Tape")], ["dvd", _("DVD")]]],
["unknown", _("Unknown")],
]
self.assertEqual(normalize_choices(choices), self.expected_nested)
def test_nested_iterator_non_canonical(self):
# Canonical form is list of 2-tuple, but nested lists should work.
def generator():
yield ["Audio", [["vinyl", _("Vinyl")], ["cd", _("CD")]]]
yield ["Video", [["vhs", _("VHS Tape")], ["dvd", _("DVD")]]]
yield ["unknown", _("Unknown")]
choices = generator()
self.assertEqual(normalize_choices(choices), self.expected_nested)
def test_nested_mixed_mapping_and_iterable(self):
# Although not documented, as it's better to stick to either mappings
# or iterables, nesting of mappings within iterables and vice versa
# works and is likely to occur in the wild. This is supported by the
# recursive call to `normalize_choices()` which will normalize nested
# choices.
choices = {
"Audio": [("vinyl", _("Vinyl")), ("cd", _("CD"))],
"Video": [("vhs", _("VHS Tape")), ("dvd", _("DVD"))],
"unknown": _("Unknown"),
}
self.assertEqual(normalize_choices(choices), self.expected_nested)
choices = [
("Audio", {"vinyl": _("Vinyl"), "cd": _("CD")}),
("Video", {"vhs": _("VHS Tape"), "dvd": _("DVD")}),
("unknown", _("Unknown")),
]
self.assertEqual(normalize_choices(choices), self.expected_nested)
def test_iterable_set(self):
# Although not documented, as sets are unordered which results in
# randomised order in form fields, passing a set of 2-tuples works.
# Consistent ordering of choices on model fields in migrations is
# enforced by the migrations serializer.
choices = {
("C", _("Club")),
("D", _("Diamond")),
("H", _("Heart")),
("S", _("Spade")),
}
self.assertEqual(sorted(normalize_choices(choices)), sorted(self.expected))
def test_unsupported_values_returned_unmodified(self):
# Unsupported values must be returned unmodified for model system check
# to work correctly.
for value in self.invalid + self.invalid_iterable + self.invalid_nested:
with self.subTest(value=value):
self.assertEqual(normalize_choices(value), value)
def test_unsupported_values_from_callable_returned_unmodified(self):
for value in self.invalid_iterable + self.invalid_nested:
with self.subTest(value=value):
self.assertEqual(normalize_choices(lambda: value), value)
def test_unsupported_values_from_iterator_returned_unmodified(self):
for value in self.invalid_nested:
with self.subTest(value=value):
self.assertEqual(
normalize_choices((lambda: (yield from value))()),
value,
)
|
./temp_repos/django/django/utils/choices.py
|
./temp_repos/django/tests/utils_tests/test_choices.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'BaseChoiceIterator'.
Context:
- Class Name: BaseChoiceIterator
- Dependencies to Mock: func, blank_choice, choices
- Key Imports: django.utils.functional, collections.abc, itertools, django.db.models.enums
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
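A minimal sketch follows. BaseChoiceIterator defines no constructor, so the
'choices' dependency listed above is represented by a mock feeding a concrete
subclass; the class and test names are illustrative:
import unittest
from unittest import mock
from django.utils.choices import BaseChoiceIterator
class BaseChoiceIteratorSketchTests(unittest.TestCase):
    def test_iter_must_be_overridden(self):
        # Arrange / Act / Assert (failure scenario).
        with self.assertRaises(NotImplementedError):
            iter(BaseChoiceIterator())
    def test_eq_and_getitem_with_mocked_choices_source(self):
        # Arrange: the choices dependency is a mock returning two pairs.
        choices_source = mock.Mock(return_value=[(1, "One"), (2, "Two")])
        class SketchIterator(BaseChoiceIterator):
            def __iter__(self):
                return iter(choices_source())
        iterator = SketchIterator()
        # Act / Assert: __eq__ against a plain list, indexing, and overflow.
        self.assertEqual(iterator, [(1, "One"), (2, "Two")])
        self.assertEqual(iterator[0], (1, "One"))
        self.assertEqual(iterator[-1], (2, "Two"))
        with self.assertRaises(IndexError):
            iterator[5]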
|
BaseChoiceIterator
|
python
|
import gzip
import re
import secrets
import textwrap
import unicodedata
from collections import deque
from gzip import GzipFile
from gzip import compress as gzip_compress
from html import escape
from html.parser import HTMLParser
from io import BytesIO
from django.core.exceptions import SuspiciousFileOperation
from django.utils.functional import (
SimpleLazyObject,
cached_property,
keep_lazy_text,
lazy,
)
from django.utils.regex_helper import _lazy_re_compile
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy, pgettext
@keep_lazy_text
def capfirst(x):
"""Capitalize the first letter of a string."""
if not x:
return x
if not isinstance(x, str):
x = str(x)
return x[0].upper() + x[1:]
# Set up regular expressions
re_newlines = _lazy_re_compile(r"\r\n|\r") # Used in normalize_newlines
re_camel_case = _lazy_re_compile(r"(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))")
@keep_lazy_text
def wrap(text, width):
"""
A word-wrap function that preserves existing line breaks. Expects that
existing line breaks are posix newlines.
Preserve all white space, except that added line breaks consume the space
on which they break the line.
Don't wrap long words, thus the output text may have lines longer than
``width``.
"""
wrapper = textwrap.TextWrapper(
width=width,
break_long_words=False,
break_on_hyphens=False,
replace_whitespace=False,
)
result = []
for line in text.splitlines():
wrapped = wrapper.wrap(line)
if not wrapped:
# If `line` contains only whitespace that is dropped, restore it.
result.append(line)
else:
result.extend(wrapped)
if text.endswith("\n"):
# If `text` ends with a newline, preserve it.
result.append("")
return "\n".join(result)
def add_truncation_text(text, truncate=None):
if truncate is None:
truncate = pgettext(
"String to return when truncating text", "%(truncated_text)s…"
)
if "%(truncated_text)s" in truncate:
return truncate % {"truncated_text": text}
# The truncation text didn't contain the %(truncated_text)s string
# replacement argument so just append it to the text.
if text.endswith(truncate):
# But don't append the truncation text if the current text already ends
# with it.
return text
return f"{text}{truncate}"
def calculate_truncate_chars_length(length, replacement):
truncate_len = length
for char in add_truncation_text("", replacement):
if not unicodedata.combining(char):
truncate_len -= 1
if truncate_len == 0:
break
return truncate_len
class TruncateHTMLParser(HTMLParser):
class TruncationCompleted(Exception):
pass
def __init__(self, *, length, replacement, convert_charrefs=True):
super().__init__(convert_charrefs=convert_charrefs)
self.tags = deque()
self.output = []
self.remaining = length
self.replacement = replacement
@cached_property
def void_elements(self):
from django.utils.html import VOID_ELEMENTS
return VOID_ELEMENTS
def handle_startendtag(self, tag, attrs):
self.handle_starttag(tag, attrs)
if tag not in self.void_elements:
self.handle_endtag(tag)
def handle_starttag(self, tag, attrs):
self.output.append(self.get_starttag_text())
if tag not in self.void_elements:
self.tags.appendleft(tag)
def handle_endtag(self, tag):
if tag not in self.void_elements:
self.output.append(f"</{tag}>")
try:
self.tags.remove(tag)
except ValueError:
pass
def handle_data(self, data):
data, output = self.process(data)
data_len = len(data)
if self.remaining < data_len:
self.remaining = 0
self.output.append(add_truncation_text(output, self.replacement))
raise self.TruncationCompleted
self.remaining -= data_len
self.output.append(output)
def feed(self, data):
try:
super().feed(data)
except self.TruncationCompleted:
self.output.extend([f"</{tag}>" for tag in self.tags])
self.tags.clear()
self.reset()
else:
# No data was handled.
self.reset()
class TruncateCharsHTMLParser(TruncateHTMLParser):
def __init__(self, *, length, replacement, convert_charrefs=True):
self.length = length
self.processed_chars = 0
super().__init__(
length=calculate_truncate_chars_length(length, replacement),
replacement=replacement,
convert_charrefs=convert_charrefs,
)
def process(self, data):
self.processed_chars += len(data)
if (self.processed_chars == self.length) and (
sum(len(p) for p in self.output) + len(data) == len(self.rawdata)
):
self.output.append(data)
raise self.TruncationCompleted
output = escape("".join(data[: self.remaining]))
return data, output
class TruncateWordsHTMLParser(TruncateHTMLParser):
def process(self, data):
data = re.split(r"(?<=\S)\s+(?=\S)", data)
output = escape(" ".join(data[: self.remaining]))
return data, output
class Truncator(SimpleLazyObject):
"""
An object used to truncate text, either by characters or words.
When truncating HTML text (either chars or words), input will be limited to
at most `MAX_LENGTH_HTML` characters.
"""
# 5 million characters are approximately 4000 text pages or 3 web pages.
MAX_LENGTH_HTML = 5_000_000
def __init__(self, text):
super().__init__(lambda: str(text))
def chars(self, num, truncate=None, html=False):
"""
Return the text truncated to be no longer than the specified number
of characters.
`truncate` specifies what should be used to notify that the string has
been truncated, defaulting to a translatable string of an ellipsis.
"""
self._setup()
length = int(num)
if length <= 0:
return ""
text = unicodedata.normalize("NFC", self._wrapped)
if html:
parser = TruncateCharsHTMLParser(length=length, replacement=truncate)
parser.feed(text)
parser.close()
return "".join(parser.output)
return self._text_chars(length, truncate, text)
def _text_chars(self, length, truncate, text):
"""Truncate a string after a certain number of chars."""
truncate_len = calculate_truncate_chars_length(length, truncate)
s_len = 0
end_index = None
for i, char in enumerate(text):
if unicodedata.combining(char):
# Don't consider combining characters
# as adding to the string length
continue
s_len += 1
if end_index is None and s_len > truncate_len:
end_index = i
if s_len > length:
# Return the truncated string
return add_truncation_text(text[: end_index or 0], truncate)
# Return the original string since no truncation was necessary
return text
def words(self, num, truncate=None, html=False):
"""
Truncate a string after a certain number of words. `truncate` specifies
what should be used to notify that the string has been truncated,
defaulting to ellipsis.
"""
self._setup()
length = int(num)
if length <= 0:
return ""
if html:
parser = TruncateWordsHTMLParser(length=length, replacement=truncate)
parser.feed(self._wrapped)
parser.close()
return "".join(parser.output)
return self._text_words(length, truncate)
def _text_words(self, length, truncate):
"""
Truncate a string after a certain number of words.
Strip newlines in the string.
"""
words = self._wrapped.split()
if len(words) > length:
words = words[:length]
return add_truncation_text(" ".join(words), truncate)
return " ".join(words)
@keep_lazy_text
def get_valid_filename(name):
"""
Return the given string converted to a string that can be used for a clean
filename. Remove leading and trailing spaces; convert other spaces to
underscores; and remove anything that is not an alphanumeric, dash,
underscore, or dot.
>>> get_valid_filename("john's portrait in 2004.jpg")
'johns_portrait_in_2004.jpg'
"""
s = str(name).strip().replace(" ", "_")
s = re.sub(r"(?u)[^-\w.]", "", s)
if s in {"", ".", ".."}:
raise SuspiciousFileOperation("Could not derive file name from '%s'" % name)
return s
@keep_lazy_text
def get_text_list(list_, last_word=gettext_lazy("or")):
"""
>>> get_text_list(['a', 'b', 'c', 'd'])
'a, b, c or d'
>>> get_text_list(['a', 'b', 'c'], 'and')
'a, b and c'
>>> get_text_list(['a', 'b'], 'and')
'a and b'
>>> get_text_list(['a'])
'a'
>>> get_text_list([])
''
"""
if not list_:
return ""
if len(list_) == 1:
return str(list_[0])
return "%s %s %s" % (
# Translators: This string is used as a separator between list elements
_(", ").join(str(i) for i in list_[:-1]),
str(last_word),
str(list_[-1]),
)
@keep_lazy_text
def normalize_newlines(text):
"""Normalize CRLF and CR newlines to just LF."""
return re_newlines.sub("\n", str(text))
@keep_lazy_text
def phone2numeric(phone):
"""Convert a phone number with letters into its numeric equivalent."""
char2number = {
"a": "2",
"b": "2",
"c": "2",
"d": "3",
"e": "3",
"f": "3",
"g": "4",
"h": "4",
"i": "4",
"j": "5",
"k": "5",
"l": "5",
"m": "6",
"n": "6",
"o": "6",
"p": "7",
"q": "7",
"r": "7",
"s": "7",
"t": "8",
"u": "8",
"v": "8",
"w": "9",
"x": "9",
"y": "9",
"z": "9",
}
return "".join(char2number.get(c, c) for c in phone.lower())
def _get_random_filename(max_random_bytes):
return b"a" * secrets.randbelow(max_random_bytes)
def compress_string(s, *, max_random_bytes=None):
compressed_data = gzip_compress(s, compresslevel=6, mtime=0)
if not max_random_bytes:
return compressed_data
compressed_view = memoryview(compressed_data)
header = bytearray(compressed_view[:10])
header[3] = gzip.FNAME
filename = _get_random_filename(max_random_bytes) + b"\x00"
return bytes(header) + filename + compressed_view[10:]
class StreamingBuffer(BytesIO):
def read(self):
ret = self.getvalue()
self.seek(0)
self.truncate()
return ret
# Like compress_string, but for iterators of strings.
def compress_sequence(sequence, *, max_random_bytes=None):
buf = StreamingBuffer()
filename = _get_random_filename(max_random_bytes) if max_random_bytes else None
with GzipFile(
filename=filename, mode="wb", compresslevel=6, fileobj=buf, mtime=0
) as zfile:
# Output headers...
yield buf.read()
for item in sequence:
zfile.write(item)
data = buf.read()
if data:
yield data
yield buf.read()
async def acompress_sequence(sequence, *, max_random_bytes=None):
buf = StreamingBuffer()
filename = _get_random_filename(max_random_bytes) if max_random_bytes else None
with GzipFile(
filename=filename, mode="wb", compresslevel=6, fileobj=buf, mtime=0
) as zfile:
# Output headers...
yield buf.read()
async for item in sequence:
zfile.write(item)
data = buf.read()
if data:
yield data
yield buf.read()
# Expression to match some_token and some_token="with spaces" (and similarly
# for single-quoted strings).
smart_split_re = _lazy_re_compile(
r"""
((?:
[^\s'"]*
(?:
(?:"(?:[^"\\]|\\.)*" | '(?:[^'\\]|\\.)*')
[^\s'"]*
)+
) | \S+)
""",
re.VERBOSE,
)
def smart_split(text):
r"""
Generator that splits a string by spaces, leaving quoted phrases together.
Supports both single and double quotes, and supports escaping quotes with
backslashes. In the output, strings will keep their initial and trailing
quote marks and escaped quotes will remain escaped (the results can then
be further processed with unescape_string_literal()).
>>> list(smart_split(r'This is "a person\'s" test.'))
['This', 'is', '"a person\\\'s"', 'test.']
>>> list(smart_split(r"Another 'person\'s' test."))
['Another', "'person\\'s'", 'test.']
>>> list(smart_split(r'A "\"funky\" style" test.'))
['A', '"\\"funky\\" style"', 'test.']
"""
for bit in smart_split_re.finditer(str(text)):
yield bit[0]
@keep_lazy_text
def unescape_string_literal(s):
r"""
Convert quoted string literals to unquoted strings with escaped quotes and
backslashes unquoted::
>>> unescape_string_literal('"abc"')
'abc'
>>> unescape_string_literal("'abc'")
'abc'
>>> unescape_string_literal('"a \"bc\""')
'a "bc"'
>>> unescape_string_literal("'\'ab\' c'")
"'ab' c"
"""
if not s or s[0] not in "\"'" or s[-1] != s[0]:
raise ValueError("Not a string literal: %r" % s)
quote = s[0]
return s[1:-1].replace(r"\%s" % quote, quote).replace(r"\\", "\\")
@keep_lazy_text
def slugify(value, allow_unicode=False):
"""
Convert to ASCII if 'allow_unicode' is False. Convert spaces or repeated
dashes to single dashes. Remove characters that aren't alphanumerics,
underscores, or hyphens. Convert to lowercase. Also strip leading and
trailing whitespace, dashes, and underscores.
"""
value = str(value)
if allow_unicode:
value = unicodedata.normalize("NFKC", value)
else:
value = (
unicodedata.normalize("NFKD", value)
.encode("ascii", "ignore")
.decode("ascii")
)
value = re.sub(r"[^\w\s-]", "", value.lower())
return re.sub(r"[-\s]+", "-", value).strip("-_")
def camel_case_to_spaces(value):
"""
Split CamelCase and convert to lowercase. Strip surrounding whitespace.
"""
return re_camel_case.sub(r" \1", value).strip().lower()
def _format_lazy(format_string, *args, **kwargs):
"""
Apply str.format() on 'format_string' where format_string, args,
and/or kwargs might be lazy.
"""
return format_string.format(*args, **kwargs)
format_lazy = lazy(_format_lazy, str)
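# --- Editorial sketch (not part of the module above): expected outputs taken
# from the test suite that follows.
if __name__ == "__main__":
    truncator = Truncator("The quick brown fox jumped over the lazy dog.")
    assert truncator.chars(21) == "The quick brown fox …"
    assert truncator.words(4) == "The quick brown fox…"
    assert slugify("Hello, World!") == "hello-world"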
|
import json
import sys
from unittest.mock import patch
from django.core.exceptions import SuspiciousFileOperation
from django.test import SimpleTestCase
from django.utils import text
from django.utils.functional import lazystr
from django.utils.text import format_lazy
from django.utils.translation import gettext_lazy, override
IS_WIDE_BUILD = len("\U0001f4a9") == 1
class TestUtilsText(SimpleTestCase):
def test_get_text_list(self):
self.assertEqual(text.get_text_list(["a", "b", "c", "d"]), "a, b, c or d")
self.assertEqual(text.get_text_list(["a", "b", "c"], "and"), "a, b and c")
self.assertEqual(text.get_text_list(["a", "b"], "and"), "a and b")
self.assertEqual(text.get_text_list(["a"]), "a")
self.assertEqual(text.get_text_list([]), "")
with override("ar"):
self.assertEqual(text.get_text_list(["a", "b", "c"]), "a، b أو c")
def test_smart_split(self):
testdata = [
('This is "a person" test.', ["This", "is", '"a person"', "test."]),
('This is "a person\'s" test.', ["This", "is", '"a person\'s"', "test."]),
('This is "a person\\"s" test.', ["This", "is", '"a person\\"s"', "test."]),
("\"a 'one", ['"a', "'one"]),
("all friends' tests", ["all", "friends'", "tests"]),
(
'url search_page words="something else"',
["url", "search_page", 'words="something else"'],
),
(
"url search_page words='something else'",
["url", "search_page", "words='something else'"],
),
(
'url search_page words "something else"',
["url", "search_page", "words", '"something else"'],
),
(
'url search_page words-"something else"',
["url", "search_page", 'words-"something else"'],
),
("url search_page words=hello", ["url", "search_page", "words=hello"]),
(
'url search_page words="something else',
["url", "search_page", 'words="something', "else"],
),
("cut:','|cut:' '", ["cut:','|cut:' '"]),
(lazystr("a b c d"), ["a", "b", "c", "d"]), # Test for #20231
]
for test, expected in testdata:
with self.subTest(value=test):
self.assertEqual(list(text.smart_split(test)), expected)
def test_truncate_chars(self):
truncator = text.Truncator("The quick brown fox jumped over the lazy dog.")
self.assertEqual(
"The quick brown fox jumped over the lazy dog.", truncator.chars(100)
)
self.assertEqual("The quick brown fox …", truncator.chars(21))
self.assertEqual("The quick brown fo.....", truncator.chars(23, "....."))
self.assertEqual(".....", truncator.chars(4, "....."))
nfc = text.Truncator("o\xfco\xfco\xfco\xfc")
nfd = text.Truncator("ou\u0308ou\u0308ou\u0308ou\u0308")
self.assertEqual("oüoüoüoü", nfc.chars(8))
self.assertEqual("oüoüoüoü", nfd.chars(8))
self.assertEqual("oü…", nfc.chars(3))
self.assertEqual("oü…", nfd.chars(3))
# Ensure the final length is calculated correctly when there are
# combining characters with no precomposed form, and that combining
# characters are not split up.
truncator = text.Truncator("-B\u030aB\u030a----8")
self.assertEqual("-B\u030a…", truncator.chars(3))
self.assertEqual("-B\u030aB\u030a-…", truncator.chars(5))
self.assertEqual("-B\u030aB\u030a----8", truncator.chars(8))
# Ensure the length of the end text is correctly calculated when it
# contains combining characters with no precomposed form.
truncator = text.Truncator("-----")
self.assertEqual("---B\u030a", truncator.chars(4, "B\u030a"))
self.assertEqual("-----", truncator.chars(5, "B\u030a"))
# Make a best effort to shorten to the desired length, but requesting
# a length shorter than the ellipsis shouldn't break
self.assertEqual("...", text.Truncator("asdf").chars(1, truncate="..."))
# lazy strings are handled correctly
self.assertEqual(
text.Truncator(lazystr("The quick brown fox")).chars(10), "The quick…"
)
def test_truncate_chars_html(self):
truncator = text.Truncator(
'<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em>'
"</strong></p>"
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em>'
"</strong></p>",
truncator.chars(80, html=True),
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em>'
"</strong></p>",
truncator.chars(46, html=True),
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox jumped over the lazy dog…</em>'
"</strong></p>",
truncator.chars(45, html=True),
)
self.assertEqual(
'<p id="par"><strong><em>The quick…</em></strong></p>',
truncator.chars(10, html=True),
)
self.assertEqual(
'<p id="par"><strong><em>…</em></strong></p>',
truncator.chars(1, html=True),
)
self.assertEqual("", truncator.chars(0, html=True))
self.assertEqual("", truncator.chars(-1, html=True))
self.assertEqual(
'<p id="par"><strong><em>The qu....</em></strong></p>',
truncator.chars(10, "....", html=True),
)
self.assertEqual(
'<p id="par"><strong><em>The quick </em></strong></p>',
truncator.chars(10, "", html=True),
)
truncator = text.Truncator("foo</p>")
self.assertEqual("foo</p>", truncator.chars(5, html=True))
@patch("django.utils.text.Truncator.MAX_LENGTH_HTML", 10_000)
def test_truncate_chars_html_size_limit(self):
max_len = text.Truncator.MAX_LENGTH_HTML
bigger_len = text.Truncator.MAX_LENGTH_HTML + 1
valid_html = "<p>Joel is a slug</p>" # 14 chars
perf_test_values = [
("</a" + "\t" * (max_len - 6) + "//>", "</a>"),
("</p" + "\t" * bigger_len + "//>", "</p>"),
("&" * bigger_len, ""),
("_X<<<<<<<<<<<>", "_X<<<<<<<…"),
(valid_html * bigger_len, "<p>Joel is a…</p>"), # 10 chars
]
for value, expected in perf_test_values:
with self.subTest(value=value):
truncator = text.Truncator(value)
self.assertEqual(expected, truncator.chars(10, html=True))
def test_truncate_chars_html_with_newline_inside_tag(self):
truncator = text.Truncator(
'<p>The quick <a href="xyz.html"\n id="mylink">brown fox</a> jumped over '
"the lazy dog.</p>"
)
self.assertEqual(
'<p>The quick <a href="xyz.html"\n id="mylink">brow…</a></p>',
truncator.chars(15, html=True),
)
self.assertEqual(
"<p>Th…</p>",
truncator.chars(3, html=True),
)
def test_truncate_chars_html_with_void_elements(self):
truncator = text.Truncator(
"<br/>The <hr />quick brown fox jumped over the lazy dog."
)
self.assertEqual("<br/>The <hr />quick brown…", truncator.chars(16, html=True))
truncator = text.Truncator(
"<br>The <hr/>quick <em>brown fox</em> jumped over the lazy dog."
)
self.assertEqual(
"<br>The <hr/>quick <em>brown…</em>", truncator.chars(16, html=True)
)
self.assertEqual("<br>The <hr/>q…", truncator.chars(6, html=True))
self.assertEqual("<br>The <hr/>…", truncator.chars(5, html=True))
self.assertEqual("<br>The…", truncator.chars(4, html=True))
self.assertEqual("<br>Th…", truncator.chars(3, html=True))
def test_truncate_chars_html_with_html_entities(self):
truncator = text.Truncator(
"<i>Buenos días! ¿Cómo está?</i>"
)
self.assertEqual(
"<i>Buenos días! ¿Cómo está?</i>",
truncator.chars(40, html=True),
)
self.assertEqual(
"<i>Buenos días…</i>",
truncator.chars(12, html=True),
)
self.assertEqual(
"<i>Buenos días! ¿Cómo está…</i>",
truncator.chars(24, html=True),
)
truncator = text.Truncator("<p>I <3 python, what about you?</p>")
self.assertEqual("<p>I <3 python, wh…</p>", truncator.chars(16, html=True))
def test_truncate_words(self):
truncator = text.Truncator("The quick brown fox jumped over the lazy dog.")
self.assertEqual(
"The quick brown fox jumped over the lazy dog.", truncator.words(10)
)
self.assertEqual("The quick brown fox…", truncator.words(4))
self.assertEqual("The quick brown fox[snip]", truncator.words(4, "[snip]"))
# lazy strings are handled correctly
truncator = text.Truncator(
lazystr("The quick brown fox jumped over the lazy dog.")
)
self.assertEqual("The quick brown fox…", truncator.words(4))
self.assertEqual("", truncator.words(0))
self.assertEqual("", truncator.words(-1))
def test_truncate_html_words(self):
truncator = text.Truncator(
'<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em>'
"</strong></p>"
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em>'
"</strong></p>",
truncator.words(10, html=True),
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox…</em></strong></p>',
truncator.words(4, html=True),
)
self.assertEqual(
"",
truncator.words(0, html=True),
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox....</em></strong></p>',
truncator.words(4, "....", html=True),
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox</em></strong></p>',
truncator.words(4, "", html=True),
)
truncator = text.Truncator(
"<p>The quick \t brown fox jumped over the lazy dog.</p>"
)
self.assertEqual(
"<p>The quick brown fox…</p>",
truncator.words(4, html=True),
)
        # Test with a newline inside a tag
truncator = text.Truncator(
'<p>The quick <a href="xyz.html"\n id="mylink">brown fox</a> jumped over '
"the lazy dog.</p>"
)
self.assertEqual(
'<p>The quick <a href="xyz.html"\n id="mylink">brown…</a></p>',
truncator.words(3, html=True),
)
self.assertEqual(
"<p>The…</p>",
truncator.words(1, html=True),
)
# Test self-closing tags
truncator = text.Truncator(
"<br/>The <hr />quick brown fox jumped over the lazy dog."
)
self.assertEqual("<br/>The <hr />quick brown…", truncator.words(3, html=True))
truncator = text.Truncator(
"<br>The <hr/>quick <em>brown fox</em> jumped over the lazy dog."
)
self.assertEqual(
"<br>The <hr/>quick <em>brown…</em>", truncator.words(3, html=True)
)
# Test html entities
truncator = text.Truncator(
"<i>Buenos días! ¿Cómo está?</i>"
)
self.assertEqual(
"<i>Buenos días! ¿Cómo…</i>",
truncator.words(3, html=True),
)
truncator = text.Truncator("<p>I <3 python, what about you?</p>")
self.assertEqual("<p>I <3 python,…</p>", truncator.words(3, html=True))
truncator = text.Truncator("foo</p>")
self.assertEqual("foo</p>", truncator.words(3, html=True))
# Only open brackets.
truncator = text.Truncator("<" * 60_000)
self.assertEqual(truncator.words(1, html=True), "<…")
# Tags with special chars in attrs.
truncator = text.Truncator(
"""<i style="margin: 5%; font: *;">Hello, my dear lady!</i>"""
)
self.assertEqual(
"""<i style="margin: 5%; font: *;">Hello, my dear…</i>""",
truncator.words(3, html=True),
)
# Tags with special non-latin chars in attrs.
truncator = text.Truncator("""<p data-x="א">Hello, my dear lady!</p>""")
self.assertEqual(
"""<p data-x="א">Hello, my dear…</p>""",
truncator.words(3, html=True),
)
# Misplaced brackets.
truncator = text.Truncator("hello >< world")
self.assertEqual(truncator.words(1, html=True), "hello…")
self.assertEqual(truncator.words(2, html=True), "hello >…")
self.assertEqual(truncator.words(3, html=True), "hello ><…")
self.assertEqual(truncator.words(4, html=True), "hello >< world")
@patch("django.utils.text.Truncator.MAX_LENGTH_HTML", 10_000)
def test_truncate_words_html_size_limit(self):
max_len = text.Truncator.MAX_LENGTH_HTML
bigger_len = text.Truncator.MAX_LENGTH_HTML + 1
valid_html = "<p>Joel is a slug</p>" # 4 words
perf_test_values = [
("</a" + "\t" * (max_len - 6) + "//>", "</a>"),
("</p" + "\t" * bigger_len + "//>", "</p>"),
("&" * max_len, ""),
("&" * bigger_len, ""),
("_X<<<<<<<<<<<>", "_X<<<<<<<<<<<>"),
(valid_html * bigger_len, valid_html * 12 + "<p>Joel is…</p>"), # 50 words
]
for value, expected in perf_test_values:
with self.subTest(value=value):
truncator = text.Truncator(value)
self.assertEqual(expected, truncator.words(50, html=True))
def test_wrap(self):
digits = "1234 67 9"
self.assertEqual(text.wrap(digits, 100), "1234 67 9")
self.assertEqual(text.wrap(digits, 9), "1234 67 9")
self.assertEqual(text.wrap(digits, 8), "1234 67\n9")
self.assertEqual(text.wrap("short\na long line", 7), "short\na long\nline")
self.assertEqual(
text.wrap("do-not-break-long-words please? ok", 8),
"do-not-break-long-words\nplease?\nok",
)
long_word = "l%sng" % ("o" * 20)
self.assertEqual(text.wrap(long_word, 20), long_word)
self.assertEqual(
text.wrap("a %s word" % long_word, 10), "a\n%s\nword" % long_word
)
self.assertEqual(text.wrap(lazystr(digits), 100), "1234 67 9")
def test_normalize_newlines(self):
self.assertEqual(
text.normalize_newlines("abc\ndef\rghi\r\n"), "abc\ndef\nghi\n"
)
self.assertEqual(text.normalize_newlines("\n\r\r\n\r"), "\n\n\n\n")
self.assertEqual(text.normalize_newlines("abcdefghi"), "abcdefghi")
self.assertEqual(text.normalize_newlines(""), "")
self.assertEqual(
text.normalize_newlines(lazystr("abc\ndef\rghi\r\n")), "abc\ndef\nghi\n"
)
def test_phone2numeric(self):
numeric = text.phone2numeric("0800 flowers")
self.assertEqual(numeric, "0800 3569377")
lazy_numeric = lazystr(text.phone2numeric("0800 flowers"))
self.assertEqual(lazy_numeric, "0800 3569377")
def test_slugify(self):
items = (
# given - expected - Unicode?
("Hello, World!", "hello-world", False),
("spam & eggs", "spam-eggs", False),
(" multiple---dash and space ", "multiple-dash-and-space", False),
("\t whitespace-in-value \n", "whitespace-in-value", False),
("underscore_in-value", "underscore_in-value", False),
("__strip__underscore-value___", "strip__underscore-value", False),
("--strip-dash-value---", "strip-dash-value", False),
("__strip-mixed-value---", "strip-mixed-value", False),
("_ -strip-mixed-value _-", "strip-mixed-value", False),
("spam & ıçüş", "spam-ıçüş", True),
("foo ıç bar", "foo-ıç-bar", True),
(" foo ıç bar", "foo-ıç-bar", True),
("你好", "你好", True),
("İstanbul", "istanbul", True),
)
for value, output, is_unicode in items:
with self.subTest(value=value):
self.assertEqual(text.slugify(value, allow_unicode=is_unicode), output)
# Interning the result may be useful, e.g. when fed to Path.
with self.subTest("intern"):
self.assertEqual(sys.intern(text.slugify("a")), "a")
def test_unescape_string_literal(self):
items = [
('"abc"', "abc"),
("'abc'", "abc"),
('"a "bc""', 'a "bc"'),
("''ab' c'", "'ab' c"),
]
for value, output in items:
with self.subTest(value=value):
self.assertEqual(text.unescape_string_literal(value), output)
self.assertEqual(text.unescape_string_literal(lazystr(value)), output)
def test_unescape_string_literal_invalid_value(self):
items = ["", "abc", "'abc\""]
for item in items:
msg = f"Not a string literal: {item!r}"
with self.assertRaisesMessage(ValueError, msg):
text.unescape_string_literal(item)
def test_get_valid_filename(self):
filename = "^&'@{}[],$=!-#()%+~_123.txt"
self.assertEqual(text.get_valid_filename(filename), "-_123.txt")
self.assertEqual(text.get_valid_filename(lazystr(filename)), "-_123.txt")
msg = "Could not derive file name from '???'"
with self.assertRaisesMessage(SuspiciousFileOperation, msg):
text.get_valid_filename("???")
# After sanitizing this would yield '..'.
msg = "Could not derive file name from '$.$.$'"
with self.assertRaisesMessage(SuspiciousFileOperation, msg):
text.get_valid_filename("$.$.$")
def test_compress_sequence(self):
data = [{"key": i} for i in range(10)]
seq = list(json.JSONEncoder().iterencode(data))
seq = [s.encode() for s in seq]
actual_length = len(b"".join(seq))
out = text.compress_sequence(seq)
compressed_length = len(b"".join(out))
self.assertLess(compressed_length, actual_length)
def test_format_lazy(self):
self.assertEqual("django/test", format_lazy("{}/{}", "django", lazystr("test")))
self.assertEqual("django/test", format_lazy("{0}/{1}", *("django", "test")))
self.assertEqual(
"django/test", format_lazy("{a}/{b}", **{"a": "django", "b": "test"})
)
self.assertEqual(
"django/test", format_lazy("{a[0]}/{a[1]}", a=("django", "test"))
)
t = {}
s = format_lazy("{0[a]}-{p[a]}", t, p=t)
t["a"] = lazystr("django")
self.assertEqual("django-django", s)
t["a"] = "update"
self.assertEqual("update-update", s)
# The format string can be lazy. (string comes from contrib.admin)
s = format_lazy(
gettext_lazy("Added {name} “{object}”."),
name="article",
object="My first try",
)
with override("fr"):
self.assertEqual("Ajout de article «\xa0My first try\xa0».", s)
|
./temp_repos/django/django/utils/text.py
|
./temp_repos/django/tests/utils_tests/test_text.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'TruncateHTMLParser'.
Context:
- Class Name: TruncateHTMLParser
- Dependencies to Mock: text
- Key Imports: secrets, textwrap, collections, html.parser, io, django.core.exceptions, html, django.utils.translation, gzip, django.utils.regex_helper
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern. A sketch following these requirements is shown below.
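A minimal sketch meeting these requirements, under two assumptions: TruncateHTMLParser is exercised only indirectly through the public Truncator API (its constructor signature is not shown in this context), and only Truncator.MAX_LENGTH_HTML is patched rather than mocking the 'text' module wholesale, since replacing 'text' entirely would leave nothing real to assert against. Expected values mirror the behavior documented by the tests above.
import unittest
from unittest.mock import patch
from django.utils import text
class TruncateHTMLParserSketchTests(unittest.TestCase):
    def test_truncates_text_and_closes_open_tags(self):
        # Arrange: nested open tags around plain text.
        truncator = text.Truncator("<p><em>The quick brown fox</em></p>")
        # Act: the default "…" replacement counts toward the requested length.
        result = truncator.chars(9, html=True)
        # Assert: the text is cut and both open tags are closed in order.
        self.assertEqual(result, "<p><em>The quic…</em></p>")
    def test_stray_closing_tag_is_preserved(self):
        # Arrange: malformed input with an unmatched closing tag.
        truncator = text.Truncator("foo</p>")
        # Act / Assert: nothing is truncated and the stray tag survives.
        self.assertEqual(truncator.chars(5, html=True), "foo</p>")
    @patch("django.utils.text.Truncator.MAX_LENGTH_HTML", 10_000)
    def test_input_beyond_size_limit_yields_empty_output(self):
        # Arrange: more unterminated entities than the (patched) limit.
        truncator = text.Truncator("&" * 10_001)
        # Act / Assert: the oversized input produces no output.
        self.assertEqual(truncator.chars(10, html=True), "")
if __name__ == "__main__":
    unittest.main()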
|
TruncateHTMLParser
|
python
|
import functools
import re
from collections import defaultdict, namedtuple
from enum import Enum
from graphlib import TopologicalSorter
from itertools import chain
from django.conf import settings
from django.db import models
from django.db.migrations import operations
from django.db.migrations.migration import Migration
from django.db.migrations.operations.models import AlterModelOptions
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.questioner import MigrationQuestioner
from django.db.migrations.utils import (
COMPILED_REGEX_TYPE,
RegexObject,
resolve_relation,
)
from django.utils.functional import cached_property
class OperationDependency(
namedtuple("OperationDependency", "app_label model_name field_name type")
):
class Type(Enum):
CREATE = 0
REMOVE = 1
ALTER = 2
REMOVE_ORDER_WRT = 3
ALTER_FOO_TOGETHER = 4
REMOVE_INDEX_OR_CONSTRAINT = 5
@cached_property
def model_name_lower(self):
return self.model_name.lower()
@cached_property
def field_name_lower(self):
return self.field_name.lower()
class MigrationAutodetector:
"""
Take a pair of ProjectStates and compare them to see what the first would
need doing to make it match the second (the second usually being the
project's current state).
Note that this naturally operates on entire projects at a time,
as it's likely that changes interact (for example, you can't
add a ForeignKey without having a migration to add the table it
depends on first). A user interface may offer single-app usage
if it wishes, with the caveat that it may not always be possible.
"""
def __init__(self, from_state, to_state, questioner=None):
self.from_state = from_state
self.to_state = to_state
self.questioner = questioner or MigrationQuestioner()
self.existing_apps = {app for app, model in from_state.models}
def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None):
"""
Main entry point to produce a list of applicable changes.
        Take a graph to base migration names on and an optional set of apps
        to try to restrict the changes to (restriction is not guaranteed).
"""
changes = self._detect_changes(convert_apps, graph)
changes = self.arrange_for_graph(changes, graph, migration_name)
if trim_to_apps:
changes = self._trim_to_apps(changes, trim_to_apps)
return changes
def deep_deconstruct(self, obj):
"""
Recursive deconstruction for a field and its arguments.
Used for full comparison for rename/alter; sometimes a single-level
deconstruction will not compare correctly.
"""
if isinstance(obj, list):
return [self.deep_deconstruct(value) for value in obj]
elif isinstance(obj, tuple):
return tuple(self.deep_deconstruct(value) for value in obj)
elif isinstance(obj, dict):
return {key: self.deep_deconstruct(value) for key, value in obj.items()}
elif isinstance(obj, functools.partial):
return (
obj.func,
self.deep_deconstruct(obj.args),
self.deep_deconstruct(obj.keywords),
)
elif isinstance(obj, COMPILED_REGEX_TYPE):
return RegexObject(obj)
elif isinstance(obj, type):
# If this is a type that implements 'deconstruct' as an instance
# method, avoid treating this as being deconstructible itself - see
# #22951
return obj
elif hasattr(obj, "deconstruct"):
deconstructed = obj.deconstruct()
if isinstance(obj, models.Field):
# we have a field which also returns a name
deconstructed = deconstructed[1:]
path, args, kwargs = deconstructed
return (
path,
[self.deep_deconstruct(value) for value in args],
{key: self.deep_deconstruct(value) for key, value in kwargs.items()},
)
else:
return obj
def only_relation_agnostic_fields(self, fields):
"""
Return a definition of the fields that ignores field names and
what related fields actually relate to. Used for detecting renames (as
the related fields change during renames).
"""
fields_def = []
for name, field in sorted(fields.items()):
deconstruction = self.deep_deconstruct(field)
if field.remote_field and field.remote_field.model:
deconstruction[2].pop("to", None)
fields_def.append(deconstruction)
return fields_def
def _detect_changes(self, convert_apps=None, graph=None):
"""
Return a dict of migration plans which will achieve the
change from from_state to to_state. The dict has app labels
as keys and a list of migrations as values.
The resulting migrations aren't specially named, but the names
do matter for dependencies inside the set.
convert_apps is the list of apps to convert to use migrations
(i.e. to make initial migrations for, in the usual case)
graph is an optional argument that, if provided, can help improve
dependency generation and avoid potential circular dependencies.
"""
# The first phase is generating all the operations for each app
# and gathering them into a big per-app list.
# Then go through that list, order it, and split into migrations to
# resolve dependencies caused by M2Ms and FKs.
self.generated_operations = {}
self.altered_indexes = {}
self.altered_constraints = {}
self.renamed_fields = {}
# Prepare some old/new state and model lists, separating
# proxy models and ignoring unmigrated apps.
self.old_model_keys = set()
self.old_proxy_keys = set()
self.old_unmanaged_keys = set()
self.new_model_keys = set()
self.new_proxy_keys = set()
self.new_unmanaged_keys = set()
for (app_label, model_name), model_state in self.from_state.models.items():
if not model_state.options.get("managed", True):
self.old_unmanaged_keys.add((app_label, model_name))
elif app_label not in self.from_state.real_apps:
if model_state.options.get("proxy"):
self.old_proxy_keys.add((app_label, model_name))
else:
self.old_model_keys.add((app_label, model_name))
for (app_label, model_name), model_state in self.to_state.models.items():
if not model_state.options.get("managed", True):
self.new_unmanaged_keys.add((app_label, model_name))
elif app_label not in self.from_state.real_apps or (
convert_apps and app_label in convert_apps
):
if model_state.options.get("proxy"):
self.new_proxy_keys.add((app_label, model_name))
else:
self.new_model_keys.add((app_label, model_name))
self.from_state.resolve_fields_and_relations()
self.to_state.resolve_fields_and_relations()
# Renames have to come first
self.generate_renamed_models()
# Prepare lists of fields and generate through model map
self._prepare_field_lists()
self._generate_through_model_map()
# Generate non-rename model operations
self.generate_deleted_models()
self.generate_created_models()
self.generate_deleted_proxies()
self.generate_created_proxies()
self.generate_altered_options()
self.generate_altered_managers()
self.generate_altered_db_table_comment()
# Create the renamed fields and store them in self.renamed_fields.
# They are used by create_altered_indexes(), generate_altered_fields(),
# generate_removed_altered_unique_together(), and
# generate_altered_unique_together().
self.create_renamed_fields()
# Create the altered indexes and store them in self.altered_indexes.
# This avoids the same computation in generate_removed_indexes()
# and generate_added_indexes().
self.create_altered_indexes()
self.create_altered_constraints()
# Generate index removal operations before field is removed
self.generate_removed_constraints()
self.generate_removed_indexes()
# Generate field renaming operations.
self.generate_renamed_fields()
self.generate_renamed_indexes()
# Generate removal of foo together.
self.generate_removed_altered_unique_together()
# Generate field operations.
self.generate_removed_fields()
self.generate_added_fields()
self.generate_altered_fields()
self.generate_altered_order_with_respect_to()
self.generate_altered_unique_together()
self.generate_added_indexes()
self.generate_added_constraints()
self.generate_altered_constraints()
self.generate_altered_db_table()
self._sort_migrations()
self._build_migration_list(graph)
self._optimize_migrations()
return self.migrations
def _prepare_field_lists(self):
"""
Prepare field lists and a list of the fields that used through models
in the old state so dependencies can be made from the through model
deletion to the field that uses it.
"""
self.kept_model_keys = self.old_model_keys & self.new_model_keys
self.kept_proxy_keys = self.old_proxy_keys & self.new_proxy_keys
self.kept_unmanaged_keys = self.old_unmanaged_keys & self.new_unmanaged_keys
self.through_users = {}
self.old_field_keys = {
(app_label, model_name, field_name)
for app_label, model_name in self.kept_model_keys
for field_name in self.from_state.models[
app_label, self.renamed_models.get((app_label, model_name), model_name)
].fields
}
self.new_field_keys = {
(app_label, model_name, field_name)
for app_label, model_name in self.kept_model_keys
for field_name in self.to_state.models[app_label, model_name].fields
}
def _generate_through_model_map(self):
"""Through model map generation."""
for app_label, model_name in sorted(self.old_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
for field_name, field in old_model_state.fields.items():
if hasattr(field, "remote_field") and getattr(
field.remote_field, "through", None
):
through_key = resolve_relation(
field.remote_field.through, app_label, model_name
)
self.through_users[through_key] = (
app_label,
old_model_name,
field_name,
)
@staticmethod
def _resolve_dependency(dependency):
"""
Return the resolved dependency and a boolean denoting whether or not
it was swappable.
"""
if dependency.app_label != "__setting__":
return dependency, False
resolved_app_label, resolved_object_name = getattr(
settings, dependency.model_name
).split(".")
return (
OperationDependency(
resolved_app_label,
resolved_object_name.lower(),
dependency.field_name,
dependency.type,
),
True,
)
def _build_migration_list(self, graph=None):
"""
Chop the lists of operations up into migrations with dependencies on
each other. Do this by going through an app's list of operations until
one is found that has an outgoing dependency that isn't in another
app's migration yet (hasn't been chopped off its list). Then chop off
the operations before it into a migration and move onto the next app.
        If the loop completes without doing anything, there's a circular
dependency (which _should_ be impossible as the operations are
all split at this point so they can't depend and be depended on).
"""
self.migrations = {}
num_ops = sum(len(x) for x in self.generated_operations.values())
chop_mode = False
while num_ops:
# On every iteration, we step through all the apps and see if there
# is a completed set of operations.
# If we find that a subset of the operations are complete we can
# try to chop it off from the rest and continue, but we only
# do this if we've already been through the list once before
# without any chopping and nothing has changed.
for app_label in sorted(self.generated_operations):
chopped = []
dependencies = set()
for operation in list(self.generated_operations[app_label]):
deps_satisfied = True
operation_dependencies = set()
for dep in operation._auto_deps:
# Temporarily resolve the swappable dependency to
# prevent circular references. While keeping the
# dependency checks on the resolved model, add the
# swappable dependencies.
original_dep = dep
dep, is_swappable_dep = self._resolve_dependency(dep)
if dep.app_label != app_label:
# External app dependency. See if it's not yet
# satisfied.
for other_operation in self.generated_operations.get(
dep.app_label, []
):
if self.check_dependency(other_operation, dep):
deps_satisfied = False
break
if not deps_satisfied:
break
else:
if is_swappable_dep:
operation_dependencies.add(
(
original_dep.app_label,
original_dep.model_name,
)
)
elif dep.app_label in self.migrations:
operation_dependencies.add(
(
dep.app_label,
self.migrations[dep.app_label][-1].name,
)
)
else:
# If we can't find the other app, we add a
# first/last dependency, but only if we've
# already been through once and checked
# everything.
if chop_mode:
# If the app already exists, we add a
# dependency on the last migration, as
# we don't know which migration
# contains the target field. If it's
# not yet migrated or has no
# migrations, we use __first__.
if graph and graph.leaf_nodes(dep.app_label):
operation_dependencies.add(
graph.leaf_nodes(dep.app_label)[0]
)
else:
operation_dependencies.add(
(dep.app_label, "__first__")
)
else:
deps_satisfied = False
if deps_satisfied:
chopped.append(operation)
dependencies.update(operation_dependencies)
del self.generated_operations[app_label][0]
else:
break
# Make a migration! Well, only if there's stuff to put in it
if dependencies or chopped:
if not self.generated_operations[app_label] or chop_mode:
subclass = type(
"Migration",
(Migration,),
{"operations": [], "dependencies": []},
)
instance = subclass(
"auto_%i" % (len(self.migrations.get(app_label, [])) + 1),
app_label,
)
instance.dependencies = list(dependencies)
instance.operations = chopped
instance.initial = app_label not in self.existing_apps
self.migrations.setdefault(app_label, []).append(instance)
chop_mode = False
else:
self.generated_operations[app_label] = (
chopped + self.generated_operations[app_label]
)
new_num_ops = sum(len(x) for x in self.generated_operations.values())
if new_num_ops == num_ops:
if not chop_mode:
chop_mode = True
else:
raise ValueError(
"Cannot resolve operation dependencies: %r"
% self.generated_operations
)
num_ops = new_num_ops
def _sort_migrations(self):
"""
Reorder to make things possible. Reordering may be needed so FKs work
nicely inside the same app.
"""
for app_label, ops in sorted(self.generated_operations.items()):
ts = TopologicalSorter()
for op in ops:
ts.add(op)
for dep in op._auto_deps:
# Resolve intra-app dependencies to handle circular
# references involving a swappable model.
dep = self._resolve_dependency(dep)[0]
if dep.app_label != app_label:
continue
ts.add(op, *(x for x in ops if self.check_dependency(x, dep)))
self.generated_operations[app_label] = list(ts.static_order())
def _optimize_migrations(self):
# Add in internal dependencies among the migrations
for app_label, migrations in self.migrations.items():
for m1, m2 in zip(migrations, migrations[1:]):
m2.dependencies.append((app_label, m1.name))
# De-dupe dependencies
for migrations in self.migrations.values():
for migration in migrations:
migration.dependencies = list(set(migration.dependencies))
# Optimize migrations
for app_label, migrations in self.migrations.items():
for migration in migrations:
migration.operations = MigrationOptimizer().optimize(
migration.operations, app_label
)
def check_dependency(self, operation, dependency):
"""
        Return True if the given operation addresses (would satisfy) the
        given dependency, False otherwise.
"""
# Created model
if (
dependency.field_name is None
and dependency.type == OperationDependency.Type.CREATE
):
return (
isinstance(operation, operations.CreateModel)
and operation.name_lower == dependency.model_name_lower
)
# Created field
elif (
dependency.field_name is not None
and dependency.type == OperationDependency.Type.CREATE
):
return (
isinstance(operation, operations.CreateModel)
and operation.name_lower == dependency.model_name_lower
and any(dependency.field_name == x for x, y in operation.fields)
) or (
isinstance(operation, operations.AddField)
and operation.model_name_lower == dependency.model_name_lower
and operation.name_lower == dependency.field_name_lower
)
# Removed field
elif (
dependency.field_name is not None
and dependency.type == OperationDependency.Type.REMOVE
):
return (
isinstance(operation, operations.RemoveField)
and operation.model_name_lower == dependency.model_name_lower
and operation.name_lower == dependency.field_name_lower
)
# Removed model
elif (
dependency.field_name is None
and dependency.type == OperationDependency.Type.REMOVE
):
return (
isinstance(operation, operations.DeleteModel)
and operation.name_lower == dependency.model_name_lower
)
# Field being altered
elif (
dependency.field_name is not None
and dependency.type == OperationDependency.Type.ALTER
):
return (
isinstance(operation, operations.AlterField)
and operation.model_name_lower == dependency.model_name_lower
and operation.name_lower == dependency.field_name_lower
)
# order_with_respect_to being unset for a field
elif (
dependency.field_name is not None
and dependency.type == OperationDependency.Type.REMOVE_ORDER_WRT
):
return (
isinstance(operation, operations.AlterOrderWithRespectTo)
and operation.name_lower == dependency.model_name_lower
and (operation.order_with_respect_to or "").lower()
!= dependency.field_name_lower
)
# Field is removed and part of an index/unique_together
elif (
dependency.field_name is not None
and dependency.type == OperationDependency.Type.ALTER_FOO_TOGETHER
):
return (
isinstance(
operation,
(operations.AlterUniqueTogether, operations.AlterIndexTogether),
)
and operation.name_lower == dependency.model_name_lower
)
# Field is removed and part of an index/constraint.
elif (
dependency.field_name is not None
and dependency.type == OperationDependency.Type.REMOVE_INDEX_OR_CONSTRAINT
):
return (
isinstance(
operation,
(operations.RemoveIndex, operations.RemoveConstraint),
)
and operation.model_name_lower == dependency.model_name_lower
)
# Unknown dependency. Raise an error.
else:
raise ValueError("Can't handle dependency %r" % (dependency,))
def add_operation(self, app_label, operation, dependencies=None, beginning=False):
        # Dependencies are OperationDependency instances:
        # (app_label, model_name, field_name, type).
operation._auto_deps = dependencies or []
if beginning:
self.generated_operations.setdefault(app_label, []).insert(0, operation)
else:
self.generated_operations.setdefault(app_label, []).append(operation)
def swappable_first_key(self, item):
"""
Place potential swappable models first in lists of created models (only
real way to solve #22783).
"""
try:
model_state = self.to_state.models[item]
base_names = {
base if isinstance(base, str) else base.__name__
for base in model_state.bases
}
string_version = "%s.%s" % (item[0], item[1])
if (
model_state.options.get("swappable")
or "AbstractUser" in base_names
or "AbstractBaseUser" in base_names
or settings.AUTH_USER_MODEL.lower() == string_version.lower()
):
return ("___" + item[0], "___" + item[1])
except LookupError:
pass
return item
def generate_renamed_models(self):
"""
Find any renamed models, generate the operations for them, and remove
the old entry from the model lists. Must be run before other
model-level generation.
"""
self.renamed_models = {}
self.renamed_models_rel = {}
added_models = self.new_model_keys - self.old_model_keys
for app_label, model_name in sorted(added_models):
model_state = self.to_state.models[app_label, model_name]
model_fields_def = self.only_relation_agnostic_fields(model_state.fields)
removed_models = self.old_model_keys - self.new_model_keys
for rem_app_label, rem_model_name in removed_models:
if rem_app_label == app_label:
rem_model_state = self.from_state.models[
rem_app_label, rem_model_name
]
rem_model_fields_def = self.only_relation_agnostic_fields(
rem_model_state.fields
)
if model_fields_def == rem_model_fields_def:
if self.questioner.ask_rename_model(
rem_model_state, model_state
):
dependencies = []
fields = list(model_state.fields.values()) + [
field.remote_field
for relations in self.to_state.relations[
app_label, model_name
].values()
for field in relations.values()
]
for field in fields:
if field.is_relation:
dependencies.extend(
self._get_dependencies_for_foreign_key(
app_label,
model_name,
field,
self.to_state,
)
)
self.add_operation(
app_label,
operations.RenameModel(
old_name=rem_model_state.name,
new_name=model_state.name,
),
dependencies=dependencies,
)
self.renamed_models[app_label, model_name] = rem_model_name
renamed_models_rel_key = "%s.%s" % (
rem_model_state.app_label,
rem_model_state.name_lower,
)
self.renamed_models_rel[renamed_models_rel_key] = (
"%s.%s"
% (
model_state.app_label,
model_state.name_lower,
)
)
self.old_model_keys.remove((rem_app_label, rem_model_name))
self.old_model_keys.add((app_label, model_name))
break
def generate_created_models(self):
"""
Find all new models (both managed and unmanaged) and make create
operations for them as well as separate operations to create any
foreign key or M2M relationships (these are optimized later, if
possible).
Defer any model options that refer to collections of fields that might
be deferred (e.g. unique_together).
"""
old_keys = self.old_model_keys | self.old_unmanaged_keys
added_models = self.new_model_keys - old_keys
added_unmanaged_models = self.new_unmanaged_keys - old_keys
all_added_models = chain(
sorted(added_models, key=self.swappable_first_key, reverse=True),
sorted(added_unmanaged_models, key=self.swappable_first_key, reverse=True),
)
for app_label, model_name in all_added_models:
model_state = self.to_state.models[app_label, model_name]
# Gather related fields
related_fields = {}
primary_key_rel = None
for field_name, field in model_state.fields.items():
if field.remote_field:
if field.remote_field.model:
if field.primary_key:
primary_key_rel = field.remote_field.model
elif not field.remote_field.parent_link:
related_fields[field_name] = field
if getattr(field.remote_field, "through", None):
related_fields[field_name] = field
# Are there indexes/unique_together to defer?
indexes = model_state.options.pop("indexes")
constraints = model_state.options.pop("constraints")
unique_together = model_state.options.pop("unique_together", None)
order_with_respect_to = model_state.options.pop(
"order_with_respect_to", None
)
# Depend on the deletion of any possible proxy version of us
dependencies = [
OperationDependency(
app_label, model_name, None, OperationDependency.Type.REMOVE
),
]
# Depend on all bases
for base in model_state.bases:
if isinstance(base, str) and "." in base:
base_app_label, base_name = base.split(".", 1)
dependencies.append(
OperationDependency(
base_app_label,
base_name,
None,
OperationDependency.Type.CREATE,
)
)
# Depend on the removal of base fields if the new model has
# a field with the same name.
old_base_model_state = self.from_state.models.get(
(base_app_label, base_name)
)
new_base_model_state = self.to_state.models.get(
(base_app_label, base_name)
)
if old_base_model_state and new_base_model_state:
removed_base_fields = (
set(old_base_model_state.fields)
.difference(
new_base_model_state.fields,
)
.intersection(model_state.fields)
)
for removed_base_field in removed_base_fields:
dependencies.append(
OperationDependency(
base_app_label,
base_name,
removed_base_field,
OperationDependency.Type.REMOVE,
)
)
# Depend on the other end of the primary key if it's a relation
if primary_key_rel:
dependencies.append(
OperationDependency(
*resolve_relation(primary_key_rel, app_label, model_name),
None,
OperationDependency.Type.CREATE,
),
)
# Generate creation operation
self.add_operation(
app_label,
operations.CreateModel(
name=model_state.name,
fields=[
d
for d in model_state.fields.items()
if d[0] not in related_fields
],
options=model_state.options,
bases=model_state.bases,
managers=model_state.managers,
),
dependencies=dependencies,
beginning=True,
)
# Don't add operations which modify the database for unmanaged
# models
if not model_state.options.get("managed", True):
continue
# Generate operations for each related field
for name, field in sorted(related_fields.items()):
dependencies = self._get_dependencies_for_foreign_key(
app_label,
model_name,
field,
self.to_state,
)
# Depend on our own model being created
dependencies.append(
OperationDependency(
app_label, model_name, None, OperationDependency.Type.CREATE
)
)
# Make operation
self.add_operation(
app_label,
operations.AddField(
model_name=model_name,
name=name,
field=field,
),
dependencies=list(set(dependencies)),
)
            # Generate other operations
if order_with_respect_to:
self.add_operation(
app_label,
operations.AlterOrderWithRespectTo(
name=model_name,
order_with_respect_to=order_with_respect_to,
),
dependencies=[
OperationDependency(
app_label,
model_name,
order_with_respect_to,
OperationDependency.Type.CREATE,
),
OperationDependency(
app_label, model_name, None, OperationDependency.Type.CREATE
),
],
)
related_dependencies = [
OperationDependency(
app_label, model_name, name, OperationDependency.Type.CREATE
)
for name in sorted(related_fields)
]
related_dependencies.append(
OperationDependency(
app_label, model_name, None, OperationDependency.Type.CREATE
)
)
for index in indexes:
self.add_operation(
app_label,
operations.AddIndex(
model_name=model_name,
index=index,
),
dependencies=related_dependencies,
)
for constraint in constraints:
self.add_operation(
app_label,
operations.AddConstraint(
model_name=model_name,
constraint=constraint,
),
dependencies=related_dependencies,
)
if unique_together:
self.add_operation(
app_label,
operations.AlterUniqueTogether(
name=model_name,
unique_together=unique_together,
),
dependencies=related_dependencies,
)
# Fix relationships if the model changed from a proxy model to a
# concrete model.
relations = self.to_state.relations
if (app_label, model_name) in self.old_proxy_keys:
for related_model_key, related_fields in relations[
app_label, model_name
].items():
related_model_state = self.to_state.models[related_model_key]
for related_field_name, related_field in related_fields.items():
self.add_operation(
related_model_state.app_label,
operations.AlterField(
model_name=related_model_state.name,
name=related_field_name,
field=related_field,
),
dependencies=[
OperationDependency(
app_label,
model_name,
None,
OperationDependency.Type.CREATE,
)
],
)
def generate_created_proxies(self):
"""
        Make CreateModel statements for proxy models. Reuse the concrete
        model statements so there's less code duplication, but for proxy
        models it's safe to skip all the pointless field stuff and emit a
        bare operation.
"""
added = self.new_proxy_keys - self.old_proxy_keys
for app_label, model_name in sorted(added):
model_state = self.to_state.models[app_label, model_name]
assert model_state.options.get("proxy")
# Depend on the deletion of any possible non-proxy version of us
dependencies = [
OperationDependency(
app_label, model_name, None, OperationDependency.Type.REMOVE
),
]
# Depend on all bases
for base in model_state.bases:
if isinstance(base, str) and "." in base:
base_app_label, base_name = base.split(".", 1)
dependencies.append(
OperationDependency(
base_app_label,
base_name,
None,
OperationDependency.Type.CREATE,
)
)
# Generate creation operation
self.add_operation(
app_label,
operations.CreateModel(
name=model_state.name,
fields=[],
options=model_state.options,
bases=model_state.bases,
managers=model_state.managers,
),
# Depend on the deletion of any possible non-proxy version of
# us
dependencies=dependencies,
)
def generate_deleted_models(self):
"""
Find all deleted models (managed and unmanaged) and make delete
operations for them as well as separate operations to delete any
foreign key or M2M relationships (these are optimized later, if
possible).
Also bring forward removal of any model options that refer to
collections of fields - the inverse of generate_created_models().
"""
new_keys = self.new_model_keys | self.new_unmanaged_keys
deleted_models = self.old_model_keys - new_keys
deleted_unmanaged_models = self.old_unmanaged_keys - new_keys
all_deleted_models = chain(
sorted(deleted_models), sorted(deleted_unmanaged_models)
)
for app_label, model_name in all_deleted_models:
model_state = self.from_state.models[app_label, model_name]
# Gather related fields
related_fields = {}
for field_name, field in model_state.fields.items():
if field.remote_field:
if field.remote_field.model:
related_fields[field_name] = field
if getattr(field.remote_field, "through", None):
related_fields[field_name] = field
# Generate option removal first
unique_together = model_state.options.pop("unique_together", None)
if unique_together:
self.add_operation(
app_label,
operations.AlterUniqueTogether(
name=model_name,
unique_together=None,
),
)
if indexes := model_state.options.pop("indexes", None):
for index in indexes:
self.add_operation(
app_label,
operations.RemoveIndex(
model_name=model_name,
name=index.name,
),
)
if constraints := model_state.options.pop("constraints", None):
for constraint in constraints:
self.add_operation(
app_label,
operations.RemoveConstraint(
model_name=model_name,
name=constraint.name,
),
)
# Then remove each related field
for name in sorted(related_fields):
self.add_operation(
app_label,
operations.RemoveField(
model_name=model_name,
name=name,
),
dependencies=[
OperationDependency(
app_label,
model_name,
name,
OperationDependency.Type.REMOVE_INDEX_OR_CONSTRAINT,
),
],
)
# Finally, remove the model.
# This depends on both the removal/alteration of all incoming
# fields and the removal of all its own related fields, and if it's
# a through model the field that references it.
dependencies = []
relations = self.from_state.relations
for (
related_object_app_label,
object_name,
), relation_related_fields in relations[app_label, model_name].items():
for field_name, field in relation_related_fields.items():
dependencies.append(
OperationDependency(
related_object_app_label,
object_name,
field_name,
OperationDependency.Type.REMOVE,
),
)
if not field.many_to_many:
dependencies.append(
OperationDependency(
related_object_app_label,
object_name,
field_name,
OperationDependency.Type.ALTER,
),
)
for name in sorted(related_fields):
dependencies.append(
OperationDependency(
app_label, model_name, name, OperationDependency.Type.REMOVE
)
)
# We're referenced in another field's through=
through_user = self.through_users.get((app_label, model_state.name_lower))
if through_user:
dependencies.append(
OperationDependency(*through_user, OperationDependency.Type.REMOVE),
)
# Finally, make the operation, deduping any dependencies
self.add_operation(
app_label,
operations.DeleteModel(
name=model_state.name,
),
dependencies=list(set(dependencies)),
)
def generate_deleted_proxies(self):
"""Make DeleteModel options for proxy models."""
deleted = self.old_proxy_keys - self.new_proxy_keys
for app_label, model_name in sorted(deleted):
model_state = self.from_state.models[app_label, model_name]
assert model_state.options.get("proxy")
self.add_operation(
app_label,
operations.DeleteModel(
name=model_state.name,
),
)
def create_renamed_fields(self):
"""Work out renamed fields."""
self.renamed_operations = []
old_field_keys = self.old_field_keys.copy()
for app_label, model_name, field_name in sorted(
self.new_field_keys - old_field_keys
):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
field = new_model_state.get_field(field_name)
# Scan to see if this is actually a rename!
field_dec = self.deep_deconstruct(field)
for rem_app_label, rem_model_name, rem_field_name in sorted(
old_field_keys - self.new_field_keys
):
if rem_app_label == app_label and rem_model_name == model_name:
old_field = old_model_state.get_field(rem_field_name)
old_field_dec = self.deep_deconstruct(old_field)
if (
field.remote_field
and field.remote_field.model
and "to" in old_field_dec[2]
):
old_rel_to = old_field_dec[2]["to"]
if old_rel_to in self.renamed_models_rel:
old_field_dec[2]["to"] = self.renamed_models_rel[old_rel_to]
old_field.set_attributes_from_name(rem_field_name)
old_db_column = old_field.get_attname_column()[1]
if old_field_dec == field_dec or (
                        # Or was the field renamed with a db_column added
                        # that equals the old field's column?
old_field_dec[0:2] == field_dec[0:2]
and dict(old_field_dec[2], db_column=old_db_column)
== field_dec[2]
):
if self.questioner.ask_rename(
model_name, rem_field_name, field_name, field
):
self.renamed_operations.append(
(
rem_app_label,
rem_model_name,
old_field.db_column,
rem_field_name,
app_label,
model_name,
field,
field_name,
)
)
old_field_keys.remove(
(rem_app_label, rem_model_name, rem_field_name)
)
old_field_keys.add((app_label, model_name, field_name))
self.renamed_fields[app_label, model_name, field_name] = (
rem_field_name
)
break
def generate_renamed_fields(self):
"""Generate RenameField operations."""
for (
rem_app_label,
rem_model_name,
rem_db_column,
rem_field_name,
app_label,
model_name,
field,
field_name,
) in self.renamed_operations:
            # A db_column mismatch requires a prior noop AlterField so that
            # the subsequent RenameField is a noop when it attempts to
            # preserve the old name.
if rem_db_column != field.db_column:
altered_field = field.clone()
altered_field.name = rem_field_name
self.add_operation(
app_label,
operations.AlterField(
model_name=model_name,
name=rem_field_name,
field=altered_field,
),
)
self.add_operation(
app_label,
operations.RenameField(
model_name=model_name,
old_name=rem_field_name,
new_name=field_name,
),
)
self.old_field_keys.remove((rem_app_label, rem_model_name, rem_field_name))
self.old_field_keys.add((app_label, model_name, field_name))
def generate_added_fields(self):
"""Make AddField operations."""
for app_label, model_name, field_name in sorted(
self.new_field_keys - self.old_field_keys
):
self._generate_added_field(app_label, model_name, field_name)
def _generate_added_field(self, app_label, model_name, field_name):
field = self.to_state.models[app_label, model_name].get_field(field_name)
# Adding a field always depends at least on its removal.
dependencies = [
OperationDependency(
app_label, model_name, field_name, OperationDependency.Type.REMOVE
)
]
        # Fields that are foreign keys/M2Ms depend on their target models.
if field.remote_field and field.remote_field.model:
dependencies.extend(
self._get_dependencies_for_foreign_key(
app_label,
model_name,
field,
self.to_state,
)
)
if field.generated:
dependencies.extend(self._get_dependencies_for_generated_field(field))
# You can't just add NOT NULL fields with no default or fields
# which don't allow empty strings as default.
time_fields = (models.DateField, models.DateTimeField, models.TimeField)
auto_fields = (models.AutoField, models.SmallAutoField, models.BigAutoField)
preserve_default = (
field.null
or field.has_default()
or field.has_db_default()
or field.many_to_many
or (field.blank and field.empty_strings_allowed)
or (isinstance(field, time_fields) and field.auto_now)
or (isinstance(field, auto_fields))
)
if not preserve_default:
field = field.clone()
if isinstance(field, time_fields) and field.auto_now_add:
field.default = self.questioner.ask_auto_now_add_addition(
field_name, model_name
)
else:
field.default = self.questioner.ask_not_null_addition(
field_name, model_name
)
if field.unique and field.has_default() and callable(field.default):
self.questioner.ask_unique_callable_default_addition(field_name, model_name)
self.add_operation(
app_label,
operations.AddField(
model_name=model_name,
name=field_name,
field=field,
preserve_default=preserve_default,
),
dependencies=dependencies,
)
def generate_removed_fields(self):
"""Make RemoveField operations."""
for app_label, model_name, field_name in sorted(
self.old_field_keys - self.new_field_keys
):
self._generate_removed_field(app_label, model_name, field_name)
def _generate_removed_field(self, app_label, model_name, field_name):
self.add_operation(
app_label,
operations.RemoveField(
model_name=model_name,
name=field_name,
),
# Include dependencies such as order_with_respect_to, constraints,
# and any generated fields that may depend on this field. These
# are safely ignored if not present.
dependencies=[
OperationDependency(
app_label,
model_name,
field_name,
OperationDependency.Type.REMOVE_ORDER_WRT,
),
OperationDependency(
app_label,
model_name,
field_name,
OperationDependency.Type.ALTER_FOO_TOGETHER,
),
OperationDependency(
app_label,
model_name,
field_name,
OperationDependency.Type.REMOVE_INDEX_OR_CONSTRAINT,
),
*self._get_generated_field_dependencies_for_removed_field(
app_label, model_name, field_name
),
],
)
def generate_altered_fields(self):
"""
        Make AlterField operations, or possibly RemoveField/AddField if an
        alter isn't possible.
"""
for app_label, model_name, field_name in sorted(
self.old_field_keys & self.new_field_keys
):
# Did the field change?
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_field_name = self.renamed_fields.get(
(app_label, model_name, field_name), field_name
)
old_field = self.from_state.models[app_label, old_model_name].get_field(
old_field_name
)
new_field = self.to_state.models[app_label, model_name].get_field(
field_name
)
dependencies = []
# Implement any model renames on relations; these are handled by
# RenameModel so we need to exclude them from the comparison
if hasattr(new_field, "remote_field") and getattr(
new_field.remote_field, "model", None
):
rename_key = resolve_relation(
new_field.remote_field.model, app_label, model_name
)
if rename_key in self.renamed_models:
new_field.remote_field.model = old_field.remote_field.model
# Handle ForeignKey which can only have a single to_field.
remote_field_name = getattr(new_field.remote_field, "field_name", None)
if remote_field_name:
to_field_rename_key = (*rename_key, remote_field_name)
if to_field_rename_key in self.renamed_fields:
# Repoint both model and field name because to_field
# inclusion in ForeignKey.deconstruct() is based on
# both.
new_field.remote_field.model = old_field.remote_field.model
new_field.remote_field.field_name = (
old_field.remote_field.field_name
)
# Handle ForeignObjects which can have multiple
# from_fields/to_fields.
from_fields = getattr(new_field, "from_fields", None)
if from_fields:
from_rename_key = (app_label, model_name)
new_field.from_fields = tuple(
[
self.renamed_fields.get(
(*from_rename_key, from_field), from_field
)
for from_field in from_fields
]
)
new_field.to_fields = tuple(
[
self.renamed_fields.get((*rename_key, to_field), to_field)
for to_field in new_field.to_fields
]
)
if old_from_fields := getattr(old_field, "from_fields", None):
old_field.from_fields = tuple(old_from_fields)
old_field.to_fields = tuple(old_field.to_fields)
dependencies.extend(
self._get_dependencies_for_foreign_key(
app_label,
model_name,
new_field,
self.to_state,
)
)
if hasattr(new_field, "remote_field") and getattr(
new_field.remote_field, "through", None
):
rename_key = resolve_relation(
new_field.remote_field.through, app_label, model_name
)
if rename_key in self.renamed_models:
new_field.remote_field.through = old_field.remote_field.through
old_field_dec = self.deep_deconstruct(old_field)
new_field_dec = self.deep_deconstruct(new_field)
# If the field was confirmed to be renamed it means that only
# db_column was allowed to change which generate_renamed_fields()
# already accounts for by adding an AlterField operation.
if old_field_dec != new_field_dec and old_field_name == field_name:
both_m2m = old_field.many_to_many and new_field.many_to_many
neither_m2m = not old_field.many_to_many and not new_field.many_to_many
if both_m2m or neither_m2m:
# Either both fields are m2m or neither is
preserve_default = True
if (
old_field.null
and not new_field.null
and not new_field.has_default()
and not new_field.has_db_default()
and not new_field.many_to_many
):
field = new_field.clone()
new_default = self.questioner.ask_not_null_alteration(
field_name, model_name
)
if new_default is not models.NOT_PROVIDED:
field.default = new_default
preserve_default = False
else:
field = new_field
self.add_operation(
app_label,
operations.AlterField(
model_name=model_name,
name=field_name,
field=field,
preserve_default=preserve_default,
),
dependencies=dependencies,
)
else:
# We cannot alter between m2m and concrete fields
self._generate_removed_field(app_label, model_name, field_name)
self._generate_added_field(app_label, model_name, field_name)
def create_altered_indexes(self):
option_name = operations.AddIndex.option_name
self.renamed_index_together_values = defaultdict(list)
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_indexes = old_model_state.options[option_name]
new_indexes = new_model_state.options[option_name]
added_indexes = [idx for idx in new_indexes if idx not in old_indexes]
removed_indexes = [idx for idx in old_indexes if idx not in new_indexes]
renamed_indexes = []
# Find renamed indexes.
remove_from_added = []
remove_from_removed = []
for new_index in added_indexes:
new_index_dec = new_index.deconstruct()
new_index_name = new_index_dec[2].pop("name")
for old_index in removed_indexes:
old_index_dec = old_index.deconstruct()
old_index_name = old_index_dec[2].pop("name")
# Indexes are the same except for the names.
if (
new_index_dec == old_index_dec
and new_index_name != old_index_name
):
renamed_indexes.append((old_index_name, new_index_name, None))
remove_from_added.append(new_index)
remove_from_removed.append(old_index)
# Find index_together changed to indexes.
for (
old_value,
new_value,
index_together_app_label,
index_together_model_name,
dependencies,
) in self._get_altered_foo_together_operations(
operations.AlterIndexTogether.option_name
):
if (
app_label != index_together_app_label
or model_name != index_together_model_name
):
continue
removed_values = old_value.difference(new_value)
for removed_index_together in removed_values:
renamed_index_together_indexes = []
for new_index in added_indexes:
_, args, kwargs = new_index.deconstruct()
# Ensure only 'fields' are defined in the Index.
if (
not args
and new_index.fields == list(removed_index_together)
and set(kwargs) == {"name", "fields"}
):
renamed_index_together_indexes.append(new_index)
if len(renamed_index_together_indexes) == 1:
renamed_index = renamed_index_together_indexes[0]
remove_from_added.append(renamed_index)
renamed_indexes.append(
(None, renamed_index.name, removed_index_together)
)
self.renamed_index_together_values[
index_together_app_label, index_together_model_name
].append(removed_index_together)
# Remove renamed indexes from the lists of added and removed
# indexes.
added_indexes = [
idx for idx in added_indexes if idx not in remove_from_added
]
removed_indexes = [
idx for idx in removed_indexes if idx not in remove_from_removed
]
self.altered_indexes.update(
{
(app_label, model_name): {
"added_indexes": added_indexes,
"removed_indexes": removed_indexes,
"renamed_indexes": renamed_indexes,
}
}
)
def generate_added_indexes(self):
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
dependencies = self._get_dependencies_for_model(app_label, model_name)
for index in alt_indexes["added_indexes"]:
self.add_operation(
app_label,
operations.AddIndex(
model_name=model_name,
index=index,
),
dependencies=dependencies,
)
def generate_removed_indexes(self):
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
for index in alt_indexes["removed_indexes"]:
self.add_operation(
app_label,
operations.RemoveIndex(
model_name=model_name,
name=index.name,
),
)
def generate_renamed_indexes(self):
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
for old_index_name, new_index_name, old_fields in alt_indexes[
"renamed_indexes"
]:
self.add_operation(
app_label,
operations.RenameIndex(
model_name=model_name,
new_name=new_index_name,
old_name=old_index_name,
old_fields=old_fields,
),
)
def _constraint_should_be_dropped_and_recreated(
self, old_constraint, new_constraint
):
old_path, old_args, old_kwargs = old_constraint.deconstruct()
new_path, new_args, new_kwargs = new_constraint.deconstruct()
for attr in old_constraint.non_db_attrs:
old_kwargs.pop(attr, None)
for attr in new_constraint.non_db_attrs:
new_kwargs.pop(attr, None)
# Replace renamed fields if the db_column is preserved.
for (
_,
_,
rem_db_column,
rem_field_name,
_,
_,
field,
field_name,
) in self.renamed_operations:
if field.db_column and rem_db_column == field.db_column:
new_fields = new_kwargs["fields"]
try:
new_field_idx = new_fields.index(field_name)
except ValueError:
continue
new_kwargs["fields"] = tuple(
new_fields[:new_field_idx]
+ (rem_field_name,)
+ new_fields[new_field_idx + 1 :]
)
return (old_path, old_args, old_kwargs) != (new_path, new_args, new_kwargs)
def create_altered_constraints(self):
option_name = operations.AddConstraint.option_name
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_constraints = old_model_state.options[option_name]
new_constraints = new_model_state.options[option_name]
alt_constraints = []
alt_constraints_name = []
for old_c in old_constraints:
for new_c in new_constraints:
old_c_dec = old_c.deconstruct()
new_c_dec = new_c.deconstruct()
if (
old_c_dec != new_c_dec
and old_c.name == new_c.name
and not self._constraint_should_be_dropped_and_recreated(
old_c, new_c
)
):
alt_constraints.append(new_c)
alt_constraints_name.append(new_c.name)
add_constraints = [
c
for c in new_constraints
if c not in old_constraints and c.name not in alt_constraints_name
]
rem_constraints = [
c
for c in old_constraints
if c not in new_constraints and c.name not in alt_constraints_name
]
self.altered_constraints.update(
{
(app_label, model_name): {
"added_constraints": add_constraints,
"removed_constraints": rem_constraints,
"altered_constraints": alt_constraints,
}
}
)
def generate_added_constraints(self):
for (
app_label,
model_name,
), alt_constraints in self.altered_constraints.items():
dependencies = self._get_dependencies_for_model(app_label, model_name)
for constraint in alt_constraints["added_constraints"]:
self.add_operation(
app_label,
operations.AddConstraint(
model_name=model_name,
constraint=constraint,
),
dependencies=dependencies,
)
def generate_removed_constraints(self):
for (
app_label,
model_name,
), alt_constraints in self.altered_constraints.items():
for constraint in alt_constraints["removed_constraints"]:
self.add_operation(
app_label,
operations.RemoveConstraint(
model_name=model_name,
name=constraint.name,
),
)
def generate_altered_constraints(self):
for (
app_label,
model_name,
), alt_constraints in self.altered_constraints.items():
dependencies = self._get_dependencies_for_model(app_label, model_name)
for constraint in alt_constraints["altered_constraints"]:
self.add_operation(
app_label,
operations.AlterConstraint(
model_name=model_name,
name=constraint.name,
constraint=constraint,
),
dependencies=dependencies,
)
@staticmethod
def _get_dependencies_for_foreign_key(app_label, model_name, field, project_state):
remote_field_model = None
if hasattr(field.remote_field, "model"):
remote_field_model = field.remote_field.model
else:
relations = project_state.relations[app_label, model_name]
for (remote_app_label, remote_model_name), fields in relations.items():
if any(
field == related_field.remote_field
for related_field in fields.values()
):
remote_field_model = f"{remote_app_label}.{remote_model_name}"
break
# Account for FKs to swappable models
swappable_setting = getattr(field, "swappable_setting", None)
if swappable_setting is not None:
dep_app_label = "__setting__"
dep_object_name = swappable_setting
else:
dep_app_label, dep_object_name = resolve_relation(
remote_field_model,
app_label,
model_name,
)
dependencies = [
OperationDependency(
dep_app_label, dep_object_name, None, OperationDependency.Type.CREATE
)
]
if getattr(field.remote_field, "through", None):
through_app_label, through_object_name = resolve_relation(
field.remote_field.through,
app_label,
model_name,
)
dependencies.append(
OperationDependency(
through_app_label,
through_object_name,
None,
OperationDependency.Type.CREATE,
)
)
return dependencies
def _get_dependencies_for_generated_field(self, field):
dependencies = []
referenced_base_fields = [
name
for name, *lookups in models.Model._get_expr_references(field.expression)
]
newly_added_fields = sorted(self.new_field_keys - self.old_field_keys)
for app_label, model_name, added_field_name in newly_added_fields:
added_field = self.to_state.models[app_label, model_name].get_field(
added_field_name
)
if (
added_field.remote_field and added_field.remote_field.model
) or added_field.name in referenced_base_fields:
dependencies.append(
OperationDependency(
app_label,
model_name,
added_field.name,
OperationDependency.Type.CREATE,
)
)
return dependencies
def _get_generated_field_dependencies_for_removed_field(
self, app_label, model_name, field_name
):
dependencies = []
model_state = self.from_state.models[app_label, model_name]
generated_fields = (f for f in model_state.fields.values() if f.generated)
for field in generated_fields:
if any(
field_name == name
for name, *_ in models.Model._get_expr_references(field.expression)
):
dependencies.append(
OperationDependency(
app_label,
model_name,
field.name,
OperationDependency.Type.REMOVE,
)
)
return dependencies
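    # Illustrative: when removing Author.name while a generated field
    # upper_name = Upper("name") still references it, the base field's
    # removal gains a REMOVE dependency on upper_name, ordering
    # RemoveField("upper_name") before RemoveField("name") (see
    # test_remove_generated_field_before_its_base_field in the test module).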
def _get_dependencies_for_model(self, app_label, model_name):
"""Return foreign key dependencies of the given model."""
dependencies = []
model_state = self.to_state.models[app_label, model_name]
for field in model_state.fields.values():
if field.is_relation:
dependencies.extend(
self._get_dependencies_for_foreign_key(
app_label,
model_name,
field,
self.to_state,
)
)
return dependencies
def _get_altered_foo_together_operations(self, option_name):
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
# We run the old version through the field renames to account for
# those
old_value = old_model_state.options.get(option_name)
old_value = (
{
tuple(
self.renamed_fields.get((app_label, model_name, n), n)
for n in unique
)
for unique in old_value
}
if old_value
else set()
)
new_value = new_model_state.options.get(option_name)
new_value = set(new_value) if new_value else set()
if old_value != new_value:
dependencies = []
for foo_togethers in new_value:
for field_name in foo_togethers:
field = new_model_state.get_field(field_name)
if field.remote_field and field.remote_field.model:
dependencies.extend(
self._get_dependencies_for_foreign_key(
app_label,
model_name,
field,
self.to_state,
)
)
yield (
old_value,
new_value,
app_label,
model_name,
dependencies,
)
def _generate_removed_altered_foo_together(self, operation):
for (
old_value,
new_value,
app_label,
model_name,
dependencies,
) in self._get_altered_foo_together_operations(operation.option_name):
if operation == operations.AlterIndexTogether:
old_value = {
value
for value in old_value
if value
not in self.renamed_index_together_values[app_label, model_name]
}
removal_value = new_value.intersection(old_value)
if removal_value or old_value:
self.add_operation(
app_label,
operation(
name=model_name, **{operation.option_name: removal_value}
),
dependencies=dependencies,
)
def generate_removed_altered_unique_together(self):
self._generate_removed_altered_foo_together(operations.AlterUniqueTogether)
def _generate_altered_foo_together(self, operation):
for (
old_value,
new_value,
app_label,
model_name,
dependencies,
) in self._get_altered_foo_together_operations(operation.option_name):
removal_value = new_value.intersection(old_value)
if new_value != removal_value:
self.add_operation(
app_label,
operation(name=model_name, **{operation.option_name: new_value}),
dependencies=dependencies,
)
def generate_altered_unique_together(self):
self._generate_altered_foo_together(operations.AlterUniqueTogether)
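    # Two-step sketch (illustrative): going from unique_together
    # {("a", "b"), ("a", "c")} to {("a", "b"), ("b", "c")}, the "removed"
    # pass above first shrinks the option to the intersection {("a", "b")}
    # and this pass then sets the full new value, so stale constraints are
    # dropped before new ones are added.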
def generate_altered_db_table(self):
models_to_check = self.kept_model_keys.union(
self.kept_proxy_keys, self.kept_unmanaged_keys
)
for app_label, model_name in sorted(models_to_check):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_db_table_name = old_model_state.options.get("db_table")
new_db_table_name = new_model_state.options.get("db_table")
if old_db_table_name != new_db_table_name:
self.add_operation(
app_label,
operations.AlterModelTable(
name=model_name,
table=new_db_table_name,
),
)
def generate_altered_db_table_comment(self):
models_to_check = self.kept_model_keys.union(
self.kept_proxy_keys, self.kept_unmanaged_keys
)
for app_label, model_name in sorted(models_to_check):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_db_table_comment = old_model_state.options.get("db_table_comment")
new_db_table_comment = new_model_state.options.get("db_table_comment")
if old_db_table_comment != new_db_table_comment:
self.add_operation(
app_label,
operations.AlterModelTableComment(
name=model_name,
table_comment=new_db_table_comment,
),
)
def generate_altered_options(self):
"""
Work out if any non-schema-affecting options have changed and make an
operation to represent them in state changes (in case Python code in
migrations needs them).
"""
models_to_check = self.kept_model_keys.union(
self.kept_proxy_keys,
self.kept_unmanaged_keys,
# unmanaged converted to managed
self.old_unmanaged_keys & self.new_model_keys,
# managed converted to unmanaged
self.old_model_keys & self.new_unmanaged_keys,
)
for app_label, model_name in sorted(models_to_check):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_options = {
key: value
for key, value in old_model_state.options.items()
if key in AlterModelOptions.ALTER_OPTION_KEYS
}
new_options = {
key: value
for key, value in new_model_state.options.items()
if key in AlterModelOptions.ALTER_OPTION_KEYS
}
if old_options != new_options:
self.add_operation(
app_label,
operations.AlterModelOptions(
name=model_name,
options=new_options,
),
)
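    # Illustrative: changing only Meta options such as permissions or
    # verbose_name (see author_with_options in the test module) emits a
    # single AlterModelOptions carrying the new option dict; keys outside
    # AlterModelOptions.ALTER_OPTION_KEYS are ignored here.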
def generate_altered_order_with_respect_to(self):
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
if old_model_state.options.get(
"order_with_respect_to"
) != new_model_state.options.get("order_with_respect_to"):
# Make sure it comes second if we're adding
# (removal dependency is part of RemoveField)
dependencies = []
if new_model_state.options.get("order_with_respect_to"):
dependencies.append(
OperationDependency(
app_label,
model_name,
new_model_state.options["order_with_respect_to"],
OperationDependency.Type.CREATE,
)
)
# Actually generate the operation
self.add_operation(
app_label,
operations.AlterOrderWithRespectTo(
name=model_name,
order_with_respect_to=new_model_state.options.get(
"order_with_respect_to"
),
),
dependencies=dependencies,
)
def generate_altered_managers(self):
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
if old_model_state.managers != new_model_state.managers:
self.add_operation(
app_label,
operations.AlterModelManagers(
name=model_name,
managers=new_model_state.managers,
),
)
def arrange_for_graph(self, changes, graph, migration_name=None):
"""
Take a result from changes() and a MigrationGraph, and fix the names
and dependencies of the changes so they extend the graph from the leaf
nodes for each app.
"""
leaves = graph.leaf_nodes()
name_map = {}
for app_label, migrations in list(changes.items()):
if not migrations:
continue
# Find the app label's current leaf node
app_leaf = None
for leaf in leaves:
if leaf[0] == app_label:
app_leaf = leaf
break
# Do they want an initial migration for this app?
if app_leaf is None and not self.questioner.ask_initial(app_label):
# They don't.
for migration in migrations:
name_map[(app_label, migration.name)] = (app_label, "__first__")
del changes[app_label]
continue
# Work out the next number in the sequence
if app_leaf is None:
next_number = 1
else:
next_number = (self.parse_number(app_leaf[1]) or 0) + 1
# Name each migration
for i, migration in enumerate(migrations):
if i == 0 and app_leaf:
migration.dependencies.append(app_leaf)
new_name_parts = ["%04i" % next_number]
if migration_name:
new_name_parts.append(migration_name)
elif i == 0 and not app_leaf:
new_name_parts.append("initial")
else:
new_name_parts.append(migration.suggest_name()[:100])
new_name = "_".join(new_name_parts)
name_map[(app_label, migration.name)] = (app_label, new_name)
next_number += 1
migration.name = new_name
# Now fix dependencies
for migrations in changes.values():
for migration in migrations:
migration.dependencies = [
name_map.get(d, d) for d in migration.dependencies
]
return changes
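    # Naming sketch (illustrative, mirrored by test_arrange_for_graph in the
    # test module): with an existing leaf ("testapp", "0002_foobar"), a new
    # testapp migration is renamed to "0003_<suggested name>" and gains the
    # leaf as a dependency; an app with no leaf gets "0001_initial", and an
    # explicit migration_name overrides the suggested part.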
def _trim_to_apps(self, changes, app_labels):
"""
        Take changes from arrange_for_graph() and a set of app labels, and
        return a modified set of changes which trims out as many migrations
        not in app_labels as possible. Note that some other migrations may
still be present as they may be required dependencies.
"""
# Gather other app dependencies in a first pass
app_dependencies = {}
for app_label, migrations in changes.items():
for migration in migrations:
for dep_app_label, name in migration.dependencies:
app_dependencies.setdefault(app_label, set()).add(dep_app_label)
required_apps = set(app_labels)
# Keep resolving till there's no change
old_required_apps = None
while old_required_apps != required_apps:
old_required_apps = set(required_apps)
required_apps.update(
*[app_dependencies.get(app_label, ()) for app_label in required_apps]
)
# Remove all migrations that aren't needed
for app_label in list(changes):
if app_label not in required_apps:
del changes[app_label]
return changes
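    # Illustrative: trimming changes for {"testapp", "otherapp", "thirdapp"}
    # to app_labels={"testapp"} keeps otherapp when a testapp migration
    # depends on it, but drops thirdapp (see test_trim_apps in the test
    # module).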
@classmethod
def parse_number(cls, name):
"""
Given a migration name, try to extract a number from the beginning of
it. For a squashed migration such as '0001_squashed_0004…', return the
second number. If no number is found, return None.
"""
if squashed_match := re.search(r".*_squashed_(\d+)", name):
return int(squashed_match[1])
match = re.match(r"^\d+", name)
if match:
return int(match[0])
return None
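
# Illustrative behaviour of parse_number() (doctest-style sketch; this is the
# MigrationAutodetector imported by the tests below):
#   parse_number("0001_initial")        -> 1
#   parse_number("0002_squashed_0005")  -> 5    (second number wins)
#   parse_number("custom_name")         -> None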
# ---- test_code ----
import copy
import functools
import re
from unittest import mock
from django.apps import apps
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.core.validators import RegexValidator, validate_slug
from django.db import connection, migrations, models
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.graph import MigrationGraph
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.questioner import MigrationQuestioner
from django.db.migrations.state import ModelState, ProjectState
from django.db.models.functions import Concat, Lower, Upper
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.utils import isolate_lru_cache
from .models import FoodManager, FoodQuerySet


class DeconstructibleObject:
"""
A custom deconstructible object.
"""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def deconstruct(self):
return (self.__module__ + "." + self.__class__.__name__, self.args, self.kwargs)
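
# Why these matter (illustrative): the autodetector compares field arguments
# by their deconstructed (path, args, kwargs) form, so two separately
# constructed but equal DeconstructibleObject(...) defaults produce no
# change, while differing args or kwargs (see the
# author_name_deconstructible_* states below) trigger an AlterField.
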
class BaseAutodetectorTests(TestCase):
def repr_changes(self, changes, include_dependencies=False):
output = ""
for app_label, migrations_ in sorted(changes.items()):
output += " %s:\n" % app_label
for migration in migrations_:
output += " %s\n" % migration.name
for operation in migration.operations:
output += " %s\n" % operation
if include_dependencies:
output += " Dependencies:\n"
if migration.dependencies:
for dep in migration.dependencies:
output += " %s\n" % (dep,)
else:
output += " None\n"
return output
def assertNumberMigrations(self, changes, app_label, number):
if len(changes.get(app_label, [])) != number:
self.fail(
"Incorrect number of migrations (%s) for %s (expected %s)\n%s"
% (
len(changes.get(app_label, [])),
app_label,
number,
self.repr_changes(changes),
)
)
def assertMigrationDependencies(self, changes, app_label, position, dependencies):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
if set(migration.dependencies) != set(dependencies):
self.fail(
"Migration dependencies mismatch for %s.%s (expected %s):\n%s"
% (
app_label,
migration.name,
dependencies,
self.repr_changes(changes, include_dependencies=True),
)
)
def assertOperationTypes(self, changes, app_label, position, types):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
real_types = [
operation.__class__.__name__ for operation in migration.operations
]
if types != real_types:
self.fail(
"Operation type mismatch for %s.%s (expected %s):\n%s"
% (
app_label,
migration.name,
types,
self.repr_changes(changes),
)
)
def assertOperationAttributes(
self, changes, app_label, position, operation_position, **attrs
):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
        if len(migration.operations) < operation_position + 1:
self.fail(
"No operation at index %s for %s.%s\n%s"
% (
operation_position,
app_label,
migration.name,
self.repr_changes(changes),
)
)
operation = migration.operations[operation_position]
for attr, value in attrs.items():
if getattr(operation, attr, None) != value:
self.fail(
"Attribute mismatch for %s.%s op #%s, %s (expected %r, got %r):\n%s"
% (
app_label,
migration.name,
operation_position,
attr,
value,
getattr(operation, attr, None),
self.repr_changes(changes),
)
)
def assertOperationFieldAttributes(
self, changes, app_label, position, operation_position, **attrs
):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
        if len(migration.operations) < operation_position + 1:
self.fail(
"No operation at index %s for %s.%s\n%s"
% (
operation_position,
app_label,
migration.name,
self.repr_changes(changes),
)
)
operation = migration.operations[operation_position]
if not hasattr(operation, "field"):
self.fail(
"No field attribute for %s.%s op #%s."
% (
app_label,
migration.name,
operation_position,
)
)
field = operation.field
for attr, value in attrs.items():
if getattr(field, attr, None) != value:
self.fail(
"Field attribute mismatch for %s.%s op #%s, field.%s (expected %r, "
"got %r):\n%s"
% (
app_label,
migration.name,
operation_position,
attr,
value,
getattr(field, attr, None),
self.repr_changes(changes),
)
)
def make_project_state(self, model_states):
"Shortcut to make ProjectStates from lists of predefined models"
project_state = ProjectState()
for model_state in model_states:
project_state.add_model(model_state.clone())
return project_state
def get_changes(self, before_states, after_states, questioner=None):
if not isinstance(before_states, ProjectState):
before_states = self.make_project_state(before_states)
if not isinstance(after_states, ProjectState):
after_states = self.make_project_state(after_states)
return MigrationAutodetector(
before_states,
after_states,
questioner,
)._detect_changes()
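
    # Typical usage (illustrative): subclasses diff two lists of ModelStates;
    # an empty "before" against a single model state yields one CreateModel
    # migration, e.g.
    #   changes = self.get_changes([], [self.author_empty])
    #   # -> {"testapp": [<Migration containing CreateModel("Author")>]}
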
class AutodetectorTests(BaseAutodetectorTests):
"""
Tests the migration autodetector.
"""
author_empty = ModelState(
"testapp", "Author", [("id", models.AutoField(primary_key=True))]
)
author_name = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
)
author_name_null = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, null=True)),
],
)
author_name_longer = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=400)),
],
)
author_name_renamed = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("names", models.CharField(max_length=200)),
],
)
author_name_default = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default="Ada Lovelace")),
],
)
author_name_db_default = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, db_default="Ada Lovelace")),
],
)
author_name_check_constraint = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
{
"constraints": [
models.CheckConstraint(
condition=models.Q(name__contains="Bob"), name="name_contains_bob"
)
]
},
)
author_dates_of_birth_auto_now = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("date_of_birth", models.DateField(auto_now=True)),
("date_time_of_birth", models.DateTimeField(auto_now=True)),
("time_of_birth", models.TimeField(auto_now=True)),
],
)
author_dates_of_birth_auto_now_add = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("date_of_birth", models.DateField(auto_now_add=True)),
("date_time_of_birth", models.DateTimeField(auto_now_add=True)),
("time_of_birth", models.TimeField(auto_now_add=True)),
],
)
author_name_deconstructible_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=DeconstructibleObject())),
],
)
author_name_deconstructible_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=DeconstructibleObject())),
],
)
author_name_deconstructible_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=models.IntegerField())),
],
)
author_name_deconstructible_4 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=models.IntegerField())),
],
)
author_name_deconstructible_list_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 123]
),
),
],
)
author_name_deconstructible_list_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 123]
),
),
],
)
author_name_deconstructible_list_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 999]
),
),
],
)
author_name_deconstructible_tuple_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 123)
),
),
],
)
author_name_deconstructible_tuple_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 123)
),
),
],
)
author_name_deconstructible_tuple_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 999)
),
),
],
)
author_name_deconstructible_dict_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 123},
),
),
],
)
author_name_deconstructible_dict_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 123},
),
),
],
)
author_name_deconstructible_dict_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 999},
),
),
],
)
author_name_nested_deconstructible_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_changed_arg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2-changed"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_extra_arg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
None,
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_changed_kwarg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c-changed")),
),
),
),
],
)
author_name_nested_deconstructible_extra_kwarg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
c=None,
),
),
),
],
)
author_custom_pk = ModelState(
"testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))]
)
author_with_biography_non_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField()),
("biography", models.TextField()),
],
)
author_with_biography_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(blank=True)),
("biography", models.TextField(blank=True)),
],
)
author_with_book = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
author_with_book_order_wrt = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={"order_with_respect_to": "book"},
)
author_renamed_with_book = ModelState(
"testapp",
"Writer",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
author_with_publisher_string = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("publisher_name", models.CharField(max_length=200)),
],
)
author_with_publisher = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
author_with_user = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("user", models.ForeignKey("auth.User", models.CASCADE)),
],
)
author_with_custom_user = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)),
],
)
author_proxy = ModelState(
"testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)
)
author_proxy_options = ModelState(
"testapp",
"AuthorProxy",
[],
{
"proxy": True,
"verbose_name": "Super Author",
},
("testapp.author",),
)
author_proxy_notproxy = ModelState(
"testapp", "AuthorProxy", [], {}, ("testapp.author",)
)
author_proxy_third = ModelState(
"thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)
)
author_proxy_third_notproxy = ModelState(
"thirdapp", "AuthorProxy", [], {}, ("testapp.author",)
)
author_proxy_proxy = ModelState(
"testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy",)
)
author_unmanaged = ModelState(
"testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author",)
)
author_unmanaged_managed = ModelState(
"testapp", "AuthorUnmanaged", [], {}, ("testapp.author",)
)
author_unmanaged_default_pk = ModelState(
"testapp", "Author", [("id", models.AutoField(primary_key=True))]
)
author_unmanaged_custom_pk = ModelState(
"testapp",
"Author",
[
("pk_field", models.IntegerField(primary_key=True)),
],
)
author_with_m2m = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.ManyToManyField("testapp.Publisher")),
],
)
author_with_m2m_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.ManyToManyField("testapp.Publisher", blank=True)),
],
)
author_with_m2m_through = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField("testapp.Publisher", through="testapp.Contract"),
),
],
)
author_with_renamed_m2m_through = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField("testapp.Publisher", through="testapp.Deal"),
),
],
)
author_with_former_m2m = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.CharField(max_length=100)),
],
)
author_with_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{
"permissions": [("can_hire", "Can hire")],
"verbose_name": "Authi",
},
)
author_with_db_table_comment = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table_comment": "Table comment"},
)
author_with_db_table_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_one"},
)
author_with_new_db_table_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_two"},
)
author_renamed_with_db_table_options = ModelState(
"testapp",
"NewAuthor",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_one"},
)
author_renamed_with_new_db_table_options = ModelState(
"testapp",
"NewAuthor",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_three"},
)
contract = ModelState(
"testapp",
"Contract",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
contract_renamed = ModelState(
"testapp",
"Deal",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
publisher = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_author = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_aardvark_author = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Aardvark", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_book = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("otherapp.Book", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
other_pony = ModelState(
"otherapp",
"Pony",
[
("id", models.AutoField(primary_key=True)),
],
)
other_pony_food = ModelState(
"otherapp",
"Pony",
[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
other_stable = ModelState(
"otherapp", "Stable", [("id", models.AutoField(primary_key=True))]
)
third_thing = ModelState(
"thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]
)
book = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_proxy_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("thirdapp.AuthorProxy", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_proxy_proxy_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.AAuthorProxyProxy", models.CASCADE)),
],
)
book_migrations_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("migrations.UnmigratedModel", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_no_author_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.IntegerField()),
("title", models.CharField(max_length=200)),
],
)
book_with_no_author = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
],
)
book_with_author_renamed = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Writer", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_field_and_author_renamed = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("writer", models.ForeignKey("testapp.Writer", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_multiple_authors = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("authors", models.ManyToManyField("testapp.Author")),
("title", models.CharField(max_length=200)),
],
)
book_with_multiple_authors_through_attribution = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
(
"authors",
models.ManyToManyField(
"testapp.Author", through="otherapp.Attribution"
),
),
("title", models.CharField(max_length=200)),
],
)
book_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(fields=["author", "title"], name="book_title_author_idx")
],
},
)
book_unordered_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(fields=["title", "author"], name="book_author_title_idx")
],
},
)
book_unique_together = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("author", "title")},
},
)
book_unique_together_2 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "author")},
},
)
book_unique_together_3 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "newfield")},
},
)
book_unique_together_4 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield2", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "newfield2")},
},
)
attribution = ModelState(
"otherapp",
"Attribution",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
edition = ModelState(
"thirdapp",
"Edition",
[
("id", models.AutoField(primary_key=True)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
custom_user = ModelState(
"thirdapp",
"CustomUser",
[
("id", models.AutoField(primary_key=True)),
("username", models.CharField(max_length=255)),
],
bases=(AbstractBaseUser,),
)
custom_user_no_inherit = ModelState(
"thirdapp",
"CustomUser",
[
("id", models.AutoField(primary_key=True)),
("username", models.CharField(max_length=255)),
],
)
aardvark = ModelState(
"thirdapp", "Aardvark", [("id", models.AutoField(primary_key=True))]
)
aardvark_testapp = ModelState(
"testapp", "Aardvark", [("id", models.AutoField(primary_key=True))]
)
aardvark_based_on_author = ModelState(
"testapp", "Aardvark", [], bases=("testapp.Author",)
)
aardvark_pk_fk_author = ModelState(
"testapp",
"Aardvark",
[
(
"id",
models.OneToOneField(
"testapp.Author", models.CASCADE, primary_key=True
),
),
],
)
knight = ModelState("eggs", "Knight", [("id", models.AutoField(primary_key=True))])
rabbit = ModelState(
"eggs",
"Rabbit",
[
("id", models.AutoField(primary_key=True)),
("knight", models.ForeignKey("eggs.Knight", models.CASCADE)),
("parent", models.ForeignKey("eggs.Rabbit", models.CASCADE)),
],
{
"unique_together": {("parent", "knight")},
"indexes": [
models.Index(
fields=["parent", "knight"], name="rabbit_circular_fk_index"
)
],
},
)
def test_arrange_for_graph(self):
"""Tests auto-naming of migrations for graph matching."""
# Make a fake graph
graph = MigrationGraph()
graph.add_node(("testapp", "0001_initial"), None)
graph.add_node(("testapp", "0002_foobar"), None)
graph.add_node(("otherapp", "0001_initial"), None)
graph.add_dependency(
"testapp.0002_foobar",
("testapp", "0002_foobar"),
("testapp", "0001_initial"),
)
graph.add_dependency(
"testapp.0002_foobar",
("testapp", "0002_foobar"),
("otherapp", "0001_initial"),
)
# Use project state to make a new migration change set
before = self.make_project_state([self.publisher, self.other_pony])
after = self.make_project_state(
[
self.author_empty,
self.publisher,
self.other_pony,
self.other_stable,
]
)
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes()
# Run through arrange_for_graph
changes = autodetector.arrange_for_graph(changes, graph)
# Make sure there's a new name, deps match, etc.
self.assertEqual(changes["testapp"][0].name, "0003_author")
self.assertEqual(
changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]
)
self.assertEqual(changes["otherapp"][0].name, "0002_stable")
self.assertEqual(
changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]
)
def test_arrange_for_graph_with_multiple_initial(self):
# Make a fake graph.
graph = MigrationGraph()
# Use project state to make a new migration change set.
before = self.make_project_state([])
after = self.make_project_state(
[self.author_with_book, self.book, self.attribution]
)
autodetector = MigrationAutodetector(
before, after, MigrationQuestioner({"ask_initial": True})
)
changes = autodetector._detect_changes()
changes = autodetector.arrange_for_graph(changes, graph)
self.assertEqual(changes["otherapp"][0].name, "0001_initial")
self.assertEqual(changes["otherapp"][0].dependencies, [])
self.assertEqual(changes["otherapp"][1].name, "0002_initial")
self.assertCountEqual(
changes["otherapp"][1].dependencies,
[("testapp", "0001_initial"), ("otherapp", "0001_initial")],
)
self.assertEqual(changes["testapp"][0].name, "0001_initial")
self.assertEqual(
changes["testapp"][0].dependencies, [("otherapp", "0001_initial")]
)
def test_trim_apps(self):
"""
Trim does not remove dependencies but does remove unwanted apps.
"""
# Use project state to make a new migration change set
before = self.make_project_state([])
after = self.make_project_state(
[self.author_empty, self.other_pony, self.other_stable, self.third_thing]
)
autodetector = MigrationAutodetector(
before, after, MigrationQuestioner({"ask_initial": True})
)
changes = autodetector._detect_changes()
# Run through arrange_for_graph
graph = MigrationGraph()
changes = autodetector.arrange_for_graph(changes, graph)
changes["testapp"][0].dependencies.append(("otherapp", "0001_initial"))
changes = autodetector._trim_to_apps(changes, {"testapp"})
# Make sure there's the right set of migrations
self.assertEqual(changes["testapp"][0].name, "0001_initial")
self.assertEqual(changes["otherapp"][0].name, "0001_initial")
self.assertNotIn("thirdapp", changes)
def test_custom_migration_name(self):
"""Tests custom naming of migrations for graph matching."""
# Make a fake graph
graph = MigrationGraph()
graph.add_node(("testapp", "0001_initial"), None)
graph.add_node(("testapp", "0002_foobar"), None)
graph.add_node(("otherapp", "0001_initial"), None)
graph.add_dependency(
"testapp.0002_foobar",
("testapp", "0002_foobar"),
("testapp", "0001_initial"),
)
# Use project state to make a new migration change set
before = self.make_project_state([])
after = self.make_project_state(
[self.author_empty, self.other_pony, self.other_stable]
)
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes()
# Run through arrange_for_graph
migration_name = "custom_name"
changes = autodetector.arrange_for_graph(changes, graph, migration_name)
# Make sure there's a new name, deps match, etc.
self.assertEqual(changes["testapp"][0].name, "0003_%s" % migration_name)
self.assertEqual(
changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]
)
self.assertEqual(changes["otherapp"][0].name, "0002_%s" % migration_name)
self.assertEqual(
changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]
)
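    # A small additional example (not in the original suite), exercising the
    # numbering helper that arrange_for_graph() relies on:
    def test_parse_number_examples(self):
        self.assertEqual(MigrationAutodetector.parse_number("0001_initial"), 1)
        self.assertEqual(
            MigrationAutodetector.parse_number("0002_squashed_0005"), 5
        )
        self.assertIsNone(MigrationAutodetector.parse_number("custom_name"))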
def test_new_model(self):
"""Tests autodetection of new models."""
changes = self.get_changes([], [self.other_pony_food])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Pony")
self.assertEqual(
[name for name, mgr in changes["otherapp"][0].operations[0].managers],
["food_qs", "food_mgr", "food_mgr_kwargs"],
)
def test_old_model(self):
"""Tests deletion of old models."""
changes = self.get_changes([self.author_empty], [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
def test_add_field(self):
"""Tests autodetection of new fields."""
changes = self.get_changes([self.author_empty], [self.author_name])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_not_null_field_with_db_default(self, mocked_ask_method):
changes = self.get_changes([self.author_empty], [self.author_name_db_default])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, db_default="Ada Lovelace"
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_date_fields_with_auto_now_not_asking_for_default(
self, mocked_ask_method
):
changes = self.get_changes(
[self.author_empty], [self.author_dates_of_birth_auto_now]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AddField", "AddField"]
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now=True)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_date_fields_with_auto_now_add_not_asking_for_null_addition(
self, mocked_ask_method
):
changes = self.get_changes(
[self.author_empty], [self.author_dates_of_birth_auto_now_add]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AddField", "AddField"]
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_auto_now_add_addition"
)
def test_add_date_fields_with_auto_now_add_asking_for_default(
self, mocked_ask_method
):
changes = self.get_changes(
[self.author_empty], [self.author_dates_of_birth_auto_now_add]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AddField", "AddField"]
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)
self.assertEqual(mocked_ask_method.call_count, 3)
def test_add_field_before_generated_field(self):
initial_state = ModelState(
"testapp",
"Author",
[
("name", models.CharField(max_length=20)),
],
)
updated_state = ModelState(
"testapp",
"Author",
[
("name", models.CharField(max_length=20)),
("surname", models.CharField(max_length=20)),
(
"lower_full_name",
models.GeneratedField(
expression=Concat(Lower("name"), Lower("surname")),
output_field=models.CharField(max_length=30),
db_persist=True,
),
),
],
)
changes = self.get_changes([initial_state], [updated_state])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField", "AddField"])
self.assertOperationFieldAttributes(
changes, "testapp", 0, 1, expression=Concat(Lower("name"), Lower("surname"))
)
def test_add_fk_before_generated_field(self):
initial_state = ModelState(
"testapp",
"Author",
[
("name", models.CharField(max_length=20)),
],
)
updated_state = [
ModelState(
"testapp",
"Publisher",
[
("name", models.CharField(max_length=20)),
],
),
ModelState(
"testapp",
"Author",
[
("name", models.CharField(max_length=20)),
(
"publisher",
models.ForeignKey("testapp.Publisher", models.CASCADE),
),
(
"lower_full_name",
models.GeneratedField(
expression=Concat("name", "publisher_id"),
output_field=models.CharField(max_length=20),
db_persist=True,
),
),
],
),
]
changes = self.get_changes([initial_state], updated_state)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "AddField", "AddField"]
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 2, expression=Concat("name", "publisher_id")
)
def test_remove_field(self):
"""Tests autodetection of removed fields."""
changes = self.get_changes([self.author_name], [self.author_empty])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
def test_remove_generated_field_before_its_base_field(self):
initial_state = [
ModelState(
"testapp",
"Author",
[
("name", models.CharField(max_length=20)),
(
"upper_name",
models.GeneratedField(
expression=Upper("name"),
db_persist=True,
output_field=models.CharField(),
),
),
],
),
]
updated_state = [ModelState("testapp", "Author", [])]
changes = self.get_changes(initial_state, updated_state)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "RemoveField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="upper_name")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="name")
def test_remove_generated_field_before_multiple_base_fields(self):
initial_state = [
ModelState(
"testapp",
"Author",
[
("first_name", models.CharField(max_length=20)),
("last_name", models.CharField(max_length=20)),
(
"full_name",
models.GeneratedField(
expression=Concat("first_name", "last_name"),
db_persist=True,
output_field=models.CharField(),
),
),
],
),
]
updated_state = [ModelState("testapp", "Author", [])]
changes = self.get_changes(initial_state, updated_state)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RemoveField", "RemoveField", "RemoveField"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="full_name")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="first_name")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="last_name")
def test_remove_generated_field_and_one_of_multiple_base_fields(self):
initial_state = [
ModelState(
"testapp",
"Author",
[
("first_name", models.CharField(max_length=20)),
("last_name", models.CharField(max_length=20)),
(
"full_name",
models.GeneratedField(
expression=Concat("first_name", "last_name"),
db_persist=True,
output_field=models.CharField(),
),
),
],
),
]
# Only remove full_name and first_name.
updated_state = [
ModelState(
"testapp",
"Author",
[
("last_name", models.CharField(max_length=20)),
],
),
]
changes = self.get_changes(initial_state, updated_state)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["RemoveField", "RemoveField"],
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="full_name")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="first_name")
def test_alter_field(self):
"""Tests autodetection of new fields."""
changes = self.get_changes([self.author_name], [self.author_name_longer])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
def test_supports_functools_partial(self):
def _content_file_name(instance, filename, key, **kwargs):
return "{}/{}".format(instance, filename)
def content_file_name(key, **kwargs):
return functools.partial(_content_file_name, key, **kwargs)
# An unchanged partial reference.
before = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200, upload_to=content_file_name("file")
),
),
],
)
]
after = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200, upload_to=content_file_name("file")
),
),
],
)
]
changes = self.get_changes(before, after)
self.assertNumberMigrations(changes, "testapp", 0)
# A changed partial reference.
args_changed = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200, upload_to=content_file_name("other-file")
),
),
],
)
]
changes = self.get_changes(before, args_changed)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
# Can't use assertOperationFieldAttributes because we need the
# deconstructed version, i.e., the exploded func/args/keywords rather
# than the partial: we don't care if it's not the same instance of the
# partial, only if it's the same source function, args, and keywords.
value = changes["testapp"][0].operations[0].field.upload_to
self.assertEqual(
(_content_file_name, ("other-file",), {}),
(value.func, value.args, value.keywords),
)
kwargs_changed = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200,
upload_to=content_file_name("file", spam="eggs"),
),
),
],
)
]
changes = self.get_changes(before, kwargs_changed)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
value = changes["testapp"][0].operations[0].field.upload_to
self.assertEqual(
(_content_file_name, ("file",), {"spam": "eggs"}),
(value.func, value.args, value.keywords),
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
        side_effect=AssertionError("Should not have prompted for not null alteration"),
)
def test_alter_field_to_not_null_with_default(self, mocked_ask_method):
"""
#23609 - Tests autodetection of nullable to non-nullable alterations.
"""
changes = self.get_changes([self.author_name_null], [self.author_name_default])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, default="Ada Lovelace"
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
side_effect=AssertionError("Should not have prompted for not null alteration"),
)
def test_alter_field_to_not_null_with_db_default(self, mocked_ask_method):
changes = self.get_changes(
[self.author_name_null], [self.author_name_db_default]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, db_default="Ada Lovelace"
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition"
)
def test_add_auto_field_does_not_request_default(self, mocked_ask_method):
initial_state = ModelState(
"testapp",
"Author",
[
("pkfield", models.IntegerField(primary_key=True)),
],
)
for auto_field in [
models.AutoField,
models.BigAutoField,
models.SmallAutoField,
]:
with self.subTest(auto_field=auto_field):
updated_state = ModelState(
"testapp",
"Author",
[
("id", auto_field(primary_key=True)),
("pkfield", models.IntegerField(primary_key=False)),
],
)
self.get_changes([initial_state], [updated_state])
mocked_ask_method.assert_not_called()
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
return_value=models.NOT_PROVIDED,
)
def test_alter_field_to_not_null_without_default(self, mocked_ask_method):
"""
#23609 - Tests autodetection of nullable to non-nullable alterations.
"""
changes = self.get_changes([self.author_name_null], [self.author_name])
self.assertEqual(mocked_ask_method.call_count, 1)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, default=models.NOT_PROVIDED
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
return_value="Some Name",
)
def test_alter_field_to_not_null_oneoff_default(self, mocked_ask_method):
"""
#23609 - Tests autodetection of nullable to non-nullable alterations.
"""
changes = self.get_changes([self.author_name_null], [self.author_name])
self.assertEqual(mocked_ask_method.call_count, 1)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=False
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, default="Some Name"
)
def test_rename_field(self):
"""Tests autodetection of renamed fields."""
changes = self.get_changes(
[self.author_name],
[self.author_name_renamed],
MigrationQuestioner({"ask_rename": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="name", new_name="names"
)
def test_rename_field_foreign_key_to_field(self):
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField(unique=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
(
"foo",
models.ForeignKey("app.Foo", models.CASCADE, to_field="field"),
),
],
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("renamed_field", models.IntegerField(unique=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
(
"foo",
models.ForeignKey(
"app.Foo", models.CASCADE, to_field="renamed_field"
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "app", 0, 0, old_name="field", new_name="renamed_field"
)
def test_foreign_object_from_to_fields_list(self):
author_state = ModelState(
"app",
"Author",
[("id", models.AutoField(primary_key=True))],
)
book_state = ModelState(
"app",
"Book",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField()),
("author_id", models.IntegerField()),
(
"author",
models.ForeignObject(
"app.Author",
models.CASCADE,
from_fields=["author_id"],
to_fields=["id"],
),
),
],
)
book_state_copy = copy.deepcopy(book_state)
changes = self.get_changes(
[author_state, book_state],
[author_state, book_state_copy],
)
self.assertEqual(changes, {})
def test_rename_foreign_object_fields(self):
fields = ("first", "second")
renamed_fields = ("first_renamed", "second_renamed")
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
],
options={"unique_together": {fields}},
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
(
"foo",
models.ForeignObject(
"app.Foo",
models.CASCADE,
from_fields=fields,
to_fields=fields,
),
),
],
),
]
# Case 1: to_fields renames.
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("first_renamed", models.IntegerField()),
("second_renamed", models.IntegerField()),
],
options={"unique_together": {renamed_fields}},
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
(
"foo",
models.ForeignObject(
"app.Foo",
models.CASCADE,
from_fields=fields,
to_fields=renamed_fields,
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(
changes, "app", 0, ["RenameField", "RenameField", "AlterUniqueTogether"]
)
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="foo",
old_name="first",
new_name="first_renamed",
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="foo",
old_name="second",
new_name="second_renamed",
)
# Case 2: from_fields renames.
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
],
options={"unique_together": {fields}},
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("first_renamed", models.IntegerField()),
("second_renamed", models.IntegerField()),
(
"foo",
models.ForeignObject(
"app.Foo",
models.CASCADE,
from_fields=renamed_fields,
to_fields=fields,
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RenameField", "RenameField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="bar",
old_name="first",
new_name="first_renamed",
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="bar",
old_name="second",
new_name="second_renamed",
)
def test_rename_referenced_primary_key(self):
before = [
ModelState(
"app",
"Foo",
[
("id", models.CharField(primary_key=True, serialize=False)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("foo", models.ForeignKey("app.Foo", models.CASCADE)),
],
),
]
after = [
ModelState(
"app",
"Foo",
[("renamed_id", models.CharField(primary_key=True, serialize=False))],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("foo", models.ForeignKey("app.Foo", models.CASCADE)),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "app", 0, 0, old_name="id", new_name="renamed_id"
)
def test_rename_field_preserved_db_column(self):
"""
        RenameField is used if a field is renamed and a db_column equal to
        the old field's column is added.
"""
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField()),
],
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("renamed_field", models.IntegerField(db_column="field")),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
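        # Adding db_column="field" keeps the old column, so the rename is a
        # no-op at the database level; the autodetector first emits an
        # AlterField that sets the explicit db_column, then the RenameField.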
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["AlterField", "RenameField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="foo",
name="field",
)
self.assertEqual(
changes["app"][0].operations[0].field.deconstruct(),
(
"field",
"django.db.models.IntegerField",
[],
{"db_column": "field"},
),
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="foo",
old_name="field",
new_name="renamed_field",
)
def test_rename_related_field_preserved_db_column(self):
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("foo", models.ForeignKey("app.Foo", models.CASCADE)),
],
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
(
"renamed_foo",
models.ForeignKey(
"app.Foo", models.CASCADE, db_column="foo_id"
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["AlterField", "RenameField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="bar",
name="foo",
)
self.assertEqual(
changes["app"][0].operations[0].field.deconstruct(),
(
"foo",
"django.db.models.ForeignKey",
[],
{"to": "app.foo", "on_delete": models.CASCADE, "db_column": "foo_id"},
),
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="bar",
old_name="foo",
new_name="renamed_foo",
)
def test_rename_field_preserve_db_column_preserve_constraint(self):
"""
Renaming a field that already had a db_column attribute and a
constraint generates two no-op operations: RenameField and
AlterConstraint.
"""
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField(db_column="full_field1_name")),
("field2", models.IntegerField()),
],
options={
"constraints": [
models.UniqueConstraint(
fields=["field", "field2"],
name="unique_field",
),
],
},
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
(
"full_field1_name",
models.IntegerField(db_column="full_field1_name"),
),
(
"field2",
models.IntegerField(),
),
],
options={
"constraints": [
models.UniqueConstraint(
fields=["full_field1_name", "field2"],
name="unique_field",
),
],
},
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
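        # Both operations are no-ops at the database level: the column name
        # and the constraint definition are unchanged.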
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RenameField", "AlterConstraint"])
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="foo",
name="unique_field",
)
self.assertEqual(
changes["app"][0].operations[1].deconstruct(),
(
"AlterConstraint",
[],
{
"constraint": after[0].options["constraints"][0],
"model_name": "foo",
"name": "unique_field",
},
),
)
def test_rename_field_without_db_column_recreate_constraint(self):
"""Renaming a field without given db_column recreates a constraint."""
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField()),
],
options={
"constraints": [
models.UniqueConstraint(
fields=["field"],
name="unique_field",
),
],
},
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
(
"full_field1_name",
models.IntegerField(),
),
],
options={
"constraints": [
models.UniqueConstraint(
fields=["full_field1_name"],
name="unique_field",
),
],
},
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
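        # Without an explicit db_column the underlying column is renamed, so
        # the constraint has to be dropped and re-added around the rename.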
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(
changes, "app", 0, ["RemoveConstraint", "RenameField", "AddConstraint"]
)
def test_rename_field_preserve_db_column_recreate_constraint(self):
"""Removing a field from the constraint triggers recreation."""
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("field1", models.IntegerField(db_column="field1")),
("field2", models.IntegerField(db_column="field2")),
],
options={
"constraints": [
models.UniqueConstraint(
fields=["field1", "field2"],
name="unique_fields",
),
],
},
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("renamed_field1", models.IntegerField(db_column="field1")),
("renamed_field2", models.IntegerField(db_column="field2")),
],
options={
"constraints": [
models.UniqueConstraint(
fields=["renamed_field1"],
name="unique_fields",
),
],
},
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
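        # The constraint no longer covers renamed_field2, so the changed
        # field set forces the constraint to be dropped and recreated.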
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(
changes,
"app",
0,
[
"RemoveConstraint",
"RenameField",
"RenameField",
"AddConstraint",
],
)
def test_rename_field_with_renamed_model(self):
changes = self.get_changes(
[self.author_name],
[
ModelState(
"testapp",
"RenamedAuthor",
[
("id", models.AutoField(primary_key=True)),
("renamed_name", models.CharField(max_length=200)),
],
),
],
MigrationQuestioner({"ask_rename_model": True, "ask_rename": True}),
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel", "RenameField"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
old_name="Author",
new_name="RenamedAuthor",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
1,
old_name="name",
new_name="renamed_name",
)
def test_rename_model(self):
"""Tests autodetection of renamed models."""
changes = self.get_changes(
[self.author_with_book, self.book],
[self.author_renamed_with_book, self.book_with_author_renamed],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="Writer"
)
# Now that RenameModel handles related fields too, there should be
# no AlterField for the related field.
self.assertNumberMigrations(changes, "otherapp", 0)
def test_rename_model_case(self):
"""
Model name is case-insensitive. Changing case doesn't lead to any
autodetected operations.
"""
author_renamed = ModelState(
"testapp",
"author",
[
("id", models.AutoField(primary_key=True)),
],
)
changes = self.get_changes(
[self.author_empty, self.book],
[author_renamed, self.book],
questioner=MigrationQuestioner({"ask_rename_model": True}),
)
self.assertNumberMigrations(changes, "testapp", 0)
self.assertNumberMigrations(changes, "otherapp", 0)
def test_renamed_referenced_m2m_model_case(self):
publisher_renamed = ModelState(
"testapp",
"publisher",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
)
changes = self.get_changes(
[self.publisher, self.author_with_m2m],
[publisher_renamed, self.author_with_m2m],
questioner=MigrationQuestioner({"ask_rename_model": True}),
)
self.assertNumberMigrations(changes, "testapp", 0)
self.assertNumberMigrations(changes, "otherapp", 0)
def test_rename_m2m_through_model(self):
"""
Tests autodetection of renamed models that are used in M2M relations as
through models.
"""
changes = self.get_changes(
[self.author_with_m2m_through, self.publisher, self.contract],
[
self.author_with_renamed_m2m_through,
self.publisher,
self.contract_renamed,
],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Contract", new_name="Deal"
)
def test_rename_model_with_renamed_rel_field(self):
"""
Tests autodetection of renamed models while simultaneously renaming one
of the fields that relate to the renamed model.
"""
changes = self.get_changes(
[self.author_with_book, self.book],
[self.author_renamed_with_book, self.book_with_field_and_author_renamed],
MigrationQuestioner({"ask_rename": True, "ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="Writer"
)
        # Right number/type of migrations for the related field rename?
        # The AlterField is already taken care of by RenameModel.
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, old_name="author", new_name="writer"
)
def test_rename_model_with_fks_in_different_position(self):
"""
#24537 - The order of fields in a model does not influence
the RenameModel detection.
"""
before = [
ModelState(
"testapp",
"EntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"testapp",
"EntityB",
[
("id", models.AutoField(primary_key=True)),
("some_label", models.CharField(max_length=255)),
("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
],
),
]
after = [
ModelState(
"testapp",
"EntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"testapp",
"RenamedEntityB",
[
("id", models.AutoField(primary_key=True)),
("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
("some_label", models.CharField(max_length=255)),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename_model": True})
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="EntityB", new_name="RenamedEntityB"
)
def test_rename_model_reverse_relation_dependencies(self):
"""
The migration to rename a model pointed to by a foreign key in another
app must run after the other app's migration that adds the foreign key
        with the model's original name. Therefore, the renaming migration
        has a dependency on that other migration.
"""
before = [
ModelState(
"testapp",
"EntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"otherapp",
"EntityB",
[
("id", models.AutoField(primary_key=True)),
("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
],
),
]
after = [
ModelState(
"testapp",
"RenamedEntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"otherapp",
"EntityB",
[
("id", models.AutoField(primary_key=True)),
(
"entity_a",
models.ForeignKey("testapp.RenamedEntityA", models.CASCADE),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename_model": True})
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertMigrationDependencies(
changes, "testapp", 0, [("otherapp", "__first__")]
)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="EntityA", new_name="RenamedEntityA"
)
def test_fk_dependency(self):
"""Having a ForeignKey automatically adds a dependency."""
# Note that testapp (author) has no dependencies,
# otherapp (book) depends on testapp (author),
# thirdapp (edition) depends on otherapp (book)
changes = self.get_changes([], [self.author_name, self.book, self.edition])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertMigrationDependencies(changes, "testapp", 0, [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("testapp", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(changes, "thirdapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="Edition")
self.assertMigrationDependencies(
changes, "thirdapp", 0, [("otherapp", "auto_1")]
)
def test_proxy_fk_dependency(self):
"""FK dependencies still work on proxy models."""
# Note that testapp (author) has no dependencies,
        # otherapp (book) depends on thirdapp (authorproxy)
changes = self.get_changes(
[], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertMigrationDependencies(changes, "testapp", 0, [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("thirdapp", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(changes, "thirdapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="AuthorProxy")
self.assertMigrationDependencies(
changes, "thirdapp", 0, [("testapp", "auto_1")]
)
def test_same_app_no_fk_dependency(self):
"""
        A migration with a FK between two models of the same app
        does not have a dependency on itself.
"""
changes = self.get_changes([], [self.author_with_publisher, self.publisher])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
self.assertMigrationDependencies(changes, "testapp", 0, [])
def test_circular_fk_dependency(self):
"""
Having a circular ForeignKey dependency automatically
resolves the situation into 2 migrations on one side and 1 on the
other.
"""
changes = self.get_changes(
[], [self.author_with_book, self.book, self.publisher_with_book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertMigrationDependencies(
changes, "testapp", 0, [("otherapp", "auto_1")]
)
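        # Book's FK to Author is deferred to a second otherapp migration
        # (AddField) so the circular dependency between the apps is broken.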
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 2)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationTypes(changes, "otherapp", 1, ["AddField"])
self.assertMigrationDependencies(changes, "otherapp", 0, [])
self.assertMigrationDependencies(
changes, "otherapp", 1, [("otherapp", "auto_1"), ("testapp", "auto_1")]
)
        # Both split migrations should be `initial`.
self.assertTrue(changes["otherapp"][0].initial)
self.assertTrue(changes["otherapp"][1].initial)
def test_same_app_circular_fk_dependency(self):
"""
        A migration with a FK between two models of the same app does
        not have a dependency on itself.
"""
changes = self.get_changes(
[], [self.author_with_publisher, self.publisher_with_author]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "CreateModel", "AddField"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher")
self.assertMigrationDependencies(changes, "testapp", 0, [])
def test_same_app_circular_fk_dependency_with_unique_together_and_indexes(self):
"""
        #22275 - A migration with a circular FK dependency does not try to
        create the unique_together constraint and indexes before all the
        required fields have been created.
"""
changes = self.get_changes([], [self.knight, self.rabbit])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "eggs", 1)
self.assertOperationTypes(
changes,
"eggs",
0,
["CreateModel", "CreateModel"],
)
self.assertNotIn("unique_together", changes["eggs"][0].operations[0].options)
self.assertMigrationDependencies(changes, "eggs", 0, [])
def test_alter_db_table_add(self):
"""Tests detection for adding db_table in model's options."""
changes = self.get_changes(
[self.author_empty], [self.author_with_db_table_options]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table="author_one"
)
def test_alter_db_table_change(self):
"""Tests detection for changing db_table in model's options'."""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_with_new_db_table_options]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table="author_two"
)
def test_alter_db_table_remove(self):
"""Tests detection for removing db_table in model's options."""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_empty]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table=None
)
def test_alter_db_table_no_changes(self):
"""
        AlterModelTable isn't generated if no changes have been made to
        db_table.
"""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_with_db_table_options]
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
def test_keep_db_table_with_model_change(self):
"""
        When a model is renamed but db_table stays as-is, the autodetector
        must not create more than one operation.
"""
changes = self.get_changes(
[self.author_with_db_table_options],
[self.author_renamed_with_db_table_options],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor"
)
def test_alter_db_table_with_model_change(self):
"""
        When both the model name and db_table change, the autodetector must
        create two operations.
"""
changes = self.get_changes(
[self.author_with_db_table_options],
[self.author_renamed_with_new_db_table_options],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RenameModel", "AlterModelTable"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor"
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="newauthor", table="author_three"
)
def test_alter_db_table_comment_add(self):
changes = self.get_changes(
[self.author_empty], [self.author_with_db_table_comment]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table_comment="Table comment"
)
def test_alter_db_table_comment_change(self):
author_with_new_db_table_comment = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table_comment": "New table comment"},
)
changes = self.get_changes(
[self.author_with_db_table_comment],
[author_with_new_db_table_comment],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
table_comment="New table comment",
)
def test_alter_db_table_comment_remove(self):
changes = self.get_changes(
[self.author_with_db_table_comment],
[self.author_empty],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", db_table_comment=None
)
def test_alter_db_table_comment_no_changes(self):
changes = self.get_changes(
[self.author_with_db_table_comment],
[self.author_with_db_table_comment],
)
self.assertNumberMigrations(changes, "testapp", 0)
def test_identical_regex_doesnt_alter(self):
from_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True,
validators=[
RegexValidator(
re.compile("^[-a-zA-Z0-9_]+\\Z"),
"Enter a valid “slug” consisting of letters, numbers, "
"underscores or hyphens.",
"invalid",
)
],
),
)
],
)
to_state = ModelState(
"testapp",
"model",
[("id", models.AutoField(primary_key=True, validators=[validate_slug]))],
)
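        # validate_slug is a RegexValidator over the same pattern, message,
        # and code, so the two validators deconstruct identically.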
changes = self.get_changes([from_state], [to_state])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 0)
def test_different_regex_does_alter(self):
from_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True,
validators=[
RegexValidator(
re.compile("^[a-z]+\\Z", 32),
"Enter a valid “slug” consisting of letters, numbers, "
"underscores or hyphens.",
"invalid",
)
],
),
)
],
)
to_state = ModelState(
"testapp",
"model",
[("id", models.AutoField(primary_key=True, validators=[validate_slug]))],
)
changes = self.get_changes([from_state], [to_state])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
def test_alter_regex_string_to_compiled_regex(self):
regex_string = "^[a-z]+$"
from_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True, validators=[RegexValidator(regex_string)]
),
)
],
)
to_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True,
validators=[RegexValidator(re.compile(regex_string))],
),
)
],
)
changes = self.get_changes([from_state], [to_state])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
def test_empty_unique_together(self):
"""Empty unique_together shouldn't generate a migration."""
# Explicitly testing for not specified, since this is the case after
# a CreateModel operation w/o any definition on the original model
model_state_not_specified = ModelState(
"a", "model", [("id", models.AutoField(primary_key=True))]
)
# Explicitly testing for None, since this was the issue in #23452 after
# an AlterUniqueTogether operation with e.g. () as value
model_state_none = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"unique_together": None,
},
)
# Explicitly testing for the empty set, since we now always have sets.
# During removal (('col1', 'col2'),) --> () this becomes set([])
model_state_empty = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"unique_together": set(),
},
)
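        # Assert that no operations are generated for a given state
        # transition; all combinations below should be no-ops.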
def test(from_state, to_state, msg):
changes = self.get_changes([from_state], [to_state])
if changes:
ops = ", ".join(
o.__class__.__name__ for o in changes["a"][0].operations
)
self.fail("Created operation(s) %s from %s" % (ops, msg))
tests = (
(
model_state_not_specified,
model_state_not_specified,
'"not specified" to "not specified"',
),
(model_state_not_specified, model_state_none, '"not specified" to "None"'),
(
model_state_not_specified,
model_state_empty,
'"not specified" to "empty"',
),
(model_state_none, model_state_not_specified, '"None" to "not specified"'),
(model_state_none, model_state_none, '"None" to "None"'),
(model_state_none, model_state_empty, '"None" to "empty"'),
(
model_state_empty,
model_state_not_specified,
'"empty" to "not specified"',
),
(model_state_empty, model_state_none, '"empty" to "None"'),
(model_state_empty, model_state_empty, '"empty" to "empty"'),
)
for t in tests:
test(*t)
def test_create_model_with_indexes(self):
"""Test creation of new model with indexes already defined."""
added_index = models.Index(
fields=["name"], name="create_model_with_indexes_idx"
)
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
{
"indexes": [added_index],
},
)
changes = self.get_changes([], [author])
# Right number of migrations?
self.assertEqual(len(changes["otherapp"]), 1)
# Right number of actions?
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 1)
# Right actions order?
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Author")
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="Author",
options={"indexes": [added_index]},
)
def test_add_indexes(self):
"""Test change detection of new indexes."""
changes = self.get_changes(
[self.author_empty, self.book], [self.author_empty, self.book_indexes]
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AddIndex"])
added_index = models.Index(
fields=["author", "title"], name="book_title_author_idx"
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", index=added_index
)
def test_remove_indexes(self):
"""Test change detection of removed indexes."""
changes = self.get_changes(
[self.author_empty, self.book_indexes], [self.author_empty, self.book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RemoveIndex"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", name="book_title_author_idx"
)
def test_remove_field_with_model_options(self):
before_state = [
ModelState("testapp", "Animal", []),
ModelState(
"testapp",
"Dog",
fields=[
("name", models.CharField(max_length=100)),
(
"animal",
models.ForeignKey("testapp.Animal", on_delete=models.CASCADE),
),
],
options={
"indexes": [
models.Index(fields=("animal", "name"), name="animal_name_idx")
],
"constraints": [
models.UniqueConstraint(
fields=("animal", "name"), name="animal_name_idx"
),
],
},
),
]
changes = self.get_changes(before_state, [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"RemoveIndex",
"RemoveConstraint",
"RemoveField",
"DeleteModel",
"DeleteModel",
],
)
def test_remove_field_with_remove_index_or_constraint_dependency(self):
before_state = [
ModelState("testapp", "Category", []),
ModelState(
"testapp",
"Model",
fields=[
("date", models.DateField(auto_now=True)),
(
"category",
models.ForeignKey(
"testapp.Category", models.SET_NULL, null=True
),
),
],
options={
"constraints": [
models.UniqueConstraint(
fields=("date", "category"), name="unique_category_for_date"
),
]
},
),
]
changes = self.get_changes(
before_state,
[
ModelState(
"testapp",
"Model",
fields=[
("date", models.DateField(auto_now=True)),
],
),
],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["RemoveConstraint", "RemoveField", "DeleteModel"],
)
def test_rename_indexes(self):
book_renamed_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(
fields=["author", "title"], name="renamed_book_title_author_idx"
)
],
},
)
changes = self.get_changes(
[self.author_empty, self.book_indexes],
[self.author_empty, book_renamed_indexes],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RenameIndex"])
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
model_name="book",
new_name="renamed_book_title_author_idx",
old_name="book_title_author_idx",
)
def test_order_fields_indexes(self):
"""Test change detection of reordering of fields in indexes."""
changes = self.get_changes(
[self.author_empty, self.book_indexes],
[self.author_empty, self.book_unordered_indexes],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RemoveIndex", "AddIndex"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", name="book_title_author_idx"
)
added_index = models.Index(
fields=["title", "author"], name="book_author_title_idx"
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", index=added_index
)
def test_create_model_with_check_constraint(self):
"""Test creation of new model with constraints already defined."""
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
{
"constraints": [
models.CheckConstraint(
condition=models.Q(name__contains="Bob"),
name="name_contains_bob",
)
]
},
)
changes = self.get_changes([], [author])
constraint = models.CheckConstraint(
condition=models.Q(name__contains="Bob"), name="name_contains_bob"
)
# Right number of migrations?
self.assertEqual(len(changes["otherapp"]), 1)
# Right number of actions?
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 1)
# Right actions order?
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="Author",
options={"constraints": [constraint]},
)
def test_add_constraints(self):
"""Test change detection of new constraints."""
changes = self.get_changes(
[self.author_name], [self.author_name_check_constraint]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddConstraint"])
added_constraint = models.CheckConstraint(
condition=models.Q(name__contains="Bob"), name="name_contains_bob"
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", constraint=added_constraint
)
def test_add_constraints_with_new_model(self):
book_with_unique_title_and_pony = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
("pony", models.ForeignKey("otherapp.Pony", models.CASCADE)),
],
{
"constraints": [
models.UniqueConstraint(
fields=["title", "pony"],
name="unique_title_pony",
)
]
},
)
changes = self.get_changes(
[self.book_with_no_author],
[book_with_unique_title_and_pony, self.other_pony],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["CreateModel", "AddField", "AddConstraint"],
)
def test_add_constraints_with_dict_keys(self):
book_types = {"F": "Fantasy", "M": "Mystery"}
book_with_type = ModelState(
"testapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("type", models.CharField(max_length=1)),
],
{
"constraints": [
models.CheckConstraint(
condition=models.Q(type__in=book_types.keys()),
name="book_type_check",
),
],
},
)
book_with_resolved_type = ModelState(
"testapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("type", models.CharField(max_length=1)),
],
{
"constraints": [
models.CheckConstraint(
condition=models.Q(("type__in", tuple(book_types))),
name="book_type_check",
),
],
},
)
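        # The dict_keys value is treated like the equivalent tuple during
        # change detection, so no operations are expected.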
changes = self.get_changes([book_with_type], [book_with_resolved_type])
self.assertEqual(len(changes), 0)
def test_add_index_with_new_model(self):
book_with_index_title_and_pony = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
("pony", models.ForeignKey("otherapp.Pony", models.CASCADE)),
],
{
"indexes": [
models.Index(fields=["title", "pony"], name="index_title_pony"),
]
},
)
changes = self.get_changes(
[self.book_with_no_author],
[book_with_index_title_and_pony, self.other_pony],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["CreateModel", "AddField", "AddIndex"],
)
def test_alter_constraint(self):
book_constraint = models.CheckConstraint(
condition=models.Q(title__contains="title"),
name="title_contains_title",
)
book_altered_constraint = models.CheckConstraint(
condition=models.Q(title__contains="title"),
name="title_contains_title",
violation_error_code="error_code",
)
author_altered_constraint = models.CheckConstraint(
condition=models.Q(name__contains="Bob"),
name="name_contains_bob",
violation_error_message="Name doesn't contain Bob",
)
book_check_constraint = copy.deepcopy(self.book)
book_check_constraint_with_error_message = copy.deepcopy(self.book)
author_name_check_constraint_with_error_message = copy.deepcopy(
self.author_name_check_constraint
)
book_check_constraint.options = {"constraints": [book_constraint]}
book_check_constraint_with_error_message.options = {
"constraints": [book_altered_constraint]
}
author_name_check_constraint_with_error_message.options = {
"constraints": [author_altered_constraint]
}
changes = self.get_changes(
[self.author_name_check_constraint, book_check_constraint],
[
author_name_check_constraint_with_error_message,
book_check_constraint_with_error_message,
],
)
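        # Only validation attributes (violation_error_code/message) changed,
        # so an AlterConstraint suffices instead of dropping and re-adding
        # the constraints.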
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterConstraint"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
model_name="author",
name="name_contains_bob",
constraint=author_altered_constraint,
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterConstraint"])
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
model_name="book",
name="title_contains_title",
constraint=book_altered_constraint,
)
self.assertMigrationDependencies(
changes, "otherapp", 0, [("testapp", "auto_1")]
)
def test_remove_constraints(self):
"""Test change detection of removed constraints."""
changes = self.get_changes(
[self.author_name_check_constraint], [self.author_name]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveConstraint"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", name="name_contains_bob"
)
def test_constraint_dropped_and_recreated(self):
altered_constraint = models.CheckConstraint(
condition=models.Q(name__contains="bob"),
name="name_contains_bob",
)
author_name_check_constraint_lowercased = copy.deepcopy(
self.author_name_check_constraint
)
author_name_check_constraint_lowercased.options = {
"constraints": [altered_constraint]
}
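        # The constraint's condition itself changed ("Bob" -> "bob"), so the
        # constraint must be dropped and recreated.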
changes = self.get_changes(
[self.author_name_check_constraint],
[author_name_check_constraint_lowercased],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RemoveConstraint", "AddConstraint"]
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
model_name="author",
name="name_contains_bob",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
1,
model_name="author",
constraint=altered_constraint,
)
def test_add_unique_together(self):
"""Tests unique_together detection."""
changes = self.get_changes(
[self.author_empty, self.book],
[self.author_empty, self.book_unique_together],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether"])
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
unique_together={("author", "title")},
)
def test_remove_unique_together(self):
"""Tests unique_together detection."""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", unique_together=set()
)
def test_unique_together_remove_fk(self):
"""Tests unique_together and field removal detection & ordering"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book_with_no_author],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterUniqueTogether", "RemoveField"],
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", unique_together=set()
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", name="author"
)
def test_unique_together_no_changes(self):
"""
unique_together doesn't generate a migration if no
changes have been made.
"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book_unique_together],
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
def test_unique_together_ordering(self):
"""
unique_together also triggers on ordering changes.
"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book_unique_together_2],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
unique_together={("title", "author")},
)
def test_add_field_and_unique_together(self):
"""
Added fields will be created before using them in unique_together.
"""
changes = self.get_changes(
[self.author_empty, self.book],
[self.author_empty, self.book_unique_together_3],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AddField", "AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
name="book",
unique_together={("title", "newfield")},
)
def test_create_model_and_unique_together(self):
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
)
book_with_author = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("otherapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "author")},
},
)
changes = self.get_changes(
[self.book_with_no_author], [author, book_with_author]
)
# Right number of migrations?
self.assertEqual(len(changes["otherapp"]), 1)
# Right number of actions?
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 3)
# Right actions order?
self.assertOperationTypes(
changes,
"otherapp",
0,
["CreateModel", "AddField", "AlterUniqueTogether"],
)
def test_remove_field_and_unique_together(self):
"""
        Fields are removed only after unique_together has been updated.
"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together_3],
[self.author_empty, self.book_unique_together],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterUniqueTogether", "RemoveField"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
unique_together={("author", "title")},
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
model_name="book",
name="newfield",
)
def test_alter_field_and_unique_together(self):
"""Fields are altered after deleting some unique_together."""
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField(db_index=True)),
],
{
"unique_together": {("name",)},
},
)
author_reversed_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, unique=True)),
("age", models.IntegerField()),
],
{
"unique_together": {("age",)},
},
)
changes = self.get_changes([initial_author], [author_reversed_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"AlterUniqueTogether",
"AlterField",
"AlterField",
"AlterUniqueTogether",
],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
unique_together=set(),
)
self.assertOperationAttributes(
changes,
"testapp",
0,
1,
model_name="author",
name="age",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
2,
model_name="author",
name="name",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
3,
name="author",
unique_together={("age",)},
)
def test_partly_alter_unique_together_increase(self):
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",)},
},
)
author_new_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",), ("age",)},
},
)
changes = self.get_changes([initial_author], [author_new_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
unique_together={("name",), ("age",)},
)
def test_partly_alter_unique_together_decrease(self):
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",), ("age",)},
},
)
author_new_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",)},
},
)
changes = self.get_changes([initial_author], [author_new_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
unique_together={("name",)},
)
def test_rename_field_and_unique_together(self):
"""Fields are renamed before updating unique_together."""
changes = self.get_changes(
[self.author_empty, self.book_unique_together_3],
[self.author_empty, self.book_unique_together_4],
MigrationQuestioner({"ask_rename": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["RenameField", "AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
name="book",
unique_together={("title", "newfield2")},
)
def test_proxy(self):
"""The autodetector correctly deals with proxy models."""
# First, we test adding a proxy model
changes = self.get_changes(
[self.author_empty], [self.author_empty, self.author_proxy]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="AuthorProxy",
options={"proxy": True, "indexes": [], "constraints": []},
)
# Now, we test turning a proxy model into a non-proxy model
# It should delete the proxy then make the real one
changes = self.get_changes(
[self.author_empty, self.author_proxy],
[self.author_empty, self.author_proxy_notproxy],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="AuthorProxy")
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="AuthorProxy", options={}
)
def test_proxy_non_model_parent(self):
class Mixin:
pass
author_proxy_non_model_parent = ModelState(
"testapp",
"AuthorProxy",
[],
{"proxy": True},
(Mixin, "testapp.author"),
)
changes = self.get_changes(
[self.author_empty],
[self.author_empty, author_proxy_non_model_parent],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="AuthorProxy",
options={"proxy": True, "indexes": [], "constraints": []},
bases=(Mixin, "testapp.author"),
)
def test_proxy_custom_pk(self):
"""
        #23415 - The autodetector must correctly deal with FKs to proxy
        models with a custom pk.
"""
# First, we test the default pk field name
changes = self.get_changes(
[], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]
)
        # The model the FK points to.
self.assertEqual(
changes["otherapp"][0].operations[0].fields[2][1].remote_field.model,
"thirdapp.AuthorProxy",
)
# Now, we test the custom pk field name
changes = self.get_changes(
[], [self.author_custom_pk, self.author_proxy_third, self.book_proxy_fk]
)
        # The model the FK points to.
self.assertEqual(
changes["otherapp"][0].operations[0].fields[2][1].remote_field.model,
"thirdapp.AuthorProxy",
)
def test_proxy_to_mti_with_fk_to_proxy(self):
# First, test the pk table and field name.
to_state = self.make_project_state(
[self.author_empty, self.author_proxy_third, self.book_proxy_fk],
)
changes = self.get_changes([], to_state)
fk_field = changes["otherapp"][0].operations[0].fields[2][1]
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("testapp", "author"),
)
self.assertEqual(fk_field.remote_field.model, "thirdapp.AuthorProxy")
# Change AuthorProxy to use MTI.
from_state = to_state.clone()
to_state = self.make_project_state(
[self.author_empty, self.author_proxy_third_notproxy, self.book_proxy_fk],
)
changes = self.get_changes(from_state, to_state)
# Right number/type of migrations for the AuthorProxy model?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(
changes, "thirdapp", 0, ["DeleteModel", "CreateModel"]
)
# Right number/type of migrations for the Book model with a FK to
# AuthorProxy?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
# otherapp should depend on thirdapp.
self.assertMigrationDependencies(
changes, "otherapp", 0, [("thirdapp", "auto_1")]
)
# Now, test the pk table and field name.
fk_field = changes["otherapp"][0].operations[0].field
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("thirdapp", "authorproxy"),
)
self.assertEqual(fk_field.remote_field.model, "thirdapp.AuthorProxy")
def test_proxy_to_mti_with_fk_to_proxy_proxy(self):
# First, test the pk table and field name.
to_state = self.make_project_state(
[
self.author_empty,
self.author_proxy,
self.author_proxy_proxy,
self.book_proxy_proxy_fk,
]
)
changes = self.get_changes([], to_state)
fk_field = changes["otherapp"][0].operations[0].fields[1][1]
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("testapp", "author"),
)
self.assertEqual(fk_field.remote_field.model, "testapp.AAuthorProxyProxy")
# Change AuthorProxy to use MTI. FK still points to AAuthorProxyProxy,
# a proxy of AuthorProxy.
from_state = to_state.clone()
to_state = self.make_project_state(
[
self.author_empty,
self.author_proxy_notproxy,
self.author_proxy_proxy,
self.book_proxy_proxy_fk,
]
)
changes = self.get_changes(from_state, to_state)
# Right number/type of migrations for the AuthorProxy model?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"])
# Right number/type of migrations for the Book model with a FK to
# AAuthorProxyProxy?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
# otherapp should depend on testapp.
self.assertMigrationDependencies(
changes, "otherapp", 0, [("testapp", "auto_1")]
)
# Now, test the pk table and field name.
fk_field = changes["otherapp"][0].operations[0].field
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("testapp", "authorproxy"),
)
self.assertEqual(fk_field.remote_field.model, "testapp.AAuthorProxyProxy")
def test_unmanaged_create(self):
"""The autodetector correctly deals with managed models."""
# First, we test adding an unmanaged model
changes = self.get_changes(
[self.author_empty], [self.author_empty, self.author_unmanaged]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="AuthorUnmanaged", options={"managed": False}
)
def test_unmanaged_delete(self):
changes = self.get_changes(
[self.author_empty, self.author_unmanaged], [self.author_empty]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel"])
def test_unmanaged_to_managed(self):
# Now, we test turning an unmanaged model into a managed model
changes = self.get_changes(
[self.author_empty, self.author_unmanaged],
[self.author_empty, self.author_unmanaged_managed],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="authorunmanaged", options={}
)
def test_managed_to_unmanaged(self):
# Now, we turn managed to unmanaged.
changes = self.get_changes(
[self.author_empty, self.author_unmanaged_managed],
[self.author_empty, self.author_unmanaged],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="authorunmanaged", options={"managed": False}
)
def test_unmanaged_custom_pk(self):
"""
        #23415 - The autodetector must correctly deal with FKs to unmanaged
        models with a custom pk.
"""
# First, we test the default pk field name
changes = self.get_changes([], [self.author_unmanaged_default_pk, self.book])
# The model the FK on the book model points to.
fk_field = changes["otherapp"][0].operations[0].fields[2][1]
self.assertEqual(fk_field.remote_field.model, "testapp.Author")
# Now, we test the custom pk field name
changes = self.get_changes([], [self.author_unmanaged_custom_pk, self.book])
# The model the FK on the book model points to.
fk_field = changes["otherapp"][0].operations[0].fields[2][1]
self.assertEqual(fk_field.remote_field.model, "testapp.Author")
@override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
def test_swappable(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes(
[self.custom_user], [self.custom_user, self.author_with_custom_user]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
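            # The dependency is expressed against the swappable setting, not
            # a concrete app, so swapping the user model later still works.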
self.assertMigrationDependencies(
changes, "testapp", 0, [("__setting__", "AUTH_USER_MODEL")]
)
def test_swappable_lowercase(self):
model_state = ModelState(
"testapp",
"Document",
[
("id", models.AutoField(primary_key=True)),
(
"owner",
models.ForeignKey(
settings.AUTH_USER_MODEL.lower(),
models.CASCADE,
),
),
],
)
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes([], [model_state])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Document")
self.assertMigrationDependencies(
changes,
"testapp",
0,
[("__setting__", "AUTH_USER_MODEL")],
)
@override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
def test_swappable_many_to_many_model_case(self):
document_lowercase = ModelState(
"testapp",
"Document",
[
("id", models.AutoField(primary_key=True)),
("owners", models.ManyToManyField(settings.AUTH_USER_MODEL.lower())),
],
)
document = ModelState(
"testapp",
"Document",
[
("id", models.AutoField(primary_key=True)),
("owners", models.ManyToManyField(settings.AUTH_USER_MODEL)),
],
)
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes(
[self.custom_user, document_lowercase],
[self.custom_user, document],
)
self.assertEqual(len(changes), 0)
def test_swappable_changed(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
before = self.make_project_state([self.custom_user, self.author_with_user])
with override_settings(AUTH_USER_MODEL="thirdapp.CustomUser"):
after = self.make_project_state(
[self.custom_user, self.author_with_custom_user]
)
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes()
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", name="user"
)
fk_field = changes["testapp"][0].operations[0].field
self.assertEqual(fk_field.remote_field.model, "thirdapp.CustomUser")
def test_add_field_with_default(self):
"""#22030 - Adding a field with a default should work."""
changes = self.get_changes([self.author_empty], [self.author_name_default])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
def test_custom_deconstructible(self):
"""
Two instances which deconstruct to the same value aren't considered a
change.
"""
changes = self.get_changes(
[self.author_name_deconstructible_1], [self.author_name_deconstructible_2]
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
def test_deconstruct_field_kwarg(self):
"""Field instances are handled correctly by nested deconstruction."""
changes = self.get_changes(
[self.author_name_deconstructible_3], [self.author_name_deconstructible_4]
)
self.assertEqual(changes, {})
def test_deconstructible_list(self):
"""Nested deconstruction descends into lists."""
# When lists contain items that deconstruct to identical values, those
# lists should be considered equal for the purpose of detecting state
# changes (even if the original items are unequal).
changes = self.get_changes(
[self.author_name_deconstructible_list_1],
[self.author_name_deconstructible_list_2],
)
self.assertEqual(changes, {})
# Legitimate differences within the deconstructed lists should be
# reported as a change
changes = self.get_changes(
[self.author_name_deconstructible_list_1],
[self.author_name_deconstructible_list_3],
)
self.assertEqual(len(changes), 1)
def test_deconstructible_tuple(self):
"""Nested deconstruction descends into tuples."""
# When tuples contain items that deconstruct to identical values, those
# tuples should be considered equal for the purpose of detecting state
# changes (even if the original items are unequal).
changes = self.get_changes(
[self.author_name_deconstructible_tuple_1],
[self.author_name_deconstructible_tuple_2],
)
self.assertEqual(changes, {})
# Legitimate differences within the deconstructed tuples should be
# reported as a change
changes = self.get_changes(
[self.author_name_deconstructible_tuple_1],
[self.author_name_deconstructible_tuple_3],
)
self.assertEqual(len(changes), 1)
def test_deconstructible_dict(self):
"""Nested deconstruction descends into dict values."""
# When dicts contain items whose values deconstruct to identical
# values, those dicts should be considered equal for the purpose of
# detecting state changes (even if the original values are unequal).
changes = self.get_changes(
[self.author_name_deconstructible_dict_1],
[self.author_name_deconstructible_dict_2],
)
self.assertEqual(changes, {})
# Legitimate differences within the deconstructed dicts should be
# reported as a change
changes = self.get_changes(
[self.author_name_deconstructible_dict_1],
[self.author_name_deconstructible_dict_3],
)
self.assertEqual(len(changes), 1)
def test_nested_deconstructible_objects(self):
"""
Nested deconstruction is applied recursively to the args/kwargs of
deconstructed objects.
"""
# If the items within a deconstructed object's args/kwargs have the
# same deconstructed values - whether or not the items themselves are
# different instances - then the object as a whole is regarded as
# unchanged.
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_2],
)
self.assertEqual(changes, {})
# Differences that exist solely within the args list of a deconstructed
# object should be reported as changes
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_changed_arg],
)
self.assertEqual(len(changes), 1)
# Additional args should also be reported as a change
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_extra_arg],
)
self.assertEqual(len(changes), 1)
# Differences that exist solely within the kwargs dict of a
# deconstructed object should be reported as changes
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_changed_kwarg],
)
self.assertEqual(len(changes), 1)
# Additional kwargs should also be reported as a change
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_extra_kwarg],
)
self.assertEqual(len(changes), 1)
def test_deconstruct_type(self):
"""
#22951 -- Uninstantiated classes with deconstruct are correctly
returned by deep_deconstruct during serialization.
"""
author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
# IntegerField intentionally not instantiated.
default=models.IntegerField,
),
),
],
)
changes = self.get_changes([], [author])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
def test_replace_string_with_foreignkey(self):
"""
#22300 - Adding an FK in the same "spot" as a deleted CharField should
work.
"""
changes = self.get_changes(
[self.author_with_publisher_string],
[self.author_with_publisher, self.publisher],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="publisher_name")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher")
def test_foreign_key_removed_before_target_model(self):
"""
Removing an FK and the model it targets in the same change must remove
the FK field before the model to maintain consistency.
"""
changes = self.get_changes(
[self.author_with_publisher, self.publisher], [self.author_name]
) # removes both the model and FK
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "DeleteModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_many_to_many(self, mocked_ask_method):
"""
#22435 - Adding a ManyToManyField should not prompt for a default.
"""
changes = self.get_changes(
[self.author_empty, self.publisher], [self.author_with_m2m, self.publisher]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="publishers")
def test_alter_many_to_many(self):
changes = self.get_changes(
[self.author_with_m2m, self.publisher],
[self.author_with_m2m_blank, self.publisher],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="publishers")
def test_create_with_through_model(self):
"""
        Adding an m2m field with a through model and the models that use it
        should be ordered correctly.
"""
changes = self.get_changes(
[], [self.author_with_m2m_through, self.publisher, self.contract]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"CreateModel",
"CreateModel",
"CreateModel",
"AddField",
],
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Contract")
self.assertOperationAttributes(
changes, "testapp", 0, 3, model_name="author", name="publishers"
)
def test_create_with_through_model_separate_apps(self):
author_with_m2m_through = ModelState(
"authors",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField(
"testapp.Publisher", through="contract.Contract"
),
),
],
)
contract = ModelState(
"contract",
"Contract",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("authors.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
changes = self.get_changes(
[], [author_with_m2m_through, self.publisher, contract]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertNumberMigrations(changes, "contract", 1)
self.assertNumberMigrations(changes, "authors", 2)
self.assertMigrationDependencies(
changes,
"authors",
1,
{("authors", "auto_1"), ("contract", "auto_1"), ("testapp", "auto_1")},
)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationTypes(changes, "contract", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "contract", 0, 0, name="Contract")
self.assertOperationTypes(changes, "authors", 0, ["CreateModel"])
self.assertOperationTypes(changes, "authors", 1, ["AddField"])
self.assertOperationAttributes(changes, "authors", 0, 0, name="Author")
self.assertOperationAttributes(
changes, "authors", 1, 0, model_name="author", name="publishers"
)
def test_many_to_many_removed_before_through_model(self):
"""
Removing a ManyToManyField and the "through" model in the same change
must remove the field before the model to maintain consistency.
"""
changes = self.get_changes(
[
self.book_with_multiple_authors_through_attribution,
self.author_name,
self.attribution,
],
[self.book_with_no_author, self.author_name],
)
# Remove both the through model and ManyToMany
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes, "otherapp", 0, ["RemoveField", "DeleteModel"]
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="authors", model_name="book"
)
self.assertOperationAttributes(changes, "otherapp", 0, 1, name="Attribution")
def test_many_to_many_removed_before_through_model_2(self):
"""
Removing a model that contains a ManyToManyField and the "through"
model in the same change must remove the field before the model to
maintain consistency.
"""
changes = self.get_changes(
[
self.book_with_multiple_authors_through_attribution,
self.author_name,
self.attribution,
],
[self.author_name],
)
# Remove both the through model and ManyToMany
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes, "otherapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="authors", model_name="book"
)
self.assertOperationAttributes(changes, "otherapp", 0, 1, name="Attribution")
self.assertOperationAttributes(changes, "otherapp", 0, 2, name="Book")
def test_m2m_w_through_multistep_remove(self):
"""
        A model with an m2m field that specifies a "through" model cannot be
        removed in the same migration as that through model, as the schema
        would pass through an inconsistent state. The autodetector should
        order the field removals before the model deletions to avoid this.
"""
changes = self.get_changes(
[self.author_with_m2m_through, self.publisher, self.contract],
[self.publisher],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["RemoveField", "RemoveField", "DeleteModel", "DeleteModel"],
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", model_name="contract"
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="publisher", model_name="contract"
)
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 3, name="Contract")
def test_concrete_field_changed_to_many_to_many(self):
"""
#23938 - Changing a concrete field into a ManyToManyField
first removes the concrete field and then adds the m2m field.
"""
changes = self.get_changes(
[self.author_with_former_m2m], [self.author_with_m2m, self.publisher]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="publishers", model_name="author"
)
self.assertOperationAttributes(
changes, "testapp", 0, 2, name="publishers", model_name="author"
)
def test_many_to_many_changed_to_concrete_field(self):
"""
#23938 - Changing a ManyToManyField into a concrete field
first removes the m2m field and then adds the concrete field.
"""
changes = self.get_changes(
[self.author_with_m2m, self.publisher], [self.author_with_former_m2m]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RemoveField", "DeleteModel", "AddField"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="publishers", model_name="author"
)
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertOperationAttributes(
changes, "testapp", 0, 2, name="publishers", model_name="author"
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, max_length=100)
def test_non_circular_foreignkey_dependency_removal(self):
"""
If two models with a ForeignKey from one to the other are removed at
the same time, the autodetector should remove them in the correct
order.
"""
changes = self.get_changes(
[self.author_with_publisher, self.publisher_with_author], []
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", model_name="publisher"
)
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Publisher")
def test_alter_model_options(self):
"""Changing a model's options should make a change."""
changes = self.get_changes([self.author_empty], [self.author_with_options])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
options={
"permissions": [("can_hire", "Can hire")],
"verbose_name": "Authi",
},
)
# Changing them back to empty should also make a change
changes = self.get_changes([self.author_with_options], [self.author_empty])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", options={}
)
def test_alter_model_options_proxy(self):
"""Changing a proxy model's options should also make a change."""
changes = self.get_changes(
[self.author_proxy, self.author_empty],
[self.author_proxy_options, self.author_empty],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="authorproxy",
options={"verbose_name": "Super Author"},
)
def test_set_alter_order_with_respect_to(self):
"""Setting order_with_respect_to adds a field."""
changes = self.get_changes(
[self.book, self.author_with_book],
[self.book, self.author_with_book_order_wrt],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterOrderWithRespectTo"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", order_with_respect_to="book"
)
def test_add_alter_order_with_respect_to(self):
"""
        Setting order_with_respect_to while also adding the FK does things
        in the right order.
"""
changes = self.get_changes(
[self.author_name], [self.book, self.author_with_book_order_wrt]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AlterOrderWithRespectTo"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", name="book"
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="author", order_with_respect_to="book"
)
def test_remove_alter_order_with_respect_to(self):
"""
        Removing order_with_respect_to while also removing the FK does
        things in the right order.
"""
changes = self.get_changes(
[self.book, self.author_with_book_order_wrt], [self.author_name]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AlterOrderWithRespectTo", "RemoveField"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", order_with_respect_to=None
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, model_name="author", name="book"
)
def test_add_model_order_with_respect_to(self):
"""
Setting order_with_respect_to when adding the whole model
does things in the right order.
"""
changes = self.get_changes([], [self.book, self.author_with_book_order_wrt])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={"order_with_respect_to": "book"},
)
self.assertNotIn(
"_order",
[name for name, field in changes["testapp"][0].operations[0].fields],
)
def test_add_model_order_with_respect_to_unique_together(self):
changes = self.get_changes(
[],
[
self.book,
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"unique_together": {("id", "_order")},
},
),
],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={
"order_with_respect_to": "book",
"unique_together": {("id", "_order")},
},
)
def test_add_model_order_with_respect_to_constraint(self):
after = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"constraints": [
models.CheckConstraint(
condition=models.Q(_order__gt=1), name="book_order_gt_1"
),
],
},
)
changes = self.get_changes([], [self.book, after])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["CreateModel"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={
"order_with_respect_to": "book",
"constraints": [
models.CheckConstraint(
condition=models.Q(_order__gt=1), name="book_order_gt_1"
)
],
},
)
def test_add_model_order_with_respect_to_index(self):
after = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"indexes": [models.Index(fields=["_order"], name="book_order_idx")],
},
)
changes = self.get_changes([], [self.book, after])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={
"order_with_respect_to": "book",
"indexes": [models.Index(fields=["_order"], name="book_order_idx")],
},
)
def test_set_alter_order_with_respect_to_index_constraint_unique_together(self):
tests = [
(
"AddIndex",
{
"indexes": [
models.Index(fields=["_order"], name="book_order_idx"),
]
},
),
(
"AddConstraint",
{
"constraints": [
models.CheckConstraint(
condition=models.Q(_order__gt=1),
name="book_order_gt_1",
),
]
},
),
("AlterUniqueTogether", {"unique_together": {("id", "_order")}}),
]
for operation, extra_option in tests:
with self.subTest(operation=operation):
after = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
**extra_option,
},
)
changes = self.get_changes(
[self.book, self.author_with_book],
[self.book, after],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"AlterOrderWithRespectTo",
operation,
],
)
def test_alter_model_managers(self):
"""
Changing the model managers adds a new operation.
"""
changes = self.get_changes([self.other_pony], [self.other_pony_food])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterModelManagers"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="pony")
self.assertEqual(
[name for name, mgr in changes["otherapp"][0].operations[0].managers],
["food_qs", "food_mgr", "food_mgr_kwargs"],
)
self.assertEqual(
changes["otherapp"][0].operations[0].managers[1][1].args, ("a", "b", 1, 2)
)
self.assertEqual(
changes["otherapp"][0].operations[0].managers[2][1].args, ("x", "y", 3, 4)
)
def test_swappable_first_inheritance(self):
"""Swappable models get their CreateModel first."""
changes = self.get_changes([], [self.custom_user, self.aardvark])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(
changes, "thirdapp", 0, ["CreateModel", "CreateModel"]
)
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="CustomUser")
self.assertOperationAttributes(changes, "thirdapp", 0, 1, name="Aardvark")
def test_default_related_name_option(self):
model_state = ModelState(
"app",
"model",
[
("id", models.AutoField(primary_key=True)),
],
options={"default_related_name": "related_name"},
)
changes = self.get_changes([], [model_state])
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
name="model",
options={"default_related_name": "related_name"},
)
altered_model_state = ModelState(
"app",
"Model",
[
("id", models.AutoField(primary_key=True)),
],
)
changes = self.get_changes([model_state], [altered_model_state])
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["AlterModelOptions"])
self.assertOperationAttributes(changes, "app", 0, 0, name="model", options={})
@override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
def test_swappable_first_setting(self):
"""Swappable models get their CreateModel first."""
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes([], [self.custom_user_no_inherit, self.aardvark])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(
changes, "thirdapp", 0, ["CreateModel", "CreateModel"]
)
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="CustomUser")
self.assertOperationAttributes(changes, "thirdapp", 0, 1, name="Aardvark")
def test_bases_first(self):
"""Bases of other models come first."""
changes = self.get_changes(
[], [self.aardvark_based_on_author, self.author_name]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Aardvark")
def test_bases_first_mixed_case_app_label(self):
app_label = "MiXedCaseApp"
changes = self.get_changes(
[],
[
ModelState(
app_label,
"owner",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
app_label,
"place",
[
("id", models.AutoField(primary_key=True)),
(
"owner",
models.ForeignKey("MiXedCaseApp.owner", models.CASCADE),
),
],
),
ModelState(app_label, "restaurant", [], bases=("MiXedCaseApp.place",)),
],
)
self.assertNumberMigrations(changes, app_label, 1)
self.assertOperationTypes(
changes,
app_label,
0,
[
"CreateModel",
"CreateModel",
"CreateModel",
],
)
self.assertOperationAttributes(changes, app_label, 0, 0, name="owner")
self.assertOperationAttributes(changes, app_label, 0, 1, name="place")
self.assertOperationAttributes(changes, app_label, 0, 2, name="restaurant")
def test_multiple_bases(self):
"""
Inheriting models doesn't move *_ptr fields into AddField operations.
"""
A = ModelState("app", "A", [("a_id", models.AutoField(primary_key=True))])
B = ModelState("app", "B", [("b_id", models.AutoField(primary_key=True))])
C = ModelState("app", "C", [], bases=("app.A", "app.B"))
D = ModelState("app", "D", [], bases=("app.A", "app.B"))
E = ModelState("app", "E", [], bases=("app.A", "app.B"))
changes = self.get_changes([], [A, B, C, D, E])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(
changes,
"app",
0,
["CreateModel", "CreateModel", "CreateModel", "CreateModel", "CreateModel"],
)
self.assertOperationAttributes(changes, "app", 0, 0, name="A")
self.assertOperationAttributes(changes, "app", 0, 1, name="B")
self.assertOperationAttributes(changes, "app", 0, 2, name="C")
self.assertOperationAttributes(changes, "app", 0, 3, name="D")
self.assertOperationAttributes(changes, "app", 0, 4, name="E")
def test_proxy_bases_first(self):
"""Bases of proxies come first."""
changes = self.get_changes(
[], [self.author_empty, self.author_proxy, self.author_proxy_proxy]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "CreateModel", "CreateModel"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="AuthorProxy")
self.assertOperationAttributes(
changes, "testapp", 0, 2, name="AAuthorProxyProxy"
)
def test_pk_fk_included(self):
"""
A relation used as the primary key is kept as part of CreateModel.
"""
changes = self.get_changes([], [self.aardvark_pk_fk_author, self.author_name])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Aardvark")
def test_first_dependency(self):
"""
A dependency to an app with no migrations uses __first__.
"""
# Load graph
loader = MigrationLoader(connection)
before = self.make_project_state([])
after = self.make_project_state([self.book_migrations_fk])
after.real_apps = {"migrations"}
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes(graph=loader.graph)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("migrations", "__first__")]
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_last_dependency(self):
"""
A dependency to an app with existing migrations uses the
last migration of that app.
"""
# Load graph
loader = MigrationLoader(connection)
before = self.make_project_state([])
after = self.make_project_state([self.book_migrations_fk])
after.real_apps = {"migrations"}
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes(graph=loader.graph)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("migrations", "0002_second")]
)
def test_alter_fk_before_model_deletion(self):
"""
        ForeignKeys are altered _before_ the model they used to
        refer to is deleted.
"""
changes = self.get_changes(
[self.author_name, self.publisher_with_author],
[self.aardvark_testapp, self.publisher_with_aardvark_author],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "AlterField", "DeleteModel"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Aardvark")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="author")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author")
def test_fk_dependency_other_app(self):
"""
#23100 - ForeignKeys correctly depend on other apps' models.
"""
changes = self.get_changes(
[self.author_name, self.book], [self.author_with_book, self.book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="book")
self.assertMigrationDependencies(
changes, "testapp", 0, [("otherapp", "__first__")]
)
def test_alter_unique_together_fk_to_m2m(self):
changes = self.get_changes(
[self.author_name, self.book_unique_together],
[
self.author_name,
ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ManyToManyField("testapp.Author")),
("title", models.CharField(max_length=200)),
],
),
],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes, "otherapp", 0, ["AlterUniqueTogether", "RemoveField", "AddField"]
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", unique_together=set()
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", name="author"
)
self.assertOperationAttributes(
changes, "otherapp", 0, 2, model_name="book", name="author"
)
def test_alter_field_to_fk_dependency_other_app(self):
changes = self.get_changes(
[self.author_empty, self.book_with_no_author_fk],
[self.author_empty, self.book],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
self.assertMigrationDependencies(
changes, "otherapp", 0, [("testapp", "__first__")]
)
def test_circular_dependency_mixed_addcreate(self):
"""
        #23315 - The dependency resolver knows to put all CreateModel
        operations before AddField operations without becoming unsolvable.
"""
address = ModelState(
"a",
"Address",
[
("id", models.AutoField(primary_key=True)),
("country", models.ForeignKey("b.DeliveryCountry", models.CASCADE)),
],
)
person = ModelState(
"a",
"Person",
[
("id", models.AutoField(primary_key=True)),
],
)
apackage = ModelState(
"b",
"APackage",
[
("id", models.AutoField(primary_key=True)),
("person", models.ForeignKey("a.Person", models.CASCADE)),
],
)
country = ModelState(
"b",
"DeliveryCountry",
[
("id", models.AutoField(primary_key=True)),
],
)
changes = self.get_changes([], [address, person, apackage, country])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 2)
self.assertNumberMigrations(changes, "b", 1)
self.assertOperationTypes(changes, "a", 0, ["CreateModel", "CreateModel"])
self.assertOperationTypes(changes, "a", 1, ["AddField"])
self.assertOperationTypes(changes, "b", 0, ["CreateModel", "CreateModel"])
@override_settings(AUTH_USER_MODEL="a.Tenant")
def test_circular_dependency_swappable(self):
"""
#23322 - The dependency resolver knows to explicitly resolve
swappable models.
"""
with isolate_lru_cache(apps.get_swappable_settings_name):
tenant = ModelState(
"a",
"Tenant",
[
("id", models.AutoField(primary_key=True)),
("primary_address", models.ForeignKey("b.Address", models.CASCADE)),
],
bases=(AbstractBaseUser,),
)
address = ModelState(
"b",
"Address",
[
("id", models.AutoField(primary_key=True)),
(
"tenant",
models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE),
),
],
)
changes = self.get_changes([], [address, tenant])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 2)
self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
self.assertOperationTypes(changes, "a", 1, ["AddField"])
self.assertMigrationDependencies(changes, "a", 0, [])
self.assertMigrationDependencies(
changes, "a", 1, [("a", "auto_1"), ("b", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "b", 1)
self.assertOperationTypes(changes, "b", 0, ["CreateModel"])
self.assertMigrationDependencies(
changes, "b", 0, [("__setting__", "AUTH_USER_MODEL")]
)
@override_settings(AUTH_USER_MODEL="b.Tenant")
def test_circular_dependency_swappable2(self):
"""
        #23322 - The dependency resolver knows to explicitly resolve
        swappable models even when the swappable model is not the first
        migrated model.
"""
with isolate_lru_cache(apps.get_swappable_settings_name):
address = ModelState(
"a",
"Address",
[
("id", models.AutoField(primary_key=True)),
(
"tenant",
models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE),
),
],
)
tenant = ModelState(
"b",
"Tenant",
[
("id", models.AutoField(primary_key=True)),
("primary_address", models.ForeignKey("a.Address", models.CASCADE)),
],
bases=(AbstractBaseUser,),
)
changes = self.get_changes([], [address, tenant])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 2)
self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
self.assertOperationTypes(changes, "a", 1, ["AddField"])
self.assertMigrationDependencies(changes, "a", 0, [])
self.assertMigrationDependencies(
changes, "a", 1, [("__setting__", "AUTH_USER_MODEL"), ("a", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "b", 1)
self.assertOperationTypes(changes, "b", 0, ["CreateModel"])
self.assertMigrationDependencies(changes, "b", 0, [("a", "auto_1")])
@override_settings(AUTH_USER_MODEL="a.Person")
def test_circular_dependency_swappable_self(self):
"""
#23322 - The dependency resolver knows to explicitly resolve
swappable models.
"""
with isolate_lru_cache(apps.get_swappable_settings_name):
person = ModelState(
"a",
"Person",
[
("id", models.AutoField(primary_key=True)),
(
"parent1",
models.ForeignKey(
settings.AUTH_USER_MODEL,
models.CASCADE,
related_name="children",
),
),
],
)
changes = self.get_changes([], [person])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 1)
self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
self.assertMigrationDependencies(changes, "a", 0, [])
@override_settings(AUTH_USER_MODEL="a.User")
def test_swappable_circular_multi_mti(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
parent = ModelState(
"a",
"Parent",
[("user", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE))],
)
child = ModelState("a", "Child", [], bases=("a.Parent",))
user = ModelState("a", "User", [], bases=(AbstractBaseUser, "a.Child"))
changes = self.get_changes([], [parent, child, user])
self.assertNumberMigrations(changes, "a", 1)
self.assertOperationTypes(
changes, "a", 0, ["CreateModel", "CreateModel", "CreateModel", "AddField"]
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_blank_textfield_and_charfield(self, mocked_ask_method):
"""
#23405 - Adding a NOT NULL and blank `CharField` or `TextField`
        without a default should not prompt for a default.
"""
changes = self.get_changes(
[self.author_empty], [self.author_with_biography_blank]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField", "AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition"
)
def test_add_non_blank_textfield_and_charfield(self, mocked_ask_method):
"""
#23405 - Adding a NOT NULL and non-blank `CharField` or `TextField`
        without a default should prompt for a default.
"""
changes = self.get_changes(
[self.author_empty], [self.author_with_biography_non_blank]
)
self.assertEqual(mocked_ask_method.call_count, 2)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField", "AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0)
def test_mti_inheritance_model_removal(self):
Animal = ModelState(
"app",
"Animal",
[
("id", models.AutoField(primary_key=True)),
],
)
Dog = ModelState("app", "Dog", [], bases=("app.Animal",))
changes = self.get_changes([Animal, Dog], [Animal])
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["DeleteModel"])
self.assertOperationAttributes(changes, "app", 0, 0, name="Dog")
def test_add_model_with_field_removed_from_base_model(self):
"""
Removing a base field takes place before adding a new inherited model
that has a field with the same name.
"""
before = [
ModelState(
"app",
"readable",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
],
),
]
after = [
ModelState(
"app",
"readable",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"app",
"book",
[
("title", models.CharField(max_length=200)),
],
bases=("app.readable",),
),
]
changes = self.get_changes(before, after)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RemoveField", "CreateModel"])
self.assertOperationAttributes(
changes, "app", 0, 0, name="title", model_name="readable"
)
self.assertOperationAttributes(changes, "app", 0, 1, name="book")
def test_parse_number(self):
tests = [
("no_number", None),
("0001_initial", 1),
("0002_model3", 2),
("0002_auto_20380101_1112", 2),
("0002_squashed_0003", 3),
("0002_model2_squashed_0003_other4", 3),
("0002_squashed_0003_squashed_0004", 4),
("0002_model2_squashed_0003_other4_squashed_0005_other6", 5),
("0002_custom_name_20380101_1112_squashed_0003_model", 3),
("2_squashed_4", 4),
]
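        # Editor's note (illustrative, not part of the upstream test): as the
        # cases above show, plain names resolve to their leading number, while
        # squashed names resolve to the number after the final "_squashed_".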
for migration_name, expected_number in tests:
with self.subTest(migration_name=migration_name):
self.assertEqual(
MigrationAutodetector.parse_number(migration_name),
expected_number,
)
def test_add_custom_fk_with_hardcoded_to(self):
class HardcodedForeignKey(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs["to"] = "testapp.Author"
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["to"]
return name, path, args, kwargs
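        # Editor's note (illustrative, not upstream code): with "to" removed
        # from the deconstructed kwargs, the autodetector has to recover the
        # testapp.Author dependency from the field instance itself, which is
        # what this test exercises.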
book_hardcoded_fk_to = ModelState(
"testapp",
"Book",
[
("author", HardcodedForeignKey(on_delete=models.CASCADE)),
],
)
changes = self.get_changes(
[self.author_empty],
[self.author_empty, book_hardcoded_fk_to],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Book")
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition"
)
def test_add_composite_pk(self, mocked_ask_method):
before = [
ModelState(
"app",
"foo",
[
("id", models.AutoField(primary_key=True)),
],
),
]
after = [
ModelState(
"app",
"foo",
[
("pk", models.CompositePrimaryKey("foo_id", "bar_id")),
("id", models.IntegerField()),
],
),
]
changes = self.get_changes(before, after)
self.assertEqual(mocked_ask_method.call_count, 0)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["AddField", "AlterField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
name="pk",
model_name="foo",
preserve_default=True,
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
name="id",
model_name="foo",
preserve_default=True,
)
def test_remove_composite_pk(self):
before = [
ModelState(
"app",
"foo",
[
("pk", models.CompositePrimaryKey("foo_id", "bar_id")),
("id", models.IntegerField()),
],
),
]
after = [
ModelState(
"app",
"foo",
[
("id", models.AutoField(primary_key=True)),
],
),
]
changes = self.get_changes(before, after)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RemoveField", "AlterField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
name="pk",
model_name="foo",
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
name="id",
model_name="foo",
preserve_default=True,
)


class MigrationSuggestNameTests(SimpleTestCase):
def test_no_operations(self):
class Migration(migrations.Migration):
operations = []
migration = Migration("some_migration", "test_app")
self.assertIs(migration.suggest_name().startswith("auto_"), True)
def test_no_operations_initial(self):
class Migration(migrations.Migration):
initial = True
operations = []
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "initial")
def test_single_operation(self):
class Migration(migrations.Migration):
operations = [migrations.CreateModel("Person", fields=[])]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "person")
class Migration(migrations.Migration):
operations = [migrations.DeleteModel("Person")]
migration = Migration("0002_initial", "test_app")
self.assertEqual(migration.suggest_name(), "delete_person")
def test_single_operation_long_name(self):
class Migration(migrations.Migration):
operations = [migrations.CreateModel("A" * 53, fields=[])]
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "a" * 53)
def test_two_operations(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.DeleteModel("Animal"),
]
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "person_delete_animal")
def test_two_create_models(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.CreateModel("Animal", fields=[]),
]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "person_animal")
def test_two_create_models_with_initial_true(self):
class Migration(migrations.Migration):
initial = True
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.CreateModel("Animal", fields=[]),
]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "initial")
def test_many_operations_suffix(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person1", fields=[]),
migrations.CreateModel("Person2", fields=[]),
migrations.CreateModel("Person3", fields=[]),
migrations.DeleteModel("Person4"),
migrations.DeleteModel("Person5"),
]
migration = Migration("some_migration", "test_app")
self.assertEqual(
migration.suggest_name(),
"person1_person2_person3_delete_person4_and_more",
)
def test_operation_with_no_suggested_name(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.RunSQL("SELECT 1 FROM person;"),
]
migration = Migration("some_migration", "test_app")
self.assertIs(migration.suggest_name().startswith("auto_"), True)
def test_operation_with_invalid_chars_in_suggested_name(self):
class Migration(migrations.Migration):
operations = [
migrations.AddConstraint(
"Person",
models.UniqueConstraint(
fields=["name"], name="person.name-*~unique!"
),
),
]
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "person_person_name_unique_")
def test_none_name(self):
class Migration(migrations.Migration):
operations = [migrations.RunSQL("SELECT 1 FROM person;")]
migration = Migration("0001_initial", "test_app")
suggest_name = migration.suggest_name()
self.assertIs(suggest_name.startswith("auto_"), True)
def test_none_name_with_initial_true(self):
class Migration(migrations.Migration):
initial = True
operations = [migrations.RunSQL("SELECT 1 FROM person;")]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "initial")
def test_auto(self):
migration = migrations.Migration("0001_initial", "test_app")
suggest_name = migration.suggest_name()
self.assertIs(suggest_name.startswith("auto_"), True)
|
./temp_repos/django/django/db/migrations/autodetector.py
|
./temp_repos/django/tests/migrations/test_autodetector.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'OperationDependency'.
Context:
- Class Name: OperationDependency
- Dependencies to Mock: to_state, questioner, from_state
- Key Imports: collections, django.db.migrations.questioner, itertools, django.conf, django.db, django.db.migrations.optimizer, django.db.migrations.operations.models, enum, django.db.migrations.migration, functools
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
OperationDependency
|
python
|
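Editor's addition: a minimal, hedged sketch answering the OperationDependency prompt above. Assumptions (not verified against the upstream autodetector source): OperationDependency is the namedtuple-based helper in django.db.migrations.autodetector with fields (app_label, model_name, field_name, type) and a nested Type enumeration exposing at least CREATE. The listed to_state/from_state/questioner "dependencies" belong to the surrounding MigrationAutodetector, so they are mocked only to reproduce that context.
import unittest
from unittest import mock

from django.db.migrations.autodetector import (
    MigrationAutodetector,
    OperationDependency,
)


class OperationDependencyTests(unittest.TestCase):
    def test_fields_are_accessible_by_name_and_position(self):
        # Arrange: mocked states and questioner stand in for real project
        # state; the autodetector is built only to mirror the usual context.
        from_state, to_state = mock.MagicMock(), mock.MagicMock()
        MigrationAutodetector(from_state, to_state, questioner=mock.MagicMock())
        # Act: build a dependency (field names/Type member are assumptions).
        dep = OperationDependency(
            "testapp", "author", "book", OperationDependency.Type.CREATE
        )
        # Assert: namedtuple semantics hold.
        self.assertEqual(dep.app_label, "testapp")
        self.assertEqual(dep[1], "author")
        self.assertEqual(dep.field_name, "book")

    def test_missing_fields_raise_type_error(self):
        # Arrange/Act/Assert: constructing with too few values must fail,
        # since namedtuples require every field.
        with self.assertRaises(TypeError):
            OperationDependency("testapp", "author")


if __name__ == "__main__":
    unittest.main()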
import datetime
import importlib
import os
import sys
from django.apps import apps
from django.core.management.base import OutputWrapper
from django.db.models import NOT_PROVIDED
from django.utils import timezone
from django.utils.version import get_docs_version
from .loader import MigrationLoader


class MigrationQuestioner:
"""
Give the autodetector responses to questions it might have.
This base class has a built-in noninteractive mode, but the
interactive subclass is what the command-line arguments will use.
"""
def __init__(self, defaults=None, specified_apps=None, dry_run=None):
self.defaults = defaults or {}
self.specified_apps = specified_apps or set()
self.dry_run = dry_run
def ask_initial(self, app_label):
"""Should we create an initial migration for the app?"""
# If it was specified on the command line, definitely true
if app_label in self.specified_apps:
return True
# Otherwise, we look to see if it has a migrations module
# without any Python files in it, apart from __init__.py.
# Apps from the new app template will have these; the Python
# file check will ensure we skip South ones.
try:
app_config = apps.get_app_config(app_label)
except LookupError: # It's a fake app.
return self.defaults.get("ask_initial", False)
migrations_import_path, _ = MigrationLoader.migrations_module(app_config.label)
if migrations_import_path is None:
# It's an application with migrations disabled.
return self.defaults.get("ask_initial", False)
try:
migrations_module = importlib.import_module(migrations_import_path)
except ImportError:
return self.defaults.get("ask_initial", False)
else:
if getattr(migrations_module, "__file__", None):
filenames = os.listdir(os.path.dirname(migrations_module.__file__))
elif hasattr(migrations_module, "__path__"):
if len(migrations_module.__path__) > 1:
return False
filenames = os.listdir(list(migrations_module.__path__)[0])
return not any(x.endswith(".py") for x in filenames if x != "__init__.py")
def ask_not_null_addition(self, field_name, model_name):
"""Adding a NOT NULL field to a model."""
# None means quit
return None
def ask_not_null_alteration(self, field_name, model_name):
"""Changing a NULL field to NOT NULL."""
# None means quit
return None
def ask_rename(self, model_name, old_name, new_name, field_instance):
"""Was this field really renamed?"""
return self.defaults.get("ask_rename", False)
def ask_rename_model(self, old_model_state, new_model_state):
"""Was this model really renamed?"""
return self.defaults.get("ask_rename_model", False)
def ask_merge(self, app_label):
"""Should these migrations really be merged?"""
return self.defaults.get("ask_merge", False)
def ask_auto_now_add_addition(self, field_name, model_name):
"""Adding an auto_now_add field to a model."""
# None means quit
return None
def ask_unique_callable_default_addition(self, field_name, model_name):
"""Adding a unique field with a callable default."""
# None means continue.
return None


class InteractiveMigrationQuestioner(MigrationQuestioner):
def __init__(
self, defaults=None, specified_apps=None, dry_run=None, prompt_output=None
):
super().__init__(
defaults=defaults, specified_apps=specified_apps, dry_run=dry_run
)
self.prompt_output = prompt_output or OutputWrapper(sys.stdout)
def _boolean_input(self, question, default=None):
self.prompt_output.write(f"{question} ", ending="")
result = input()
if not result and default is not None:
return default
while not result or result[0].lower() not in "yn":
self.prompt_output.write("Please answer yes or no: ", ending="")
result = input()
return result[0].lower() == "y"
def _choice_input(self, question, choices):
self.prompt_output.write(f"{question}")
for i, choice in enumerate(choices):
self.prompt_output.write(" %s) %s" % (i + 1, choice))
self.prompt_output.write("Select an option: ", ending="")
while True:
try:
result = input()
value = int(result)
except ValueError:
pass
except KeyboardInterrupt:
self.prompt_output.write("\nCancelled.")
sys.exit(1)
else:
if 0 < value <= len(choices):
return value
self.prompt_output.write("Please select a valid option: ", ending="")
def _ask_default(self, default=""):
"""
Prompt for a default value.
The ``default`` argument allows providing a custom default value (as a
string) which will be shown to the user and used as the return value
if the user doesn't provide any other input.
"""
self.prompt_output.write("Please enter the default value as valid Python.")
if default:
self.prompt_output.write(
f"Accept the default '{default}' by pressing 'Enter' or "
f"provide another value."
)
self.prompt_output.write(
"The datetime and django.utils.timezone modules are available, so "
"it is possible to provide e.g. timezone.now as a value."
)
self.prompt_output.write("Type 'exit' to exit this prompt")
while True:
if default:
prompt = "[default: {}] >>> ".format(default)
else:
prompt = ">>> "
self.prompt_output.write(prompt, ending="")
try:
code = input()
except KeyboardInterrupt:
self.prompt_output.write("\nCancelled.")
sys.exit(1)
if not code and default:
code = default
if not code:
self.prompt_output.write(
"Please enter some code, or 'exit' (without quotes) to exit."
)
elif code == "exit":
sys.exit(1)
else:
try:
return eval(code, {}, {"datetime": datetime, "timezone": timezone})
except Exception as e:
self.prompt_output.write(f"{e.__class__.__name__}: {e}")
def ask_not_null_addition(self, field_name, model_name):
"""Adding a NOT NULL field to a model."""
if not self.dry_run:
choice = self._choice_input(
f"It is impossible to add a non-nullable field '{field_name}' "
f"to {model_name} without specifying a default. This is "
f"because the database needs something to populate existing "
f"rows.\n"
f"Please select a fix:",
[
(
"Provide a one-off default now (will be set on all existing "
"rows with a null value for this column)"
),
"Quit and manually define a default value in models.py.",
],
)
if choice == 2:
sys.exit(3)
else:
return self._ask_default()
return None
def ask_not_null_alteration(self, field_name, model_name):
"""Changing a NULL field to NOT NULL."""
if not self.dry_run:
choice = self._choice_input(
f"It is impossible to change a nullable field '{field_name}' "
f"on {model_name} to non-nullable without providing a "
f"default. This is because the database needs something to "
f"populate existing rows.\n"
f"Please select a fix:",
[
(
"Provide a one-off default now (will be set on all existing "
"rows with a null value for this column)"
),
"Ignore for now. Existing rows that contain NULL values "
"will have to be handled manually, for example with a "
"RunPython or RunSQL operation.",
"Quit and manually define a default value in models.py.",
],
)
if choice == 2:
return NOT_PROVIDED
elif choice == 3:
sys.exit(3)
else:
return self._ask_default()
return None
def ask_rename(self, model_name, old_name, new_name, field_instance):
"""Was this field really renamed?"""
msg = "Was %s.%s renamed to %s.%s (a %s)? [y/N]"
return self._boolean_input(
msg
% (
model_name,
old_name,
model_name,
new_name,
field_instance.__class__.__name__,
),
False,
)
def ask_rename_model(self, old_model_state, new_model_state):
"""Was this model really renamed?"""
msg = "Was the model %s.%s renamed to %s? [y/N]"
return self._boolean_input(
msg
% (old_model_state.app_label, old_model_state.name, new_model_state.name),
False,
)
def ask_merge(self, app_label):
return self._boolean_input(
"\nMerging will only work if the operations printed above do not conflict\n"
+ "with each other (working on different fields or models)\n"
+ "Should these migration branches be merged? [y/N]",
False,
)
def ask_auto_now_add_addition(self, field_name, model_name):
"""Adding an auto_now_add field to a model."""
if not self.dry_run:
choice = self._choice_input(
f"It is impossible to add the field '{field_name}' with "
f"'auto_now_add=True' to {model_name} without providing a "
f"default. This is because the database needs something to "
f"populate existing rows.\n",
[
"Provide a one-off default now which will be set on all "
"existing rows",
"Quit and manually define a default value in models.py.",
],
)
if choice == 2:
sys.exit(3)
else:
return self._ask_default(default="timezone.now")
return None
def ask_unique_callable_default_addition(self, field_name, model_name):
"""Adding a unique field with a callable default."""
if not self.dry_run:
version = get_docs_version()
choice = self._choice_input(
f"Callable default on unique field {model_name}.{field_name} "
f"will not generate unique values upon migrating.\n"
f"Please choose how to proceed:\n",
[
f"Continue making this migration as the first step in "
f"writing a manual migration to generate unique values "
f"described here: "
f"https://docs.djangoproject.com/en/{version}/howto/"
f"writing-migrations/#migrations-that-add-unique-fields.",
"Quit and edit field options in models.py.",
],
)
if choice == 2:
sys.exit(3)
return None


class NonInteractiveMigrationQuestioner(MigrationQuestioner):
def __init__(
self,
defaults=None,
specified_apps=None,
dry_run=None,
verbosity=1,
log=None,
):
self.verbosity = verbosity
self.log = log
super().__init__(
defaults=defaults,
specified_apps=specified_apps,
dry_run=dry_run,
)
def log_lack_of_migration(self, field_name, model_name, reason):
if self.verbosity > 0:
self.log(
f"Field '{field_name}' on model '{model_name}' not migrated: "
f"{reason}."
)
def ask_not_null_addition(self, field_name, model_name):
# We can't ask the user, so act like the user aborted.
self.log_lack_of_migration(
field_name,
model_name,
"it is impossible to add a non-nullable field without specifying "
"a default",
)
sys.exit(3)
def ask_not_null_alteration(self, field_name, model_name):
# We can't ask the user, so set as not provided.
self.log(
f"Field '{field_name}' on model '{model_name}' given a default of "
f"NOT PROVIDED and must be corrected."
)
return NOT_PROVIDED
def ask_auto_now_add_addition(self, field_name, model_name):
# We can't ask the user, so act like the user aborted.
self.log_lack_of_migration(
field_name,
model_name,
"it is impossible to add a field with 'auto_now_add=True' without "
"specifying a default",
)
sys.exit(3)
|
import datetime
from io import StringIO
from unittest import mock
from django.core.management.base import OutputWrapper
from django.db.migrations.questioner import (
InteractiveMigrationQuestioner,
MigrationQuestioner,
)
from django.db.models import NOT_PROVIDED
from django.test import SimpleTestCase
from django.test.utils import override_settings


class QuestionerTests(SimpleTestCase):
@override_settings(
INSTALLED_APPS=["migrations"],
MIGRATION_MODULES={"migrations": None},
)
def test_ask_initial_with_disabled_migrations(self):
questioner = MigrationQuestioner()
self.assertIs(False, questioner.ask_initial("migrations"))
def test_ask_not_null_alteration(self):
questioner = MigrationQuestioner()
self.assertIsNone(
questioner.ask_not_null_alteration("field_name", "model_name")
)
@mock.patch("builtins.input", return_value="2")
def test_ask_not_null_alteration_not_provided(self, mock):
questioner = InteractiveMigrationQuestioner(
prompt_output=OutputWrapper(StringIO())
)
question = questioner.ask_not_null_alteration("field_name", "model_name")
self.assertEqual(question, NOT_PROVIDED)


class QuestionerHelperMethodsTests(SimpleTestCase):
def setUp(self):
self.prompt = OutputWrapper(StringIO())
self.questioner = InteractiveMigrationQuestioner(prompt_output=self.prompt)
@mock.patch("builtins.input", return_value="datetime.timedelta(days=1)")
def test_questioner_default_timedelta(self, mock_input):
value = self.questioner._ask_default()
self.assertEqual(value, datetime.timedelta(days=1))
@mock.patch("builtins.input", return_value="")
def test_questioner_default_no_user_entry(self, mock_input):
value = self.questioner._ask_default(default="datetime.timedelta(days=1)")
self.assertEqual(value, datetime.timedelta(days=1))
@mock.patch("builtins.input", side_effect=["", "exit"])
def test_questioner_no_default_no_user_entry(self, mock_input):
with self.assertRaises(SystemExit):
self.questioner._ask_default()
self.assertIn(
"Please enter some code, or 'exit' (without quotes) to exit.",
self.prompt.getvalue(),
)
@mock.patch("builtins.input", side_effect=["bad code", "exit"])
def test_questioner_no_default_syntax_error(self, mock_input):
with self.assertRaises(SystemExit):
self.questioner._ask_default()
self.assertIn("SyntaxError: invalid syntax", self.prompt.getvalue())
@mock.patch("builtins.input", side_effect=["datetim", "exit"])
def test_questioner_no_default_name_error(self, mock_input):
with self.assertRaises(SystemExit):
self.questioner._ask_default()
self.assertIn(
"NameError: name 'datetim' is not defined", self.prompt.getvalue()
)
@mock.patch("builtins.input", side_effect=["datetime.dat", "exit"])
def test_questioner_no_default_attribute_error(self, mock_input):
with self.assertRaises(SystemExit):
self.questioner._ask_default()
self.assertIn(
"AttributeError: module 'datetime' has no attribute 'dat'",
self.prompt.getvalue(),
)
@mock.patch("builtins.input", side_effect=[KeyboardInterrupt()])
def test_questioner_no_default_keyboard_interrupt(self, mock_input):
with self.assertRaises(SystemExit):
self.questioner._ask_default()
self.assertIn("Cancelled.\n", self.prompt.getvalue())
@mock.patch("builtins.input", side_effect=["", "n"])
def test_questioner_no_default_no_user_entry_boolean(self, mock_input):
value = self.questioner._boolean_input("Proceed?")
self.assertIs(value, False)
@mock.patch("builtins.input", return_value="")
def test_questioner_default_no_user_entry_boolean(self, mock_input):
value = self.questioner._boolean_input("Proceed?", default=True)
self.assertIs(value, True)
@mock.patch("builtins.input", side_effect=[10, "garbage", 1])
def test_questioner_bad_user_choice(self, mock_input):
question = "Make a choice:"
value = self.questioner._choice_input(question, choices="abc")
expected_msg = f"{question}\n" f" 1) a\n" f" 2) b\n" f" 3) c\n"
self.assertIn(expected_msg, self.prompt.getvalue())
self.assertEqual(value, 1)
@mock.patch("builtins.input", side_effect=[KeyboardInterrupt()])
def test_questioner_no_choice_keyboard_interrupt(self, mock_input):
question = "Make a choice:"
with self.assertRaises(SystemExit):
self.questioner._choice_input(question, choices="abc")
expected_msg = (
f"{question}\n"
f" 1) a\n"
f" 2) b\n"
f" 3) c\n"
f"Select an option: \n"
f"Cancelled.\n"
)
self.assertIn(expected_msg, self.prompt.getvalue())
|
./temp_repos/django/django/db/migrations/questioner.py
|
./temp_repos/django/tests/migrations/test_questioner.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'MigrationQuestioner'.
Context:
- Class Name: MigrationQuestioner
- Dependencies to Mock: log, prompt_output, specified_apps, defaults, dry_run, verbosity
- Key Imports: importlib, django.core.management.base, django.utils, django.utils.version, django.apps, loader, os, datetime, django.db.models, sys
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
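For reference, a minimal sketch of the requested test might look like the following (illustrative only, not a reference solution; it exercises the canned-defaults path and the False fallback of the base class, and mocks the field instance passed to ask_rename):
import unittest
from unittest import mock
from django.db.migrations.questioner import MigrationQuestioner
class MigrationQuestionerSketchTests(unittest.TestCase):
    def test_known_question_uses_seeded_default(self):
        # Arrange: seed the questioner with a canned answer and mock the field.
        questioner = MigrationQuestioner(defaults={"ask_rename": True})
        field = mock.MagicMock()
        # Act: the base class looks the answer up in self.defaults.
        answer = questioner.ask_rename("mymodel", "old", "new", field)
        # Assert
        self.assertIs(answer, True)
    def test_unseeded_question_falls_back_to_false(self):
        # Arrange
        questioner = MigrationQuestioner(defaults={})
        # Act
        answer = questioner.ask_merge("myapp")
        # Assert: with no default seeded, the base class answers False.
        self.assertIs(answer, False)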
|
MigrationQuestioner
|
python
|
import os
import re
from importlib import import_module
from django import get_version
from django.apps import apps
# SettingsReference imported for backwards compatibility in Django 2.2.
from django.conf import SettingsReference # NOQA
from django.db import migrations
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.serializer import Serializer, serializer_factory
from django.utils.inspect import get_func_args
from django.utils.module_loading import module_dir
from django.utils.timezone import now
class OperationWriter:
def __init__(self, operation, indentation=2):
self.operation = operation
self.buff = []
self.indentation = indentation
def serialize(self):
def _write(_arg_name, _arg_value):
if _arg_name in self.operation.serialization_expand_args and isinstance(
_arg_value, (list, tuple, dict)
):
if isinstance(_arg_value, dict):
self.feed("%s={" % _arg_name)
self.indent()
for key, value in _arg_value.items():
key_string, key_imports = MigrationWriter.serialize(key)
arg_string, arg_imports = MigrationWriter.serialize(value)
args = arg_string.splitlines()
if len(args) > 1:
self.feed("%s: %s" % (key_string, args[0]))
for arg in args[1:-1]:
self.feed(arg)
self.feed("%s," % args[-1])
else:
self.feed("%s: %s," % (key_string, arg_string))
imports.update(key_imports)
imports.update(arg_imports)
self.unindent()
self.feed("},")
else:
self.feed("%s=[" % _arg_name)
self.indent()
for item in _arg_value:
arg_string, arg_imports = MigrationWriter.serialize(item)
args = arg_string.splitlines()
if len(args) > 1:
for arg in args[:-1]:
self.feed(arg)
self.feed("%s," % args[-1])
else:
self.feed("%s," % arg_string)
imports.update(arg_imports)
self.unindent()
self.feed("],")
else:
arg_string, arg_imports = MigrationWriter.serialize(_arg_value)
args = arg_string.splitlines()
if len(args) > 1:
self.feed("%s=%s" % (_arg_name, args[0]))
for arg in args[1:-1]:
self.feed(arg)
self.feed("%s," % args[-1])
else:
self.feed("%s=%s," % (_arg_name, arg_string))
imports.update(arg_imports)
imports = set()
name, args, kwargs = self.operation.deconstruct()
operation_args = get_func_args(self.operation.__init__)
        # See if this operation is in django.db.migrations. If it is, we can
        # just use the fact that we already have it imported; otherwise, we
        # need to add an import for the operation class.
if getattr(migrations, name, None) == self.operation.__class__:
self.feed("migrations.%s(" % name)
else:
imports.add("import %s" % (self.operation.__class__.__module__))
self.feed("%s.%s(" % (self.operation.__class__.__module__, name))
self.indent()
for i, arg in enumerate(args):
arg_value = arg
arg_name = operation_args[i]
_write(arg_name, arg_value)
i = len(args)
# Only iterate over remaining arguments
for arg_name in operation_args[i:]:
if arg_name in kwargs: # Don't sort to maintain signature order
arg_value = kwargs[arg_name]
_write(arg_name, arg_value)
self.unindent()
self.feed("),")
return self.render(), imports
def indent(self):
self.indentation += 1
def unindent(self):
self.indentation -= 1
def feed(self, line):
self.buff.append(" " * (self.indentation * 4) + line)
def render(self):
return "\n".join(self.buff)
class MigrationWriter:
"""
    Take a Migration instance and produce the contents of the migration file
    from it.
"""
def __init__(self, migration, include_header=True):
self.migration = migration
self.include_header = include_header
self.needs_manual_porting = False
def as_string(self):
"""Return a string of the file contents."""
items = {
"replaces_str": "",
"initial_str": "",
"run_before_str": "",
"atomic_str": "",
}
imports = set()
# Deconstruct operations
operations = []
for operation in self.migration.operations:
operation_string, operation_imports = OperationWriter(operation).serialize()
imports.update(operation_imports)
operations.append(operation_string)
items["operations"] = "\n".join(operations) + "\n" if operations else ""
        # Format dependencies, writing out swappable dependencies correctly.
dependencies = []
for dependency in self.migration.dependencies:
if dependency[0] == "__setting__":
dependencies.append(
" migrations.swappable_dependency(settings.%s),"
% dependency[1]
)
imports.add("from django.conf import settings")
else:
dependencies.append(" %s," % self.serialize(dependency)[0])
items["dependencies"] = (
"\n".join(sorted(dependencies)) + "\n" if dependencies else ""
)
# Format imports nicely, swapping imports of functions from migration
# files for comments
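        # (e.g. a serialized reference like "import app.migrations.0001_initial"
        # can't be kept as a real import, so it's swapped for a comment below.)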
migration_imports = set()
for line in list(imports):
if re.match(r"^import (.*)\.\d+[^\s]*$", line):
migration_imports.add(line.split("import")[1].strip())
imports.remove(line)
self.needs_manual_porting = True
# django.db.migrations is always used, but models import may not be.
# If models import exists, merge it with migrations import.
if "from django.db import models" in imports:
imports.discard("from django.db import models")
imports.add("from django.db import migrations, models")
else:
imports.add("from django.db import migrations")
# Sort imports by the package / module to be imported (the part after
# "from" in "from ... import ..." or after "import" in "import ...").
# First group the "import" statements, then "from ... import ...".
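        # e.g. ["import datetime", "import time",
        #       "from django.db import migrations, models"]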
sorted_imports = sorted(
imports, key=lambda i: (i.split()[0] == "from", i.split()[1])
)
items["imports"] = "\n".join(sorted_imports) + "\n" if imports else ""
if migration_imports:
items["imports"] += (
"\n\n# Functions from the following migrations need manual "
"copying.\n# Move them and any dependencies into this file, "
"then update the\n# RunPython operations to refer to the local "
"versions:\n# %s"
) % "\n# ".join(sorted(migration_imports))
if self.migration.replaces:
items["replaces_str"] = (
"\n replaces = %s\n" % self.serialize(self.migration.replaces)[0]
)
if self.migration.run_before:
items["run_before_str"] = (
"\n run_before = %s\n" % self.serialize(self.migration.run_before)[0]
)
        # Hinting that goes into the header comment.
if self.include_header:
items["migration_header"] = MIGRATION_HEADER_TEMPLATE % {
"version": get_version(),
"timestamp": now().strftime("%Y-%m-%d %H:%M"),
}
else:
items["migration_header"] = ""
if self.migration.initial:
items["initial_str"] = "\n initial = True\n"
if not self.migration.atomic:
items["atomic_str"] = "\n atomic = False\n"
return MIGRATION_TEMPLATE % items
@property
def basedir(self):
migrations_package_name, _ = MigrationLoader.migrations_module(
self.migration.app_label
)
if migrations_package_name is None:
raise ValueError(
"Django can't create migrations for app '%s' because "
"migrations have been disabled via the MIGRATION_MODULES "
"setting." % self.migration.app_label
)
# See if we can import the migrations module directly
try:
migrations_module = import_module(migrations_package_name)
except ImportError:
pass
else:
try:
return module_dir(migrations_module)
except ValueError:
pass
# Alright, see if it's a direct submodule of the app
app_config = apps.get_app_config(self.migration.app_label)
(
maybe_app_name,
_,
migrations_package_basename,
) = migrations_package_name.rpartition(".")
if app_config.name == maybe_app_name:
return os.path.join(app_config.path, migrations_package_basename)
        # If the MIGRATION_MODULES setting points at a custom package that
        # doesn't exist yet, create it, starting from an existing package.
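        # (e.g. MIGRATION_MODULES = {"app": "some.new.migrations"}, where
        # "some" is importable but "new.migrations" must be created on disk.)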
existing_dirs, missing_dirs = migrations_package_name.split("."), []
while existing_dirs:
missing_dirs.insert(0, existing_dirs.pop(-1))
try:
base_module = import_module(".".join(existing_dirs))
except (ImportError, ValueError):
continue
else:
try:
base_dir = module_dir(base_module)
except ValueError:
continue
else:
break
else:
raise ValueError(
"Could not locate an appropriate location to create "
"migrations package %s. Make sure the toplevel "
"package exists and can be imported." % migrations_package_name
)
final_dir = os.path.join(base_dir, *missing_dirs)
os.makedirs(final_dir, exist_ok=True)
for missing_dir in missing_dirs:
base_dir = os.path.join(base_dir, missing_dir)
with open(os.path.join(base_dir, "__init__.py"), "w"):
pass
return final_dir
@property
def filename(self):
return "%s.py" % self.migration.name
@property
def path(self):
return os.path.join(self.basedir, self.filename)
@classmethod
def serialize(cls, value):
return serializer_factory(value).serialize()
@classmethod
def register_serializer(cls, type_, serializer):
Serializer.register(type_, serializer)
@classmethod
def unregister_serializer(cls, type_):
Serializer.unregister(type_)
MIGRATION_HEADER_TEMPLATE = """\
# Generated by Django %(version)s on %(timestamp)s
"""
MIGRATION_TEMPLATE = """\
%(migration_header)s%(imports)s
class Migration(migrations.Migration):
%(replaces_str)s%(initial_str)s%(atomic_str)s%(run_before_str)s
dependencies = [
%(dependencies)s\
]
operations = [
%(operations)s\
]
"""
|
import datetime
import decimal
import enum
import functools
import math
import os
import pathlib
import re
import sys
import time
import uuid
import zoneinfo
from types import NoneType
from unittest import mock
import custom_migration_operations.more_operations
import custom_migration_operations.operations
from django import get_version
from django.conf import SettingsReference, settings
from django.core.validators import EmailValidator, RegexValidator
from django.db import migrations, models
from django.db.migrations.serializer import BaseSerializer
from django.db.migrations.writer import MigrationWriter, OperationWriter
from django.test import SimpleTestCase, override_settings
from django.test.utils import extend_sys_path
from django.utils.deconstruct import deconstructible
from django.utils.functional import SimpleLazyObject
from django.utils.timezone import get_default_timezone, get_fixed_timezone
from django.utils.translation import gettext_lazy as _
from .models import FoodManager, FoodQuerySet
def get_choices():
return [(i, str(i)) for i in range(3)]
class DeconstructibleInstances:
def deconstruct(self):
return ("DeconstructibleInstances", [], {})
@deconstructible
class DeconstructibleArbitrary:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
class Money(decimal.Decimal):
def deconstruct(self):
return (
"%s.%s" % (self.__class__.__module__, self.__class__.__name__),
[str(self)],
{},
)
class TestModel1:
def upload_to(self):
return "/somewhere/dynamic/"
thing = models.FileField(upload_to=upload_to)
class TextEnum(enum.Enum):
A = "a-value"
B = "value-b"
class TextTranslatedEnum(enum.Enum):
A = _("a-value")
B = _("value-b")
class BinaryEnum(enum.Enum):
A = b"a-value"
B = b"value-b"
class IntEnum(enum.IntEnum):
A = 1
B = 2
class IntFlagEnum(enum.IntFlag):
A = 1
B = 2
def decorator(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
return f(*args, **kwargs)
return wrapper
@decorator
def function_with_decorator():
pass
@functools.cache
def function_with_cache():
pass
@functools.lru_cache(maxsize=10)
def function_with_lru_cache():
pass
class OperationWriterTests(SimpleTestCase):
def test_empty_signature(self):
operation = custom_migration_operations.operations.TestOperation()
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {"import custom_migration_operations.operations"})
self.assertEqual(
buff,
"custom_migration_operations.operations.TestOperation(\n),",
)
def test_args_signature(self):
operation = custom_migration_operations.operations.ArgsOperation(1, 2)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {"import custom_migration_operations.operations"})
self.assertEqual(
buff,
"custom_migration_operations.operations.ArgsOperation(\n"
" arg1=1,\n"
" arg2=2,\n"
"),",
)
def test_kwargs_signature(self):
operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {"import custom_migration_operations.operations"})
self.assertEqual(
buff,
"custom_migration_operations.operations.KwargsOperation(\n"
" kwarg1=1,\n"
"),",
)
def test_args_kwargs_signature(self):
operation = custom_migration_operations.operations.ArgsKwargsOperation(
1, 2, kwarg2=4
)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {"import custom_migration_operations.operations"})
self.assertEqual(
buff,
"custom_migration_operations.operations.ArgsKwargsOperation(\n"
" arg1=1,\n"
" arg2=2,\n"
" kwarg2=4,\n"
"),",
)
def test_keyword_only_args_signature(self):
operation = (
custom_migration_operations.operations.ArgsAndKeywordOnlyArgsOperation(
1, 2, kwarg1=3, kwarg2=4
)
)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {"import custom_migration_operations.operations"})
self.assertEqual(
buff,
"custom_migration_operations.operations.ArgsAndKeywordOnlyArgsOperation(\n"
" arg1=1,\n"
" arg2=2,\n"
" kwarg1=3,\n"
" kwarg2=4,\n"
"),",
)
def test_nested_args_signature(self):
operation = custom_migration_operations.operations.ArgsOperation(
custom_migration_operations.operations.ArgsOperation(1, 2),
custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4),
)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {"import custom_migration_operations.operations"})
self.assertEqual(
buff,
"custom_migration_operations.operations.ArgsOperation(\n"
" arg1=custom_migration_operations.operations.ArgsOperation(\n"
" arg1=1,\n"
" arg2=2,\n"
" ),\n"
" arg2=custom_migration_operations.operations.KwargsOperation(\n"
" kwarg1=3,\n"
" kwarg2=4,\n"
" ),\n"
"),",
)
def test_multiline_args_signature(self):
operation = custom_migration_operations.operations.ArgsOperation(
"test\n arg1", "test\narg2"
)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {"import custom_migration_operations.operations"})
self.assertEqual(
buff,
"custom_migration_operations.operations.ArgsOperation(\n"
" arg1='test\\n arg1',\n"
" arg2='test\\narg2',\n"
"),",
)
def test_expand_args_signature(self):
operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2])
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {"import custom_migration_operations.operations"})
self.assertEqual(
buff,
"custom_migration_operations.operations.ExpandArgsOperation(\n"
" arg=[\n"
" 1,\n"
" 2,\n"
" ],\n"
"),",
)
def test_nested_operation_expand_args_signature(self):
operation = custom_migration_operations.operations.ExpandArgsOperation(
arg=[
custom_migration_operations.operations.KwargsOperation(
kwarg1=1,
kwarg2=2,
),
]
)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {"import custom_migration_operations.operations"})
self.assertEqual(
buff,
"custom_migration_operations.operations.ExpandArgsOperation(\n"
" arg=[\n"
" custom_migration_operations.operations.KwargsOperation(\n"
" kwarg1=1,\n"
" kwarg2=2,\n"
" ),\n"
" ],\n"
"),",
)
class WriterTests(SimpleTestCase):
"""
Tests the migration writer (makes migration files from Migration instances)
"""
class NestedEnum(enum.IntEnum):
A = 1
B = 2
class NestedChoices(models.TextChoices):
X = "X", "X value"
Y = "Y", "Y value"
@classmethod
def method(cls):
return cls.X
def safe_exec(self, string, value=None):
d = {}
try:
exec(string, globals(), d)
except Exception as e:
if value:
self.fail(
"Could not exec %r (from value %r): %s" % (string.strip(), value, e)
)
else:
self.fail("Could not exec %r: %s" % (string.strip(), e))
return d
def serialize_round_trip(self, value):
string, imports = MigrationWriter.serialize(value)
return self.safe_exec(
"%s\ntest_value_result = %s" % ("\n".join(imports), string), value
)["test_value_result"]
def assertSerializedEqual(self, value):
self.assertEqual(self.serialize_round_trip(value), value)
def assertSerializedResultEqual(self, value, target):
self.assertEqual(MigrationWriter.serialize(value), target)
def assertSerializedFieldEqual(self, value):
new_value = self.serialize_round_trip(value)
self.assertEqual(value.__class__, new_value.__class__)
self.assertEqual(value.max_length, new_value.max_length)
self.assertEqual(value.null, new_value.null)
self.assertEqual(value.unique, new_value.unique)
def assertSerializedFunctoolsPartialEqual(
self, value, expected_string, expected_imports
):
string, imports = MigrationWriter.serialize(value)
self.assertEqual(string, expected_string)
self.assertEqual(imports, expected_imports)
result = self.serialize_round_trip(value)
self.assertEqual(result.func, value.func)
self.assertEqual(result.args, value.args)
self.assertEqual(result.keywords, value.keywords)
return result
def test_serialize_numbers(self):
self.assertSerializedEqual(1)
self.assertSerializedEqual(1.2)
self.assertTrue(math.isinf(self.serialize_round_trip(float("inf"))))
self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf"))))
self.assertTrue(math.isnan(self.serialize_round_trip(float("nan"))))
self.assertSerializedEqual(decimal.Decimal("1.3"))
self.assertSerializedResultEqual(
decimal.Decimal("1.3"), ("Decimal('1.3')", {"from decimal import Decimal"})
)
self.assertSerializedEqual(Money("1.3"))
self.assertSerializedResultEqual(
Money("1.3"),
("migrations.test_writer.Money('1.3')", {"import migrations.test_writer"}),
)
def test_serialize_constants(self):
self.assertSerializedEqual(None)
self.assertSerializedEqual(True)
self.assertSerializedEqual(False)
def test_serialize_strings(self):
self.assertSerializedEqual(b"foobar")
string, imports = MigrationWriter.serialize(b"foobar")
self.assertEqual(string, "b'foobar'")
self.assertSerializedEqual("föobár")
string, imports = MigrationWriter.serialize("foobar")
self.assertEqual(string, "'foobar'")
def test_serialize_multiline_strings(self):
self.assertSerializedEqual(b"foo\nbar")
string, imports = MigrationWriter.serialize(b"foo\nbar")
self.assertEqual(string, "b'foo\\nbar'")
self.assertSerializedEqual("föo\nbár")
string, imports = MigrationWriter.serialize("foo\nbar")
self.assertEqual(string, "'foo\\nbar'")
def test_serialize_collections(self):
self.assertSerializedEqual({1: 2})
self.assertSerializedEqual(["a", 2, True, None])
self.assertSerializedEqual({2, 3, "eighty"})
self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]})
self.assertSerializedEqual(_("Hello"))
def test_serialize_builtin_types(self):
self.assertSerializedEqual([list, tuple, dict, set, frozenset])
self.assertSerializedResultEqual(
[list, tuple, dict, set, frozenset],
("[list, tuple, dict, set, frozenset]", set()),
)
def test_serialize_lazy_objects(self):
pattern = re.compile(r"^foo$")
lazy_pattern = SimpleLazyObject(lambda: pattern)
self.assertEqual(self.serialize_round_trip(lazy_pattern), pattern)
def test_serialize_enums(self):
self.assertSerializedResultEqual(
TextEnum.A,
("migrations.test_writer.TextEnum['A']", {"import migrations.test_writer"}),
)
self.assertSerializedResultEqual(
TextTranslatedEnum.A,
(
"migrations.test_writer.TextTranslatedEnum['A']",
{"import migrations.test_writer"},
),
)
self.assertSerializedResultEqual(
BinaryEnum.A,
(
"migrations.test_writer.BinaryEnum['A']",
{"import migrations.test_writer"},
),
)
self.assertSerializedResultEqual(
IntEnum.B,
("migrations.test_writer.IntEnum['B']", {"import migrations.test_writer"}),
)
self.assertSerializedResultEqual(
self.NestedEnum.A,
(
"migrations.test_writer.WriterTests.NestedEnum['A']",
{"import migrations.test_writer"},
),
)
self.assertSerializedEqual(self.NestedEnum.A)
field = models.CharField(
default=TextEnum.B, choices=[(m.value, m) for m in TextEnum]
)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.CharField(choices=["
"('a-value', migrations.test_writer.TextEnum['A']), "
"('value-b', migrations.test_writer.TextEnum['B'])], "
"default=migrations.test_writer.TextEnum['B'])",
)
field = models.CharField(
default=TextTranslatedEnum.A,
choices=[(m.value, m) for m in TextTranslatedEnum],
)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.CharField(choices=["
"('a-value', migrations.test_writer.TextTranslatedEnum['A']), "
"('value-b', migrations.test_writer.TextTranslatedEnum['B'])], "
"default=migrations.test_writer.TextTranslatedEnum['A'])",
)
field = models.CharField(
default=BinaryEnum.B, choices=[(m.value, m) for m in BinaryEnum]
)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.CharField(choices=["
"(b'a-value', migrations.test_writer.BinaryEnum['A']), "
"(b'value-b', migrations.test_writer.BinaryEnum['B'])], "
"default=migrations.test_writer.BinaryEnum['B'])",
)
field = models.IntegerField(
default=IntEnum.A, choices=[(m.value, m) for m in IntEnum]
)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.IntegerField(choices=["
"(1, migrations.test_writer.IntEnum['A']), "
"(2, migrations.test_writer.IntEnum['B'])], "
"default=migrations.test_writer.IntEnum['A'])",
)
def test_serialize_enum_flags(self):
self.assertSerializedResultEqual(
IntFlagEnum.A,
(
"migrations.test_writer.IntFlagEnum['A']",
{"import migrations.test_writer"},
),
)
self.assertSerializedResultEqual(
IntFlagEnum.B,
(
"migrations.test_writer.IntFlagEnum['B']",
{"import migrations.test_writer"},
),
)
field = models.IntegerField(
default=IntFlagEnum.A, choices=[(m.value, m) for m in IntFlagEnum]
)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.IntegerField(choices=["
"(1, migrations.test_writer.IntFlagEnum['A']), "
"(2, migrations.test_writer.IntFlagEnum['B'])], "
"default=migrations.test_writer.IntFlagEnum['A'])",
)
self.assertSerializedResultEqual(
IntFlagEnum.A | IntFlagEnum.B,
(
"migrations.test_writer.IntFlagEnum['A'] | "
"migrations.test_writer.IntFlagEnum['B']",
{"import migrations.test_writer"},
),
)
def test_serialize_choices(self):
class TextChoices(models.TextChoices):
A = "A", "A value"
B = "B", "B value"
class IntegerChoices(models.IntegerChoices):
A = 1, "One"
B = 2, "Two"
class DateChoices(datetime.date, models.Choices):
DATE_1 = 1969, 7, 20, "First date"
DATE_2 = 1969, 11, 19, "Second date"
self.assertSerializedResultEqual(TextChoices.A, ("'A'", set()))
self.assertSerializedResultEqual(IntegerChoices.A, ("1", set()))
self.assertSerializedResultEqual(
DateChoices.DATE_1,
("datetime.date(1969, 7, 20)", {"import datetime"}),
)
field = models.CharField(default=TextChoices.B, choices=TextChoices)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.CharField(choices=[('A', 'A value'), ('B', 'B value')], "
"default='B')",
)
field = models.IntegerField(default=IntegerChoices.B, choices=IntegerChoices)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.IntegerField(choices=[(1, 'One'), (2, 'Two')], default=2)",
)
field = models.DateField(default=DateChoices.DATE_2, choices=DateChoices)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.DateField(choices=["
"(datetime.date(1969, 7, 20), 'First date'), "
"(datetime.date(1969, 11, 19), 'Second date')], "
"default=datetime.date(1969, 11, 19))",
)
def test_serialize_dictionary_choices(self):
for choices in ({"Group": [(2, "2"), (1, "1")]}, {"Group": {2: "2", 1: "1"}}):
with self.subTest(choices):
field = models.IntegerField(choices=choices)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.IntegerField(choices=[('Group', [(2, '2'), (1, '1')])])",
)
def test_serialize_callable_choices(self):
field = models.IntegerField(choices=get_choices)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.IntegerField(choices=migrations.test_writer.get_choices)",
)
def test_serialize_nested_class(self):
for nested_cls in [self.NestedEnum, self.NestedChoices]:
cls_name = nested_cls.__name__
with self.subTest(cls_name):
self.assertSerializedResultEqual(
nested_cls,
(
"migrations.test_writer.WriterTests.%s" % cls_name,
{"import migrations.test_writer"},
),
)
def test_serialize_nested_class_method(self):
self.assertSerializedResultEqual(
self.NestedChoices.method,
(
"migrations.test_writer.WriterTests.NestedChoices.method",
{"import migrations.test_writer"},
),
)
def test_serialize_uuid(self):
self.assertSerializedEqual(uuid.uuid1())
self.assertSerializedEqual(uuid.uuid4())
uuid_a = uuid.UUID("5c859437-d061-4847-b3f7-e6b78852f8c8")
uuid_b = uuid.UUID("c7853ec1-2ea3-4359-b02d-b54e8f1bcee2")
self.assertSerializedResultEqual(
uuid_a,
("uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')", {"import uuid"}),
)
self.assertSerializedResultEqual(
uuid_b,
("uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')", {"import uuid"}),
)
field = models.UUIDField(
choices=((uuid_a, "UUID A"), (uuid_b, "UUID B")), default=uuid_a
)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.UUIDField(choices=["
"(uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'UUID A'), "
"(uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'UUID B')], "
"default=uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'))",
)
def test_serialize_pathlib(self):
        # Pure path objects work on all platforms.
self.assertSerializedEqual(pathlib.PurePosixPath())
self.assertSerializedEqual(pathlib.PureWindowsPath())
path = pathlib.PurePosixPath("/path/file.txt")
expected = ("pathlib.PurePosixPath('/path/file.txt')", {"import pathlib"})
self.assertSerializedResultEqual(path, expected)
path = pathlib.PureWindowsPath("A:\\File.txt")
expected = ("pathlib.PureWindowsPath('A:/File.txt')", {"import pathlib"})
self.assertSerializedResultEqual(path, expected)
# Concrete path objects work on supported platforms.
if sys.platform == "win32":
self.assertSerializedEqual(pathlib.WindowsPath.cwd())
path = pathlib.WindowsPath("A:\\File.txt")
expected = ("pathlib.PureWindowsPath('A:/File.txt')", {"import pathlib"})
self.assertSerializedResultEqual(path, expected)
else:
self.assertSerializedEqual(pathlib.PosixPath.cwd())
path = pathlib.PosixPath("/path/file.txt")
expected = ("pathlib.PurePosixPath('/path/file.txt')", {"import pathlib"})
self.assertSerializedResultEqual(path, expected)
field = models.FilePathField(path=pathlib.PurePosixPath("/home/user"))
string, imports = MigrationWriter.serialize(field)
self.assertEqual(
string,
"models.FilePathField(path=pathlib.PurePosixPath('/home/user'))",
)
self.assertIn("import pathlib", imports)
def test_serialize_path_like(self):
with os.scandir(os.path.dirname(__file__)) as entries:
path_like = list(entries)[0]
expected = (repr(path_like.path), {})
self.assertSerializedResultEqual(path_like, expected)
field = models.FilePathField(path=path_like)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(string, "models.FilePathField(path=%r)" % path_like.path)
def test_serialize_zoneinfo(self):
self.assertSerializedEqual(zoneinfo.ZoneInfo("Asia/Kolkata"))
self.assertSerializedResultEqual(
zoneinfo.ZoneInfo("Asia/Kolkata"),
(
"zoneinfo.ZoneInfo(key='Asia/Kolkata')",
{"import zoneinfo"},
),
)
self.assertSerializedResultEqual(
zoneinfo.ZoneInfo("Europe/Paris"),
("zoneinfo.ZoneInfo(key='Europe/Paris')", {"import zoneinfo"}),
)
def test_serialize_functions(self):
with self.assertRaisesMessage(ValueError, "Cannot serialize function: lambda"):
self.assertSerializedEqual(lambda x: 42)
self.assertSerializedEqual(models.SET_NULL)
string, imports = MigrationWriter.serialize(models.SET(42))
self.assertEqual(string, "models.SET(42)")
self.serialize_round_trip(models.SET(42))
def test_serialize_decorated_functions(self):
self.assertSerializedEqual(function_with_decorator)
self.assertSerializedEqual(function_with_cache)
self.assertSerializedEqual(function_with_lru_cache)
def test_serialize_datetime(self):
self.assertSerializedEqual(datetime.datetime.now())
self.assertSerializedEqual(datetime.datetime.now)
self.assertSerializedEqual(datetime.datetime.today())
self.assertSerializedEqual(datetime.datetime.today)
self.assertSerializedEqual(datetime.date.today())
self.assertSerializedEqual(datetime.date.today)
self.assertSerializedEqual(datetime.datetime.now().time())
self.assertSerializedEqual(
datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone())
)
self.assertSerializedEqual(
datetime.datetime(2013, 12, 31, 22, 1, tzinfo=get_fixed_timezone(180))
)
self.assertSerializedResultEqual(
datetime.datetime(2014, 1, 1, 1, 1),
("datetime.datetime(2014, 1, 1, 1, 1)", {"import datetime"}),
)
self.assertSerializedResultEqual(
datetime.datetime(2012, 1, 1, 1, 1, tzinfo=datetime.UTC),
(
"datetime.datetime(2012, 1, 1, 1, 1, tzinfo=datetime.timezone.utc)",
{"import datetime"},
),
)
self.assertSerializedResultEqual(
datetime.datetime(2012, 1, 1, 1, 1, tzinfo=datetime.timezone.utc),
(
"datetime.datetime(2012, 1, 1, 1, 1, tzinfo=datetime.timezone.utc)",
{"import datetime"},
),
)
self.assertSerializedResultEqual(
datetime.datetime(
2012, 1, 1, 2, 1, tzinfo=zoneinfo.ZoneInfo("Europe/Paris")
),
(
"datetime.datetime(2012, 1, 1, 1, 1, tzinfo=datetime.timezone.utc)",
{"import datetime"},
),
)
def test_serialize_fields(self):
self.assertSerializedFieldEqual(models.CharField(max_length=255))
self.assertSerializedResultEqual(
models.CharField(max_length=255),
("models.CharField(max_length=255)", {"from django.db import models"}),
)
self.assertSerializedFieldEqual(models.TextField(null=True, blank=True))
self.assertSerializedResultEqual(
models.TextField(null=True, blank=True),
(
"models.TextField(blank=True, null=True)",
{"from django.db import models"},
),
)
def test_serialize_settings(self):
self.assertSerializedEqual(
SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL")
)
self.assertSerializedResultEqual(
SettingsReference("someapp.model", "AUTH_USER_MODEL"),
("settings.AUTH_USER_MODEL", {"from django.conf import settings"}),
)
def test_serialize_iterators(self):
self.assertSerializedResultEqual(
((x, x * x) for x in range(3)), ("((0, 0), (1, 1), (2, 4))", set())
)
def test_serialize_compiled_regex(self):
"""
Make sure compiled regex can be serialized.
"""
regex = re.compile(r"^\w+$")
self.assertSerializedEqual(regex)
def test_serialize_class_based_validators(self):
"""
Ticket #22943: Test serialization of class-based validators, including
compiled regexes.
"""
validator = RegexValidator(message="hello")
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(
string, "django.core.validators.RegexValidator(message='hello')"
)
self.serialize_round_trip(validator)
# Test with a compiled regex.
validator = RegexValidator(regex=re.compile(r"^\w+$"))
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(
string,
"django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$'))",
)
self.serialize_round_trip(validator)
# Test a string regex with flag
validator = RegexValidator(r"^[0-9]+$", flags=re.S)
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(
string,
"django.core.validators.RegexValidator('^[0-9]+$', "
"flags=re.RegexFlag['DOTALL'])",
)
self.serialize_round_trip(validator)
# Test message and code
validator = RegexValidator("^[-a-zA-Z0-9_]+$", "Invalid", "invalid")
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(
string,
"django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', "
"'invalid')",
)
self.serialize_round_trip(validator)
# Test with a subclass.
validator = EmailValidator(message="hello")
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(
string, "django.core.validators.EmailValidator(message='hello')"
)
self.serialize_round_trip(validator)
validator = deconstructible(path="migrations.test_writer.EmailValidator")(
EmailValidator
)(message="hello")
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(
string, "migrations.test_writer.EmailValidator(message='hello')"
)
validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(
message="hello"
)
with self.assertRaisesMessage(ImportError, "No module named 'custom'"):
MigrationWriter.serialize(validator)
validator = deconstructible(path="django.core.validators.EmailValidator2")(
EmailValidator
)(message="hello")
with self.assertRaisesMessage(
ValueError,
"Could not find object EmailValidator2 in django.core.validators.",
):
MigrationWriter.serialize(validator)
def test_serialize_complex_func_index(self):
index = models.Index(
models.Func("rating", function="ABS"),
models.Case(
models.When(name="special", then=models.Value("X")),
default=models.Value("other"),
),
models.ExpressionWrapper(
models.F("pages"),
output_field=models.IntegerField(),
),
models.OrderBy(models.F("name").desc()),
name="complex_func_index",
)
string, imports = MigrationWriter.serialize(index)
self.assertEqual(
string,
"models.Index(models.Func('rating', function='ABS'), "
"models.Case(models.When(name='special', then=models.Value('X')), "
"default=models.Value('other')), "
"models.ExpressionWrapper("
"models.F('pages'), output_field=models.IntegerField()), "
"models.OrderBy(models.OrderBy(models.F('name'), descending=True)), "
"name='complex_func_index')",
)
self.assertEqual(imports, {"from django.db import models"})
def test_serialize_empty_nonempty_tuple(self):
"""
Ticket #22679: makemigrations generates invalid code for (an empty
tuple) default_permissions = ()
"""
empty_tuple = ()
one_item_tuple = ("a",)
many_items_tuple = ("a", "b", "c")
self.assertSerializedEqual(empty_tuple)
self.assertSerializedEqual(one_item_tuple)
self.assertSerializedEqual(many_items_tuple)
def test_serialize_range(self):
string, imports = MigrationWriter.serialize(range(1, 5))
self.assertEqual(string, "range(1, 5)")
self.assertEqual(imports, set())
def test_serialize_builtins(self):
string, imports = MigrationWriter.serialize(range)
self.assertEqual(string, "range")
self.assertEqual(imports, set())
def test_serialize_unbound_method_reference(self):
"""An unbound method used within a class body can be serialized."""
self.serialize_round_trip(TestModel1.thing)
def test_serialize_local_function_reference(self):
"""A reference in a local scope can't be serialized."""
class TestModel2:
def upload_to(self):
return "somewhere dynamic"
thing = models.FileField(upload_to=upload_to)
with self.assertRaisesMessage(
ValueError, "Could not find function upload_to in migrations.test_writer"
):
self.serialize_round_trip(TestModel2.thing)
def test_serialize_managers(self):
self.assertSerializedEqual(models.Manager())
self.assertSerializedResultEqual(
FoodQuerySet.as_manager(),
(
"migrations.models.FoodQuerySet.as_manager()",
{"import migrations.models"},
),
)
self.assertSerializedEqual(FoodManager("a", "b"))
self.assertSerializedEqual(FoodManager("x", "y", c=3, d=4))
def test_serialize_frozensets(self):
self.assertSerializedEqual(frozenset())
self.assertSerializedEqual(frozenset("let it go"))
self.assertSerializedResultEqual(
frozenset("cba"), ("frozenset(['a', 'b', 'c'])", set())
)
def test_serialize_set(self):
self.assertSerializedEqual(set())
self.assertSerializedResultEqual(set(), ("set()", set()))
self.assertSerializedEqual({"a"})
self.assertSerializedResultEqual({"a"}, ("{'a'}", set()))
self.assertSerializedEqual({"c", "b", "a"})
self.assertSerializedResultEqual({"c", "b", "a"}, ("{'a', 'b', 'c'}", set()))
def test_serialize_timedelta(self):
self.assertSerializedEqual(datetime.timedelta())
self.assertSerializedEqual(datetime.timedelta(minutes=42))
def test_serialize_functools_partial(self):
value = functools.partial(datetime.timedelta)
string, imports = MigrationWriter.serialize(value)
self.assertSerializedFunctoolsPartialEqual(
value,
"functools.partial(datetime.timedelta)",
{"import datetime", "import functools"},
)
def test_serialize_functools_partial_posarg(self):
value = functools.partial(datetime.timedelta, 1)
string, imports = MigrationWriter.serialize(value)
self.assertSerializedFunctoolsPartialEqual(
value,
"functools.partial(datetime.timedelta, 1)",
{"import datetime", "import functools"},
)
def test_serialize_functools_partial_kwarg(self):
value = functools.partial(datetime.timedelta, seconds=2)
string, imports = MigrationWriter.serialize(value)
self.assertSerializedFunctoolsPartialEqual(
value,
"functools.partial(datetime.timedelta, seconds=2)",
{"import datetime", "import functools"},
)
def test_serialize_functools_partial_mixed(self):
value = functools.partial(datetime.timedelta, 1, seconds=2)
string, imports = MigrationWriter.serialize(value)
self.assertSerializedFunctoolsPartialEqual(
value,
"functools.partial(datetime.timedelta, 1, seconds=2)",
{"import datetime", "import functools"},
)
def test_serialize_functools_partial_non_identifier_keyword(self):
value = functools.partial(datetime.timedelta, **{"kebab-case": 1})
string, imports = MigrationWriter.serialize(value)
self.assertSerializedFunctoolsPartialEqual(
value,
"functools.partial(datetime.timedelta, **{'kebab-case': 1})",
{"import datetime", "import functools"},
)
def test_serialize_functools_partialmethod(self):
value = functools.partialmethod(datetime.timedelta, 1, seconds=2)
string, imports = MigrationWriter.serialize(value)
result = self.assertSerializedFunctoolsPartialEqual(
value,
"functools.partialmethod(datetime.timedelta, 1, seconds=2)",
{"import datetime", "import functools"},
)
self.assertIsInstance(result, functools.partialmethod)
def test_serialize_type_none(self):
self.assertSerializedEqual(NoneType)
def test_serialize_type_model(self):
self.assertSerializedEqual(models.Model)
self.assertSerializedResultEqual(
MigrationWriter.serialize(models.Model),
("('models.Model', {'from django.db import models'})", set()),
)
def test_database_on_delete_serializer_value(self):
db_level_on_delete_options = [
models.DB_CASCADE,
models.DB_SET_DEFAULT,
models.DB_SET_NULL,
]
for option in db_level_on_delete_options:
self.assertSerializedEqual(option)
self.assertSerializedResultEqual(
MigrationWriter.serialize(option),
(
f"('django.db.models.deletion.{option.__name__}', "
"{'import django.db.models.deletion'})",
set(),
),
)
def test_simple_migration(self):
"""
Tests serializing a simple migration.
"""
fields = {
"charfield": models.DateTimeField(default=datetime.datetime.now),
"datetimefield": models.DateTimeField(default=datetime.datetime.now),
}
options = {
"verbose_name": "My model",
"verbose_name_plural": "My models",
}
migration = type(
"Migration",
(migrations.Migration,),
{
"operations": [
migrations.CreateModel(
"MyModel", tuple(fields.items()), options, (models.Model,)
),
migrations.CreateModel(
"MyModel2", tuple(fields.items()), bases=(models.Model,)
),
migrations.CreateModel(
name="MyModel3",
fields=tuple(fields.items()),
options=options,
bases=(models.Model,),
),
migrations.DeleteModel("MyModel"),
migrations.AddField(
"OtherModel", "datetimefield", fields["datetimefield"]
),
],
"dependencies": [("testapp", "some_other_one")],
},
)
writer = MigrationWriter(migration)
output = writer.as_string()
# We don't test the output formatting - that's too fragile.
# Just make sure it runs for now, and that things look alright.
result = self.safe_exec(output)
self.assertIn("Migration", result)
def test_migration_path(self):
test_apps = [
"migrations.migrations_test_apps.normal",
"migrations.migrations_test_apps.with_package_model",
"migrations.migrations_test_apps.without_init_file",
]
base_dir = os.path.dirname(os.path.dirname(__file__))
for app in test_apps:
with self.modify_settings(INSTALLED_APPS={"append": app}):
migration = migrations.Migration("0001_initial", app.split(".")[-1])
expected_path = os.path.join(
base_dir, *(app.split(".") + ["migrations", "0001_initial.py"])
)
writer = MigrationWriter(migration)
self.assertEqual(writer.path, expected_path)
@override_settings(
MIGRATION_MODULES={"namespace_app": "namespace_app.migrations"},
INSTALLED_APPS=[
"migrations.migrations_test_apps.distributed_app_location_2.namespace_app"
],
)
def test_migration_path_distributed_namespace(self):
base_dir = os.path.dirname(os.path.dirname(__file__))
test_apps_dir = os.path.join(base_dir, "migrations", "migrations_test_apps")
expected_msg = (
"Could not locate an appropriate location to create "
"migrations package namespace_app.migrations. Make sure the toplevel "
"package exists and can be imported."
)
with extend_sys_path(
os.path.join(test_apps_dir, "distributed_app_location_1"),
os.path.join(test_apps_dir, "distributed_app_location_2"),
):
migration = migrations.Migration("0001_initial", "namespace_app")
writer = MigrationWriter(migration)
with self.assertRaisesMessage(ValueError, expected_msg):
writer.path
def test_custom_operation(self):
migration = type(
"Migration",
(migrations.Migration,),
{
"operations": [
custom_migration_operations.operations.TestOperation(),
custom_migration_operations.operations.CreateModel(),
migrations.CreateModel("MyModel", (), {}, (models.Model,)),
custom_migration_operations.more_operations.TestOperation(),
],
"dependencies": [],
},
)
writer = MigrationWriter(migration)
output = writer.as_string()
result = self.safe_exec(output)
self.assertIn("custom_migration_operations", result)
self.assertNotEqual(
result["custom_migration_operations"].operations.TestOperation,
result["custom_migration_operations"].more_operations.TestOperation,
)
def test_sorted_dependencies(self):
migration = type(
"Migration",
(migrations.Migration,),
{
"operations": [
migrations.AddField("mymodel", "myfield", models.IntegerField()),
],
"dependencies": [
("testapp10", "0005_fifth"),
("testapp02", "0005_third"),
("testapp02", "0004_sixth"),
("testapp01", "0001_initial"),
],
},
)
output = MigrationWriter(migration, include_header=False).as_string()
self.assertIn(
" dependencies = [\n"
" ('testapp01', '0001_initial'),\n"
" ('testapp02', '0004_sixth'),\n"
" ('testapp02', '0005_third'),\n"
" ('testapp10', '0005_fifth'),\n"
" ]",
output,
)
def test_sorted_imports(self):
"""
#24155 - Tests ordering of imports.
"""
migration = type(
"Migration",
(migrations.Migration,),
{
"operations": [
migrations.AddField(
"mymodel",
"myfield",
models.DateTimeField(
default=datetime.datetime(
2012, 1, 1, 1, 1, tzinfo=datetime.UTC
),
),
),
migrations.AddField(
"mymodel",
"myfield2",
models.FloatField(default=time.time),
),
]
},
)
writer = MigrationWriter(migration)
output = writer.as_string()
self.assertIn(
"import datetime\nimport time\nfrom django.db import migrations, models\n",
output,
)
def test_migration_file_header_comments(self):
"""
Test comments at top of file.
"""
migration = type("Migration", (migrations.Migration,), {"operations": []})
dt = datetime.datetime(2015, 7, 31, 4, 40, 0, 0, tzinfo=datetime.UTC)
with mock.patch("django.db.migrations.writer.now", lambda: dt):
for include_header in (True, False):
with self.subTest(include_header=include_header):
writer = MigrationWriter(migration, include_header)
output = writer.as_string()
self.assertEqual(
include_header,
output.startswith(
"# Generated by Django %s on 2015-07-31 04:40\n\n"
% get_version()
),
)
if not include_header:
# Make sure the output starts with something that's not
# a comment or indentation or blank line
self.assertRegex(
output.splitlines(keepends=True)[0], r"^[^#\s]+"
)
def test_models_import_omitted(self):
"""
django.db.models shouldn't be imported if unused.
"""
migration = type(
"Migration",
(migrations.Migration,),
{
"operations": [
migrations.AlterModelOptions(
name="model",
options={
"verbose_name": "model",
"verbose_name_plural": "models",
},
),
]
},
)
writer = MigrationWriter(migration)
output = writer.as_string()
self.assertIn("from django.db import migrations\n", output)
def test_deconstruct_class_arguments(self):
# Yes, it doesn't make sense to use a class as a default for a
# CharField. It does make sense for custom fields though, for example
# an enumfield that takes the enum class as an argument.
string = MigrationWriter.serialize(
models.CharField(default=DeconstructibleInstances)
)[0]
self.assertEqual(
string,
"models.CharField(default=migrations.test_writer.DeconstructibleInstances)",
)
def test_serialize_non_identifier_keyword_args(self):
instance = DeconstructibleArbitrary(
**{"kebab-case": 1, "my_list": [1, 2, 3], "123foo": {"456bar": set()}},
regular="kebab-case",
**{"simple": 1, "complex": 3.1416},
)
string, imports = MigrationWriter.serialize(instance)
self.assertEqual(
string,
"migrations.test_writer.DeconstructibleArbitrary(complex=3.1416, "
"my_list=[1, 2, 3], regular='kebab-case', simple=1, "
"**{'123foo': {'456bar': set()}, 'kebab-case': 1})",
)
self.assertEqual(imports, {"import migrations.test_writer"})
result = self.serialize_round_trip(instance)
self.assertEqual(result.args, instance.args)
self.assertEqual(result.kwargs, instance.kwargs)
def test_serialize_generic_alias(self):
self.assertSerializedEqual(dict[str, float])
def test_serialize_generic_alias_complex_args(self):
self.assertSerializedEqual(dict[str, models.Manager])
def test_register_serializer(self):
class ComplexSerializer(BaseSerializer):
def serialize(self):
return "complex(%r)" % self.value, {}
MigrationWriter.register_serializer(complex, ComplexSerializer)
self.assertSerializedEqual(complex(1, 2))
MigrationWriter.unregister_serializer(complex)
with self.assertRaisesMessage(ValueError, "Cannot serialize: (1+2j)"):
self.assertSerializedEqual(complex(1, 2))
def test_register_non_serializer(self):
with self.assertRaisesMessage(
ValueError, "'TestModel1' must inherit from 'BaseSerializer'."
):
MigrationWriter.register_serializer(complex, TestModel1)
def test_composite_pk_import(self):
migration = type(
"Migration",
(migrations.Migration,),
{
"operations": [
migrations.AddField(
"foo",
"bar",
models.CompositePrimaryKey("foo_id", "bar_id"),
),
],
},
)
writer = MigrationWriter(migration)
output = writer.as_string()
self.assertEqual(output.count("import"), 1)
self.assertIn("from django.db import migrations, models", output)
def test_run_before(self):
for run_before, expected_run_before_str in [
([("foo", "0001_bar")], " run_before = [('foo', '0001_bar')]\n"),
(
[("foo", "0001_bar"), ("foo", "0002_baz")],
" run_before = [('foo', '0001_bar'), ('foo', '0002_baz')]\n",
),
]:
with self.subTest(run_before=run_before):
migration = type(
"Migration",
(migrations.Migration,),
{"operations": [], "run_before": run_before},
)
writer = MigrationWriter(migration)
output = writer.as_string()
self.assertIn(expected_run_before_str, output)
def test_atomic_is_false(self):
migration = type(
"Migration",
(migrations.Migration,),
{"operations": [], "atomic": False},
)
writer = MigrationWriter(migration)
output = writer.as_string()
self.assertIn(" atomic = False\n", output)
def test_default_attributes(self):
migration = type("Migration", (migrations.Migration,), {})
writer = MigrationWriter(migration)
output = writer.as_string()
self.assertIn(" dependencies = [\n ]\n", output)
self.assertIn(" operations = [\n ]\n", output)
self.assertNotIn("atomic", output)
self.assertNotIn("initial", output)
self.assertNotIn("run_before", output)
self.assertNotIn("replaces", output)
|
./temp_repos/django/django/db/migrations/writer.py
|
./temp_repos/django/tests/migrations/test_writer.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'OperationWriter'.
Context:
- Class Name: OperationWriter
- Dependencies to Mock: indentation, migration, include_header, operation
- Key Imports: django.db.migrations.serializer, django.utils.module_loading, django.conf, django.db, django.utils.timezone, django, importlib, django.db.migrations.loader, django.apps, django.utils.inspect
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
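For reference, a minimal sketch of the requested test might look like the following (illustrative only; FakeOperation is a hypothetical stand-in exposing just the surface OperationWriter relies on: deconstruct(), serialization_expand_args, and an introspectable __init__):
import unittest
from unittest import mock
from django.db.migrations.writer import OperationWriter
class FakeOperation:
    serialization_expand_args = []
    def __init__(self, name=None):
        self.name = name
    def deconstruct(self):
        return (self.__class__.__name__, [], {"name": self.name})
class OperationWriterSketchTests(unittest.TestCase):
    def test_external_operation_gets_an_import(self):
        # Arrange
        writer = OperationWriter(FakeOperation(name="x"), indentation=0)
        # Act
        buff, imports = writer.serialize()
        # Assert: classes outside django.db.migrations are imported by module.
        self.assertIn("import %s" % FakeOperation.__module__, imports)
        self.assertIn("FakeOperation(", buff)
        self.assertIn("name='x',", buff)
    def test_imports_from_serialized_arguments_are_collected(self):
        # Arrange: stub value serialization to control what it reports.
        with mock.patch(
            "django.db.migrations.writer.MigrationWriter.serialize",
            return_value=("SENTINEL", {"import sentinel"}),
        ):
            writer = OperationWriter(FakeOperation(name="x"), indentation=0)
            # Act
            buff, imports = writer.serialize()
        # Assert
        self.assertIn("import sentinel", imports)
        self.assertIn("name=SENTINEL,", buff)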
|
OperationWriter
|
python
|
from django.core.checks import Error, Tags, register
@register(Tags.commands)
def migrate_and_makemigrations_autodetector(**kwargs):
from django.core.management import get_commands, load_command_class
commands = get_commands()
make_migrations = load_command_class(commands["makemigrations"], "makemigrations")
migrate = load_command_class(commands["migrate"], "migrate")
if make_migrations.autodetector is not migrate.autodetector:
return [
Error(
"The migrate and makemigrations commands must have the same "
"autodetector.",
hint=(
f"makemigrations.Command.autodetector is "
f"{make_migrations.autodetector.__name__}, but "
f"migrate.Command.autodetector is "
f"{migrate.autodetector.__name__}."
),
id="commands.E001",
)
]
return []
|
import datetime
import importlib
import io
import os
import re
import shutil
import sys
from pathlib import Path
from unittest import mock
from django.apps import apps
from django.core.checks import Error, Tags, register
from django.core.checks.registry import registry
from django.core.management import CommandError, call_command
from django.core.management.base import SystemCheckError
from django.core.management.commands.makemigrations import (
Command as MakeMigrationsCommand,
)
from django.core.management.commands.migrate import Command as MigrateCommand
from django.db import (
ConnectionHandler,
DatabaseError,
OperationalError,
connection,
connections,
models,
)
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.utils import truncate_name
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.exceptions import InconsistentMigrationHistory
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.recorder import MigrationRecorder
from django.db.migrations.writer import MigrationWriter
from django.test import TestCase, override_settings, skipUnlessDBFeature
from django.test.utils import captured_stdout, extend_sys_path, isolate_apps
from django.utils import timezone
from django.utils.version import get_docs_version
from .models import UnicodeModel, UnserializableModel
from .routers import TestRouter
from .test_base import MigrationTestBase
HAS_BLACK = shutil.which("black")
class MigrateTests(MigrationTestBase):
"""
Tests running the migrate command.
"""
databases = {"default", "other"}
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate(self):
"""
Tests basic usage of the migrate command.
"""
# No tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run the migrations to 0001 only
stdout = io.StringIO()
call_command(
"migrate", "migrations", "0001", verbosity=2, stdout=stdout, no_color=True
)
stdout = stdout.getvalue()
self.assertIn(
"Target specific migration: 0001_initial, from migrations", stdout
)
self.assertIn("Applying migrations.0001_initial... OK", stdout)
self.assertIn("Running pre-migrate handlers for application migrations", stdout)
self.assertIn(
"Running post-migrate handlers for application migrations", stdout
)
# The correct tables exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run migrations all the way
call_command("migrate", verbosity=0)
# The correct tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Unmigrate everything
stdout = io.StringIO()
call_command(
"migrate", "migrations", "zero", verbosity=2, stdout=stdout, no_color=True
)
stdout = stdout.getvalue()
self.assertIn("Unapply all migrations: migrations", stdout)
self.assertIn("Unapplying migrations.0002_second... OK", stdout)
self.assertIn("Running pre-migrate handlers for application migrations", stdout)
self.assertIn(
"Running post-migrate handlers for application migrations", stdout
)
# Tables are gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@mock.patch("django.core.management.base.BaseCommand.check")
@override_settings(
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"migrations.migrations_test_apps.migrated_app",
]
)
def test_migrate_with_system_checks(self, mocked_check):
out = io.StringIO()
call_command("migrate", skip_checks=False, no_color=True, stdout=out)
self.assertIn("Apply all migrations: migrated_app", out.getvalue())
mocked_check.assert_called_once_with(databases=["default"])
def test_migrate_with_custom_system_checks(self):
original_checks = registry.registered_checks.copy()
@register(Tags.signals)
def my_check(app_configs, **kwargs):
return [Error("my error")]
self.addCleanup(setattr, registry, "registered_checks", original_checks)
class CustomMigrateCommandWithSignalsChecks(MigrateCommand):
requires_system_checks = [Tags.signals]
command = CustomMigrateCommandWithSignalsChecks()
with self.assertRaises(SystemCheckError):
call_command(command, skip_checks=False, stderr=io.StringIO())
class CustomMigrateCommandWithSecurityChecks(MigrateCommand):
requires_system_checks = [Tags.security]
command = CustomMigrateCommandWithSecurityChecks()
call_command(command, skip_checks=False, stdout=io.StringIO())
@override_settings(
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"migrations.migrations_test_apps.migrated_app",
]
)
def test_migrate_runs_database_system_checks(self):
original_checks = registry.registered_checks.copy()
self.addCleanup(setattr, registry, "registered_checks", original_checks)
out = io.StringIO()
mock_check = mock.Mock(return_value=[])
register(mock_check, Tags.database)
call_command("migrate", skip_checks=False, no_color=True, stdout=out)
self.assertIn("Apply all migrations: migrated_app", out.getvalue())
mock_check.assert_called_once_with(app_configs=None, databases=["default"])
@override_settings(
INSTALLED_APPS=[
"migrations",
"migrations.migrations_test_apps.unmigrated_app_syncdb",
]
)
def test_app_without_migrations(self):
msg = "App 'unmigrated_app_syncdb' does not have migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", app_label="unmigrated_app_syncdb")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_clashing_prefix"}
)
def test_ambiguous_prefix(self):
msg = (
"More than one migration matches 'a' in app 'migrations'. Please "
"be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", app_label="migrations", migration_name="a")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_unknown_prefix(self):
msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
with self.assertRaisesMessage(CommandError, msg):
call_command(
"migrate", app_label="migrations", migration_name="nonexistent"
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"}
)
def test_migrate_initial_false(self):
"""
`Migration.initial = False` skips fake-initial detection.
"""
# Make sure no tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
# Fake rollback
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure fake-initial detection does not run
with self.assertRaises(DatabaseError):
call_command(
"migrate", "migrations", "0001", fake_initial=True, verbosity=0
)
call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
# Real rollback
call_command("migrate", "migrations", "zero", verbosity=0)
# Make sure it's all gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
DATABASE_ROUTERS=["migrations.routers.TestRouter"],
)
def test_migrate_fake_initial(self):
"""
--fake-initial only works if all tables created in the initial
migration of an app exists. Database routers must be obeyed when doing
that check.
"""
# Make sure no tables are created
for db in self.databases:
self.assertTableNotExists("migrations_author", using=db)
self.assertTableNotExists("migrations_tribble", using=db)
try:
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
call_command("migrate", "migrations", "0001", verbosity=0, database="other")
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Also check the "other" database
self.assertTableNotExists("migrations_author", using="other")
self.assertTableExists("migrations_tribble", using="other")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
call_command(
"migrate",
"migrations",
"zero",
fake=True,
verbosity=0,
database="other",
)
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble", using="other")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", "0001", verbosity=0)
# Run initial migration with an explicit --fake-initial
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: False
):
call_command(
"migrate",
"migrations",
"0001",
fake_initial=True,
stdout=out,
verbosity=1,
)
call_command(
"migrate",
"migrations",
"0001",
fake_initial=True,
verbosity=0,
database="other",
)
self.assertIn("migrations.0001_initial... faked", out.getvalue().lower())
# Run migrations all the way.
call_command("migrate", verbosity=0)
call_command("migrate", verbosity=0, database="other")
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
self.assertTableNotExists("migrations_author", using="other")
self.assertTableNotExists("migrations_tribble", using="other")
self.assertTableNotExists("migrations_book", using="other")
# Fake a roll-back.
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
call_command(
"migrate",
"migrations",
"zero",
fake=True,
verbosity=0,
database="other",
)
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Run initial migration.
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", verbosity=0)
# Run initial migration with an explicit --fake-initial.
with self.assertRaises(DatabaseError):
# Fails because "migrations_tribble" does not exist but needs
# to in order to make --fake-initial work.
call_command("migrate", "migrations", fake_initial=True, verbosity=0)
# Fake an apply.
call_command("migrate", "migrations", fake=True, verbosity=0)
call_command(
"migrate", "migrations", fake=True, verbosity=0, database="other"
)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
call_command("migrate", "migrations", "zero", verbosity=0, database="other")
# Make sure it's all gone
for db in self.databases:
self.assertTableNotExists("migrations_author", using=db)
self.assertTableNotExists("migrations_tribble", using=db)
self.assertTableNotExists("migrations_book", using=db)
@skipUnlessDBFeature("ignores_table_name_case")
def test_migrate_fake_initial_case_insensitive(self):
with override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_fake_initial_case_insensitive.initial",
}
):
call_command("migrate", "migrations", "0001", verbosity=0)
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
with override_settings(
MIGRATION_MODULES={
"migrations": (
"migrations.test_fake_initial_case_insensitive.fake_initial"
),
}
):
out = io.StringIO()
call_command(
"migrate",
"migrations",
"0001",
fake_initial=True,
stdout=out,
verbosity=1,
no_color=True,
)
self.assertIn(
"migrations.0001_initial... faked",
out.getvalue().lower(),
)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_fake_split_initial"
}
)
def test_migrate_fake_split_initial(self):
"""
Split initial migrations can be faked with --fake-initial.
"""
try:
call_command("migrate", "migrations", "0002", verbosity=0)
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: False
):
call_command(
"migrate",
"migrations",
"0002",
fake_initial=True,
stdout=out,
verbosity=1,
)
value = out.getvalue().lower()
self.assertIn("migrations.0001_initial... faked", value)
self.assertIn("migrations.0002_second... faked", value)
finally:
# Fake an apply.
call_command("migrate", "migrations", fake=True, verbosity=0)
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"}
)
def test_migrate_conflict_exit(self):
"""
migrate exits if it detects a conflict.
"""
msg = (
"Conflicting migrations detected; multiple leaf nodes in the "
"migration graph: (0002_conflicting_second, 0002_second in "
"migrations).\n"
"To fix them run 'python manage.py makemigrations --merge'"
)
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", "migrations")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
}
)
def test_migrate_check(self):
with self.assertRaises(SystemExit):
call_command("migrate", "migrations", "0001", check_unapplied=True)
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
]
)
def test_migrate_check_migrated_app(self):
out = io.StringIO()
try:
call_command("migrate", "migrated_app", verbosity=0)
call_command(
"migrate",
"migrated_app",
stdout=out,
check_unapplied=True,
)
self.assertEqual(out.getvalue(), "")
finally:
# Unmigrate everything.
call_command("migrate", "migrated_app", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_plan",
}
)
def test_migrate_check_plan(self):
out = io.StringIO()
with self.assertRaises(SystemExit):
call_command(
"migrate",
"migrations",
"0001",
check_unapplied=True,
plan=True,
stdout=out,
no_color=True,
)
self.assertEqual(
"Planned operations:\n"
"migrations.0001_initial\n"
" Create model Salamander\n"
" Raw Python operation -> Grow salamander tail.\n",
out.getvalue(),
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_showmigrations_list(self):
"""
showmigrations --list displays migrations and whether or not they're
applied.
"""
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: True
):
call_command(
"showmigrations", format="list", stdout=out, verbosity=0, no_color=False
)
self.assertEqual(
"\x1b[1mmigrations\n\x1b[0m [ ] 0001_initial\n [ ] 0002_second\n",
out.getvalue().lower(),
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = io.StringIO()
# Giving the explicit app_label tests for selective `show_list` in the
# command
call_command(
"showmigrations",
"migrations",
format="list",
stdout=out,
verbosity=0,
no_color=True,
)
self.assertEqual(
"migrations\n [x] 0001_initial\n [ ] 0002_second\n", out.getvalue().lower()
)
out = io.StringIO()
# Applied datetimes are displayed at verbosity 2+.
call_command(
"showmigrations", "migrations", stdout=out, verbosity=2, no_color=True
)
migration1 = MigrationRecorder(connection).migration_qs.get(
app="migrations", name="0001_initial"
)
self.assertEqual(
"migrations\n"
" [x] 0001_initial (applied at %s)\n"
" [ ] 0002_second\n" % migration1.applied.strftime("%Y-%m-%d %H:%M:%S"),
out.getvalue().lower(),
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_showmigrations_list_squashed(self):
out = io.StringIO()
call_command(
"showmigrations", format="list", stdout=out, verbosity=2, no_color=True
)
self.assertEqual(
"migrations\n [ ] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command(
"migrate",
"migrations",
"0001_squashed_0002",
stdout=out,
verbosity=2,
no_color=True,
)
try:
self.assertIn(
"operations to perform:\n"
" target specific migration: 0001_squashed_0002, from migrations\n"
"running pre-migrate handlers for application migrations\n"
"running migrations:\n"
" applying migrations.0001_squashed_0002... ok (",
out.getvalue().lower(),
)
out = io.StringIO()
call_command(
"showmigrations", format="list", stdout=out, verbosity=2, no_color=True
)
self.assertEqual(
"migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}
)
def test_showmigrations_plan(self):
"""
Tests --plan output of showmigrations command
"""
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third\n"
"[ ] migrations.0002_second\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third ... (migrations.0001_initial)\n"
"[ ] migrations.0002_second ... (migrations.0001_initial, "
"migrations.0003_third)\n",
out.getvalue().lower(),
)
call_command("migrate", "migrations", "0003", verbosity=0)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third\n"
"[ ] migrations.0002_second\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third ... (migrations.0001_initial)\n"
"[ ] migrations.0002_second ... (migrations.0001_initial, "
"migrations.0003_third)\n",
out.getvalue().lower(),
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_plan"}
)
def test_migrate_plan(self):
"""Tests migrate --plan output."""
out = io.StringIO()
# Show the plan up to the third migration.
call_command(
"migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0001_initial\n"
" Create model Salamander\n"
" Raw Python operation -> Grow salamander tail.\n"
"migrations.0002_second\n"
" Create model Book\n"
" Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
"migrations.0003_third\n"
" Create model Author\n"
" Raw SQL operation -> ['SELECT * FROM migrations_author']\n",
out.getvalue(),
)
try:
# Migrate to the third migration.
call_command("migrate", "migrations", "0003", verbosity=0)
out = io.StringIO()
# Show the plan for when there is nothing to apply.
call_command(
"migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n No planned migration operations.\n",
out.getvalue(),
)
out = io.StringIO()
# Show the plan for reverse migration back to 0001.
call_command(
"migrate", "migrations", "0001", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0003_third\n"
" Undo Create model Author\n"
" Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
"migrations.0002_second\n"
" Undo Create model Book\n"
" Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n",
out.getvalue(),
)
out = io.StringIO()
# Show the migration plan to fourth, with truncated details.
call_command(
"migrate", "migrations", "0004", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0004_fourth\n"
" Raw SQL operation -> SELECT * FROM migrations_author WHE…\n",
out.getvalue(),
)
# Show the plan when an operation is irreversible.
# Migrate to the fourth migration.
call_command("migrate", "migrations", "0004", verbosity=0)
out = io.StringIO()
call_command(
"migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0004_fourth\n"
" Raw SQL operation -> IRREVERSIBLE\n",
out.getvalue(),
)
out = io.StringIO()
call_command(
"migrate", "migrations", "0005", plan=True, stdout=out, no_color=True
)
# Operation is marked as irreversible only in the revert plan.
self.assertEqual(
"Planned operations:\n"
"migrations.0005_fifth\n"
" Raw Python operation\n"
" Raw Python operation\n"
" Raw Python operation -> Feed salamander.\n",
out.getvalue(),
)
call_command("migrate", "migrations", "0005", verbosity=0)
out = io.StringIO()
call_command(
"migrate", "migrations", "0004", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0005_fifth\n"
" Raw Python operation -> IRREVERSIBLE\n"
" Raw Python operation -> IRREVERSIBLE\n"
" Raw Python operation\n",
out.getvalue(),
)
finally:
# Cleanup by unmigrating everything: fake the irreversible, then
# migrate all to zero.
call_command("migrate", "migrations", "0003", fake=True, verbosity=0)
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}
)
def test_showmigrations_no_migrations(self):
out = io.StringIO()
call_command("showmigrations", stdout=out, no_color=True)
self.assertEqual("migrations\n (no migrations)\n", out.getvalue().lower())
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app"]
)
def test_showmigrations_unmigrated_app(self):
out = io.StringIO()
call_command("showmigrations", "unmigrated_app", stdout=out, no_color=True)
try:
self.assertEqual(
"unmigrated_app\n (no migrations)\n", out.getvalue().lower()
)
finally:
# unmigrated_app.SillyModel has a foreign key to
# 'migrations.Tribble', but that model is only defined in a
# migration, so the global app registry never sees it and the
# reference is left dangling. Remove it to avoid problems in
# subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}
)
def test_showmigrations_plan_no_migrations(self):
"""
Tests --plan output of showmigrations command without migrations
"""
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, no_color=True)
self.assertEqual("(no migrations)\n", out.getvalue().lower())
out = io.StringIO()
call_command(
"showmigrations", format="plan", stdout=out, verbosity=2, no_color=True
)
self.assertEqual("(no migrations)\n", out.getvalue().lower())
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}
)
def test_showmigrations_plan_squashed(self):
"""
Tests --plan output of showmigrations command with squashed migrations.
"""
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto\n"
"[ ] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto ... (migrations.1_auto)\n"
"[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n"
"[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto ... (migrations.6_auto)\n",
out.getvalue().lower(),
)
call_command("migrate", "migrations", "3_squashed_5", verbosity=0)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto\n"
"[x] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto ... (migrations.1_auto)\n"
"[x] migrations.3_squashed_5 ... (migrations.2_auto)\n"
"[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto ... (migrations.6_auto)\n",
out.getvalue().lower(),
)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.mutate_state_b",
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_showmigrations_plan_single_app_label(self):
"""
`showmigrations --plan app_label` output with a single app_label.
"""
# Single app with no dependencies on other apps.
out = io.StringIO()
call_command("showmigrations", "mutate_state_b", format="plan", stdout=out)
self.assertEqual(
"[ ] mutate_state_b.0001_initial\n[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
# Single app with dependencies.
out = io.StringIO()
call_command("showmigrations", "author_app", format="plan", stdout=out)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n",
out.getvalue(),
)
# Some migrations already applied.
call_command("migrate", "author_app", "0001", verbosity=0)
out = io.StringIO()
call_command("showmigrations", "author_app", format="plan", stdout=out)
self.assertEqual(
"[X] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n",
out.getvalue(),
)
# Cleanup by unmigrating author_app.
call_command("migrate", "author_app", "zero", verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.mutate_state_b",
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_showmigrations_plan_multiple_app_labels(self):
"""
`showmigrations --plan app_label` output with multiple app_labels.
"""
# Multiple apps: author_app depends on book_app; mutate_state_b doesn't
# depend on other apps.
out = io.StringIO()
call_command(
"showmigrations", "mutate_state_b", "author_app", format="plan", stdout=out
)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n"
"[ ] mutate_state_b.0001_initial\n"
"[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
# Multiple apps: args order shouldn't matter (the same result is
# expected as above).
out = io.StringIO()
call_command(
"showmigrations", "author_app", "mutate_state_b", format="plan", stdout=out
)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n"
"[ ] mutate_state_b.0001_initial\n"
"[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app"]
)
def test_showmigrations_plan_app_label_no_migrations(self):
out = io.StringIO()
call_command(
"showmigrations", "unmigrated_app", format="plan", stdout=out, no_color=True
)
try:
self.assertEqual("(no migrations)\n", out.getvalue())
finally:
# unmigrated_app.SillyModel has a foreign key to
# 'migrations.Tribble', but that model is only defined in a
# migration, so the global app registry never sees it and the
# reference is left dangling. Remove it to avoid problems in
# subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_forwards(self):
"""
sqlmigrate outputs forward looking SQL.
"""
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out, no_color=True)
lines = out.getvalue().splitlines()
if connection.features.can_rollback_ddl:
self.assertEqual(lines[0], connection.ops.start_transaction_sql())
self.assertEqual(lines[-1], connection.ops.end_transaction_sql())
lines = lines[1:-1]
self.assertEqual(
lines[:3],
[
"--",
"-- Create model Author",
"--",
],
)
self.assertIn(
"create table %s" % connection.ops.quote_name("migrations_author").lower(),
lines[3].lower(),
)
pos = lines.index("--", 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Create model Tribble",
"--",
],
)
self.assertIn(
"create table %s" % connection.ops.quote_name("migrations_tribble").lower(),
lines[pos + 3].lower(),
)
pos = lines.index("--", pos + 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Add field bool to tribble",
"--",
],
)
pos = lines.index("--", pos + 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Alter unique_together for author (1 constraint(s))",
"--",
],
)
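
# The BEGIN/COMMIT wrapper checked above is only emitted when the
# backend supports transactional DDL
# (connection.features.can_rollback_ddl), which is why it is stripped
# conditionally.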
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_backwards(self):
"""
sqlmigrate outputs reverse looking SQL.
"""
# Cannot generate the reverse SQL unless we've applied the migration.
call_command("migrate", "migrations", verbosity=0)
out = io.StringIO()
call_command(
"sqlmigrate",
"migrations",
"0001",
stdout=out,
backwards=True,
no_color=True,
)
lines = out.getvalue().splitlines()
try:
if connection.features.can_rollback_ddl:
self.assertEqual(lines[0], connection.ops.start_transaction_sql())
self.assertEqual(lines[-1], connection.ops.end_transaction_sql())
lines = lines[1:-1]
self.assertEqual(
lines[:3],
[
"--",
"-- Alter unique_together for author (1 constraint(s))",
"--",
],
)
pos = lines.index("--", 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Add field bool to tribble",
"--",
],
)
pos = lines.index("--", pos + 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Create model Tribble",
"--",
],
)
next_pos = lines.index("--", pos + 3)
drop_table_sql = (
"drop table %s"
% connection.ops.quote_name("migrations_tribble").lower()
)
for line in lines[pos + 3 : next_pos]:
if drop_table_sql in line.lower():
break
else:
self.fail("DROP TABLE (tribble) not found.")
pos = next_pos
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Create model Author",
"--",
],
)
drop_table_sql = (
"drop table %s" % connection.ops.quote_name("migrations_author").lower()
)
for line in lines[pos + 3 :]:
if drop_table_sql in line.lower():
break
else:
self.fail("DROP TABLE (author) not found.")
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}
)
def test_sqlmigrate_for_non_atomic_migration(self):
"""
Transaction wrappers aren't shown for non-atomic migrations.
"""
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
queries = [q.strip() for q in output.splitlines()]
if connection.ops.start_transaction_sql():
self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries)
self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_for_non_transactional_databases(self):
"""
Transaction wrappers aren't shown for databases that don't support
transactional DDL.
"""
out = io.StringIO()
with mock.patch.object(connection.features, "can_rollback_ddl", False):
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
queries = [q.strip() for q in output.splitlines()]
start_transaction_sql = connection.ops.start_transaction_sql()
if start_transaction_sql:
self.assertNotIn(start_transaction_sql.lower(), queries)
self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_ambiguous_prefix_squashed_migrations(self):
msg = (
"More than one migration matches '0001' in app 'migrations'. "
"Please be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("sqlmigrate", "migrations", "0001")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_squashed_migration(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_squashed_0002", stdout=out)
output = out.getvalue().lower()
self.assertIn("-- create model author", output)
self.assertIn("-- create model book", output)
self.assertNotIn("-- create model tribble", output)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_replaced_migration(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_initial", stdout=out)
output = out.getvalue().lower()
self.assertIn("-- create model author", output)
self.assertIn("-- create model tribble", output)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_operations"}
)
def test_sqlmigrate_no_operations(self):
err = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_initial", stderr=err)
self.assertEqual(err.getvalue(), "No operations found.\n")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_noop"}
)
def test_sqlmigrate_noop(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
lines = out.getvalue().splitlines()
if connection.features.can_rollback_ddl:
lines = lines[1:-1]
self.assertEqual(
lines,
[
"--",
"-- Raw SQL operation",
"--",
"-- (no-op)",
],
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_manual_porting"}
)
def test_sqlmigrate_unrepresentable(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0002", stdout=out)
lines = out.getvalue().splitlines()
if connection.features.can_rollback_ddl:
lines = lines[1:-1]
self.assertEqual(
lines,
[
"--",
"-- Raw Python operation",
"--",
"-- THIS OPERATION CANNOT BE WRITTEN AS SQL",
],
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_transaction_keywords_not_colorized(self):
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: True
):
call_command("sqlmigrate", "migrations", "0001", stdout=out, no_color=False)
self.assertNotIn("\x1b", out.getvalue())
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_operations"},
INSTALLED_APPS=["django.contrib.auth"],
)
def test_sqlmigrate_system_checks_colorized(self):
with (
mock.patch(
"django.core.management.color.supports_color", lambda *args: True
),
self.assertRaisesMessage(SystemCheckError, "\x1b"),
):
call_command(
"sqlmigrate", "migrations", "0001", skip_checks=False, no_color=False
)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.unmigrated_app",
],
)
def test_regression_22823_unmigrated_fk_to_migrated_model(self):
"""
Assuming you have 3 apps, `A`, `B`, and `C`, such that:
* `A` has migrations
* `B` has a migration we want to apply
* `C` has no migrations, but has an FK to `A`
When we try to migrate "B", an exception occurs because the
"B" was not included in the ProjectState that is used to detect
soft-applied migrations (#22823).
"""
call_command("migrate", "migrated_unapplied_app", verbosity=0)
# unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble',
# but that model is only defined in a migration, so the global app
# registry never sees it and the reference is left dangling. Remove it
# to avoid problems in subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app_syncdb"]
)
def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self):
"""
For an app without migrations, editor.execute() is used for executing
the syncdb deferred SQL.
"""
stdout = io.StringIO()
with mock.patch.object(BaseDatabaseSchemaEditor, "execute") as execute:
call_command(
"migrate", run_syncdb=True, verbosity=1, stdout=stdout, no_color=True
)
create_table_count = len(
[call for call in execute.mock_calls if "CREATE TABLE" in str(call)]
)
self.assertEqual(create_table_count, 2)
# There's at least one deferred SQL for creating the foreign key
# index.
self.assertGreater(len(execute.mock_calls), 2)
stdout = stdout.getvalue()
self.assertIn("Synchronize unmigrated apps: unmigrated_app_syncdb", stdout)
self.assertIn("Creating tables...", stdout)
table_name = truncate_name(
"unmigrated_app_syncdb_classroom", connection.ops.max_name_length()
)
self.assertIn("Creating table %s" % table_name, stdout)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_syncdb_app_with_migrations(self):
msg = "Can't use run_syncdb with app 'migrations' as it has migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", "migrations", run_syncdb=True, verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.unmigrated_app_syncdb",
"migrations.migrations_test_apps.unmigrated_app_simple",
]
)
def test_migrate_syncdb_app_label(self):
"""
Running migrate --run-syncdb with an app_label only creates tables for
the specified app.
"""
stdout = io.StringIO()
with mock.patch.object(BaseDatabaseSchemaEditor, "execute") as execute:
call_command(
"migrate", "unmigrated_app_syncdb", run_syncdb=True, stdout=stdout
)
create_table_count = len(
[call for call in execute.mock_calls if "CREATE TABLE" in str(call)]
)
self.assertEqual(create_table_count, 2)
self.assertGreater(len(execute.mock_calls), 2)
self.assertIn(
"Synchronize unmigrated app: unmigrated_app_syncdb", stdout.getvalue()
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_record_replaced(self):
"""
Running a single squashed migration should record all of the original
replaced migrations as run.
"""
recorder = MigrationRecorder(connection)
out = io.StringIO()
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
"migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
applied_migrations = recorder.applied_migrations()
self.assertIn(("migrations", "0001_initial"), applied_migrations)
self.assertIn(("migrations", "0002_second"), applied_migrations)
self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations)
# Rollback changes
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_record_squashed(self):
"""
Running migrate for a squashed migration should record as run
if all of the replaced migrations have been run (#25231).
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
out = io.StringIO()
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
"migrations\n"
" [-] 0001_squashed_0002 (2 squashed migrations) "
"run 'manage.py migrate' to finish recording.\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
"migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
self.assertIn(
("migrations", "0001_squashed_0002"), recorder.applied_migrations()
)
# No changes were actually applied so there is nothing to rollback
def test_migrate_partially_applied_squashed_migration(self):
"""
Migrating to a squashed migration specified by name should succeed
even if it is partially applied.
"""
with self.temporary_migration_module(module="migrations.test_migrations"):
recorder = MigrationRecorder(connection)
try:
call_command("migrate", "migrations", "0001_initial", verbosity=0)
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
verbosity=0,
)
call_command(
"migrate",
"migrations",
"0001_squashed_0002_second",
verbosity=0,
)
applied_migrations = recorder.applied_migrations()
self.assertIn(("migrations", "0002_second"), applied_migrations)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_forward_to_squashed_migration(self):
try:
call_command("migrate", "migrations", "0001_initial", verbosity=0)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_backward_to_squashed_migration(self):
try:
call_command("migrate", "migrations", "0001_squashed_0002", verbosity=0)
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_book")
call_command("migrate", "migrations", "0001_initial", verbosity=0)
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_book")
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_inconsistent_history(self):
"""
Running migrate with some migrations applied before their dependencies
should not be allowed.
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0002_second")
msg = (
"Migration migrations.0002_second is applied before its dependency "
"migrations.0001_initial"
)
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
call_command("migrate")
applied_migrations = recorder.applied_migrations()
self.assertNotIn(("migrations", "0001_initial"), applied_migrations)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.migrated_app",
]
)
def test_migrate_not_reflected_changes(self):
class NewModel1(models.Model):
class Meta:
app_label = "migrated_app"
class NewModel2(models.Model):
class Meta:
app_label = "migrated_unapplied_app"
out = io.StringIO()
try:
call_command("migrate", verbosity=0)
call_command("migrate", stdout=out, no_color=True)
self.assertEqual(
"operations to perform:\n"
" apply all migrations: migrated_app, migrated_unapplied_app\n"
"running migrations:\n"
" no migrations to apply.\n"
" your models in app(s): 'migrated_app', "
"'migrated_unapplied_app' have changes that are not yet "
"reflected in a migration, and so won't be applied.\n"
" run 'manage.py makemigrations' to make new migrations, and "
"then re-run 'manage.py migrate' to apply them.\n",
out.getvalue().lower(),
)
finally:
# Unmigrate everything.
call_command("migrate", "migrated_app", "zero", verbosity=0)
call_command("migrate", "migrated_unapplied_app", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_squashed_no_replaces",
}
)
def test_migrate_prune(self):
"""
With prune=True, references to migration files deleted from the
migrations module (such as after being squashed) are removed from the
django_migrations table.
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
recorder.record_applied("migrations", "0001_squashed_0002")
out = io.StringIO()
try:
call_command("migrate", "migrations", prune=True, stdout=out, no_color=True)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n"
" Pruning migrations.0001_initial OK\n"
" Pruning migrations.0002_second OK\n",
)
applied_migrations = [
migration
for migration in recorder.applied_migrations()
if migration[0] == "migrations"
]
self.assertEqual(applied_migrations, [("migrations", "0001_squashed_0002")])
finally:
recorder.record_unapplied("migrations", "0001_initial")
recorder.record_unapplied("migrations", "0001_second")
recorder.record_unapplied("migrations", "0001_squashed_0002")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_prune_deleted_squashed_migrations_in_replaces(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_squashed"
) as migration_dir:
try:
call_command("migrate", "migrations", verbosity=0)
# Delete the replaced migrations.
os.remove(os.path.join(migration_dir, "0001_initial.py"))
os.remove(os.path.join(migration_dir, "0002_second.py"))
# --prune cannot be used before removing the "replaces"
# attribute.
call_command(
"migrate",
"migrations",
prune=True,
stdout=out,
no_color=True,
)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n"
" Cannot use --prune because the following squashed "
"migrations have their 'replaces' attributes and may not "
"be recorded as applied:\n"
" migrations.0001_squashed_0002\n"
" Re-run 'manage.py migrate' if they are not marked as "
"applied, and remove 'replaces' attributes in their "
"Migration classes.\n",
)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_prune_no_migrations_to_prune(self):
out = io.StringIO()
call_command("migrate", "migrations", prune=True, stdout=out, no_color=True)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n No migrations to prune.\n",
)
out = io.StringIO()
call_command(
"migrate",
"migrations",
prune=True,
stdout=out,
no_color=True,
verbosity=0,
)
self.assertEqual(out.getvalue(), "")
def test_prune_no_app_label(self):
msg = "Migrations can be pruned only when an app is specified."
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", prune=True)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_squashed_no_replaces",
"migrations2": "migrations2.test_migrations_2_squashed_with_replaces",
},
INSTALLED_APPS=["migrations", "migrations2"],
)
def test_prune_respect_app_label(self):
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
recorder.record_applied("migrations", "0001_squashed_0002")
# Second app has squashed migrations with replaces.
recorder.record_applied("migrations2", "0001_initial")
recorder.record_applied("migrations2", "0002_second")
recorder.record_applied("migrations2", "0001_squashed_0002")
out = io.StringIO()
try:
call_command("migrate", "migrations", prune=True, stdout=out, no_color=True)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n"
" Pruning migrations.0001_initial OK\n"
" Pruning migrations.0002_second OK\n",
)
applied_migrations = [
migration
for migration in recorder.applied_migrations()
if migration[0] in ["migrations", "migrations2"]
]
self.assertEqual(
applied_migrations,
[
("migrations", "0001_squashed_0002"),
("migrations2", "0001_initial"),
("migrations2", "0002_second"),
("migrations2", "0001_squashed_0002"),
],
)
finally:
recorder.record_unapplied("migrations", "0001_initial")
recorder.record_unapplied("migrations", "0001_second")
recorder.record_unapplied("migrations", "0001_squashed_0002")
recorder.record_unapplied("migrations2", "0001_initial")
recorder.record_unapplied("migrations2", "0002_second")
recorder.record_unapplied("migrations2", "0001_squashed_0002")
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.with_generic_model",
]
)
def test_migrate_model_inherit_generic(self):
call_command("migrate", verbosity=0)
class MakeMigrationsTests(MigrationTestBase):
"""
Tests running the makemigrations command.
"""
def setUp(self):
super().setUp()
self._old_models = apps.app_configs["migrations"].models.copy()
def tearDown(self):
apps.app_configs["migrations"].models = self._old_models
apps.all_models["migrations"] = self._old_models
apps.clear_cache()
super().tearDown()
def test_files_content(self):
self.assertTableNotExists("migrations_unicodemodel")
apps.register_model("migrations", UnicodeModel)
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
# Check for empty __init__.py file in migrations folder
init_file = os.path.join(migration_dir, "__init__.py")
self.assertTrue(os.path.exists(init_file))
with open(init_file) as fp:
content = fp.read()
self.assertEqual(content, "")
# Check for existing 0001_initial.py file in migration folder
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
with open(initial_file, encoding="utf-8") as fp:
content = fp.read()
self.assertIn("migrations.CreateModel", content)
self.assertIn("initial = True", content)
self.assertIn("úñí©óðé µóðéø", content) # Meta.verbose_name
self.assertIn("úñí©óðé µóðéøß", content) # Meta.verbose_name_plural
self.assertIn("ÚÑÍ¢ÓÐÉ", content) # title.verbose_name
self.assertIn("“Ðjáñgó”", content) # title.default
def test_makemigrations_order(self):
"""
makemigrations should recognize number-only migrations (0001.py).
"""
module = "migrations.test_migrations_order"
with self.temporary_migration_module(module=module) as migration_dir:
if hasattr(importlib, "invalidate_caches"):
# importlib caches os.listdir() on some platforms like macOS
# (#23850).
importlib.invalidate_caches()
call_command(
"makemigrations", "migrations", "--empty", "-n", "a", "-v", "0"
)
self.assertTrue(os.path.exists(os.path.join(migration_dir, "0002_a.py")))
def test_makemigrations_empty_connections(self):
empty_connections = ConnectionHandler({"default": {}})
with mock.patch(
"django.core.management.commands.makemigrations.connections",
new=empty_connections,
):
# with no apps
out = io.StringIO()
call_command("makemigrations", stdout=out)
self.assertIn("No changes detected", out.getvalue())
# with an app
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
init_file = os.path.join(migration_dir, "__init__.py")
self.assertTrue(os.path.exists(init_file))
@override_settings(INSTALLED_APPS=["migrations", "migrations2"])
def test_makemigrations_consistency_checks_respect_routers(self):
"""
The history consistency checks in makemigrations respect
settings.DATABASE_ROUTERS.
"""
def patched_has_table(migration_recorder):
if migration_recorder.connection is connections["other"]:
raise Exception("Other connection")
else:
return mock.DEFAULT
self.assertTableNotExists("migrations_unicodemodel")
apps.register_model("migrations", UnicodeModel)
with mock.patch.object(
MigrationRecorder, "has_table", autospec=True, side_effect=patched_has_table
) as has_table:
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
self.assertEqual(has_table.call_count, 1) # 'default' is checked
# Router says not to migrate 'other' so consistency shouldn't
# be checked.
with self.settings(DATABASE_ROUTERS=["migrations.routers.TestRouter"]):
call_command("makemigrations", "migrations", verbosity=0)
self.assertEqual(has_table.call_count, 2) # 'default' again
# With a router that doesn't prohibit migrating 'other',
# consistency is checked.
with self.settings(
DATABASE_ROUTERS=["migrations.routers.DefaultOtherRouter"]
):
with self.assertRaisesMessage(Exception, "Other connection"):
call_command("makemigrations", "migrations", verbosity=0)
self.assertEqual(has_table.call_count, 4) # 'default' and 'other'
# With a router that doesn't allow migrating on any database,
# no consistency checks are made.
with self.settings(DATABASE_ROUTERS=["migrations.routers.TestRouter"]):
with mock.patch.object(
TestRouter, "allow_migrate", return_value=False
) as allow_migrate:
call_command("makemigrations", "migrations", verbosity=0)
allow_migrate.assert_any_call(
"other", "migrations", model_name="UnicodeModel"
)
# allow_migrate() is called with the correct arguments.
self.assertGreater(len(allow_migrate.mock_calls), 0)
called_aliases = set()
for mock_call in allow_migrate.mock_calls:
_, call_args, call_kwargs = mock_call
connection_alias, app_name = call_args
called_aliases.add(connection_alias)
# Raises an error if invalid app_name/model_name occurs.
apps.get_app_config(app_name).get_model(call_kwargs["model_name"])
self.assertEqual(called_aliases, set(connections))
self.assertEqual(has_table.call_count, 4)
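
# allow_migrate() is always called as
# allow_migrate(connection_alias, app_label, model_name=...), which is
# what the loop above validates for every recorded call.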
def test_failing_migration(self):
# If a migration fails to serialize, it shouldn't generate an empty
# file. #21280
apps.register_model("migrations", UnserializableModel)
with self.temporary_migration_module() as migration_dir:
with self.assertRaisesMessage(ValueError, "Cannot serialize"):
call_command("makemigrations", "migrations", verbosity=0)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertFalse(os.path.exists(initial_file))
def test_makemigrations_conflict_exit(self):
"""
makemigrations exits if it detects a conflict.
"""
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
):
with self.assertRaises(CommandError) as context:
call_command("makemigrations")
self.assertEqual(
str(context.exception),
"Conflicting migrations detected; multiple leaf nodes in the "
"migration graph: (0002_conflicting_second, 0002_second in "
"migrations).\n"
"To fix them run 'python manage.py makemigrations --merge'",
)
def test_makemigrations_merge_no_conflict(self):
"""
makemigrations exits if in merge mode with no conflicts.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command("makemigrations", merge=True, stdout=out)
self.assertIn("No conflicts detected to merge.", out.getvalue())
def test_makemigrations_empty_no_app_specified(self):
"""
makemigrations exits if no app is specified with 'empty' mode.
"""
msg = "You must supply at least one app label when using --empty."
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", empty=True)
def test_makemigrations_empty_migration(self):
"""
makemigrations properly constructs an empty migration.
"""
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", empty=True, verbosity=0)
# Check for existing 0001_initial.py file in migration folder
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
with open(initial_file, encoding="utf-8") as fp:
content = fp.read()
# Remove all whitespace to check for empty dependencies and
# operations
content = content.replace(" ", "")
self.assertIn(
"dependencies=[]" if HAS_BLACK else "dependencies=[\n]", content
)
self.assertIn(
"operations=[]" if HAS_BLACK else "operations=[\n]", content
)
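
# HAS_BLACK toggles the expected formatting: when the black formatter
# is installed, makemigrations runs it on the generated file, which
# collapses the empty dependencies/operations lists onto one line.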
@override_settings(MIGRATION_MODULES={"migrations": None})
def test_makemigrations_disabled_migrations_for_app(self):
"""
makemigrations raises a nice error when migrations are disabled for an
app.
"""
msg = (
"Django can't create migrations for app 'migrations' because migrations "
"have been disabled via the MIGRATION_MODULES setting."
)
with self.assertRaisesMessage(ValueError, msg):
call_command("makemigrations", "migrations", empty=True, verbosity=0)
def test_makemigrations_no_changes_no_apps(self):
"""
makemigrations exits when there are no changes and no apps are
specified.
"""
out = io.StringIO()
call_command("makemigrations", stdout=out)
self.assertIn("No changes detected", out.getvalue())
def test_makemigrations_no_changes(self):
"""
makemigrations exits when there are no changes to an app.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("No changes detected in app 'migrations'", out.getvalue())
def test_makemigrations_no_apps_initial(self):
"""
makemigrations should detect initial is needed on empty migration
modules if no app provided.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_empty"):
call_command("makemigrations", stdout=out)
self.assertIn("0001_initial.py", out.getvalue())
def test_makemigrations_no_init(self):
"""Migration directories without an __init__.py file are allowed."""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_init"
):
call_command("makemigrations", stdout=out)
self.assertIn("0001_initial.py", out.getvalue())
def test_makemigrations_no_init_ambiguous(self):
"""
Migration directories without an __init__.py file are not allowed if
there are multiple namespace search paths that resolve to them.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_init"
) as migration_dir:
# Copy the project directory into another place under sys.path.
app_dir = Path(migration_dir).parent
os.remove(app_dir / "__init__.py")
project_dir = app_dir.parent
dest = project_dir.parent / "other_dir_in_path"
shutil.copytree(project_dir, dest)
with extend_sys_path(str(dest)):
call_command("makemigrations", stdout=out)
self.assertEqual("No changes detected\n", out.getvalue())
def test_makemigrations_migrations_announce(self):
"""
makemigrations announces the migration at the default verbosity level.
"""
out = io.StringIO()
with self.temporary_migration_module():
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("Migrations for 'migrations'", out.getvalue())
def test_makemigrations_no_common_ancestor(self):
"""
makemigrations fails to merge migrations with no common ancestor.
"""
with self.assertRaises(ValueError) as context:
with self.temporary_migration_module(
module="migrations.test_migrations_no_ancestor"
):
call_command("makemigrations", "migrations", merge=True)
exception_message = str(context.exception)
self.assertIn("Could not find common ancestor of", exception_message)
self.assertIn("0002_second", exception_message)
self.assertIn("0002_conflicting_second", exception_message)
def test_makemigrations_interactive_reject(self):
"""
makemigrations enters and exits interactive mode properly.
"""
# Monkeypatch interactive questioner to auto reject
with mock.patch("builtins.input", mock.Mock(return_value="N")):
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
with captured_stdout():
call_command(
"makemigrations",
"migrations",
name="merge",
merge=True,
interactive=True,
verbosity=0,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
def test_makemigrations_interactive_accept(self):
"""
makemigrations enters interactive mode and merges properly.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch("builtins.input", mock.Mock(return_value="y")):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
merge=True,
interactive=True,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertTrue(os.path.exists(merge_file))
self.assertIn("Created new merge migration", out.getvalue())
def test_makemigrations_default_merge_name(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
merge=True,
interactive=False,
stdout=out,
)
merge_file = os.path.join(
migration_dir,
"0003_merge_0002_conflicting_second_0002_second.py",
)
self.assertIs(os.path.exists(merge_file), True)
with open(merge_file, encoding="utf-8") as fp:
content = fp.read()
if HAS_BLACK:
target_str = '("migrations", "0002_conflicting_second")'
else:
target_str = "('migrations', '0002_conflicting_second')"
self.assertIn(target_str, content)
self.assertIn("Created new merge migration %s" % merge_file, out.getvalue())
@mock.patch("django.db.migrations.utils.datetime")
def test_makemigrations_auto_merge_name(self, mock_datetime):
mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4)
with mock.patch("builtins.input", mock.Mock(return_value="y")):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict_long_name"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
merge=True,
interactive=True,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge_20160102_0304.py")
self.assertTrue(os.path.exists(merge_file))
self.assertIn("Created new merge migration", out.getvalue())
def test_makemigrations_non_interactive_not_null_addition(self):
"""
Non-interactive makemigrations fails when a default is missing on a
new not-null field.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_int = models.IntegerField()
class Meta:
app_label = "migrations"
with self.assertRaises(SystemExit):
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
self.assertIn(
"Field 'silly_int' on model 'sillymodel' not migrated: it is "
"impossible to add a non-nullable field without specifying a "
"default.",
out.getvalue(),
)
def test_makemigrations_interactive_not_null_addition(self):
"""
makemigrations messages when adding a NOT NULL field in interactive
mode.
"""
class Author(models.Model):
silly_field = models.BooleanField(null=False)
class Meta:
app_label = "migrations"
input_msg = (
"It is impossible to add a non-nullable field 'silly_field' to "
"author without specifying a default. This is because the "
"database needs something to populate existing rows.\n"
"Please select a fix:\n"
" 1) Provide a one-off default now (will be set on all existing "
"rows with a null value for this column)\n"
" 2) Quit and manually define a default value in models.py."
)
with self.temporary_migration_module(module="migrations.test_migrations"):
# 2 - quit.
with mock.patch("builtins.input", return_value="2"):
with captured_stdout() as out, self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
self.assertIn(input_msg, out.getvalue())
# 1 - provide a default.
with mock.patch("builtins.input", return_value="1"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
output = out.getvalue()
self.assertIn(input_msg, output)
self.assertIn("Please enter the default value as valid Python.", output)
self.assertIn(
"The datetime and django.utils.timezone modules are "
"available, so it is possible to provide e.g. timezone.now as "
"a value",
output,
)
self.assertIn("Type 'exit' to exit this prompt", output)
def test_makemigrations_non_interactive_not_null_alteration(self):
"""
Non-interactive makemigrations fails when a default is missing on a
field changed to not-null.
"""
class Author(models.Model):
name = models.CharField(max_length=255)
slug = models.SlugField()
age = models.IntegerField(default=0)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_migrations"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
self.assertIn("Alter field slug on author", out.getvalue())
self.assertIn(
"Field 'slug' on model 'author' given a default of NOT PROVIDED "
"and must be corrected.",
out.getvalue(),
)
def test_makemigrations_interactive_not_null_alteration(self):
"""
makemigrations messages when changing a NULL field to NOT NULL in
interactive mode.
"""
class Author(models.Model):
slug = models.SlugField(null=False)
class Meta:
app_label = "migrations"
input_msg = (
"It is impossible to change a nullable field 'slug' on author to "
"non-nullable without providing a default. This is because the "
"database needs something to populate existing rows.\n"
"Please select a fix:\n"
" 1) Provide a one-off default now (will be set on all existing "
"rows with a null value for this column)\n"
" 2) Ignore for now. Existing rows that contain NULL values will "
"have to be handled manually, for example with a RunPython or "
"RunSQL operation.\n"
" 3) Quit and manually define a default value in models.py."
)
with self.temporary_migration_module(module="migrations.test_migrations"):
# No message appears if --dry-run.
with captured_stdout() as out:
call_command(
"makemigrations",
"migrations",
interactive=True,
dry_run=True,
)
self.assertNotIn(input_msg, out.getvalue())
# 3 - quit.
with mock.patch("builtins.input", return_value="3"):
with captured_stdout() as out, self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
self.assertIn(input_msg, out.getvalue())
# 1 - provide a default.
with mock.patch("builtins.input", return_value="1"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
output = out.getvalue()
self.assertIn(input_msg, output)
self.assertIn("Please enter the default value as valid Python.", output)
self.assertIn(
"The datetime and django.utils.timezone modules are "
"available, so it is possible to provide e.g. timezone.now as "
"a value",
output,
)
self.assertIn("Type 'exit' to exit this prompt", output)
def test_makemigrations_non_interactive_no_model_rename(self):
"""
        In non-interactive mode, makemigrations generates a possible model
        rename as a delete plus a create instead of asking about the rename.
"""
class RenamedModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
self.assertIn("Delete model SillyModel", out.getvalue())
self.assertIn("Create model RenamedModel", out.getvalue())
def test_makemigrations_non_interactive_no_field_rename(self):
"""
        In non-interactive mode, makemigrations generates a possible field
        rename as a remove plus an add instead of asking about the rename.
"""
class SillyModel(models.Model):
silly_rename = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
self.assertIn("Remove field silly_field from sillymodel", out.getvalue())
self.assertIn("Add field silly_rename to sillymodel", out.getvalue())
@mock.patch("builtins.input", return_value="Y")
def test_makemigrations_model_rename_interactive(self, mock_input):
class RenamedModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(
module="migrations.test_migrations_no_default",
):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
self.assertIn("Rename model SillyModel to RenamedModel", out.getvalue())
@mock.patch("builtins.input", return_value="Y")
def test_makemigrations_field_rename_interactive(self, mock_input):
class SillyModel(models.Model):
silly_rename = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(
module="migrations.test_migrations_no_default",
):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
self.assertIn(
"Rename field silly_field on sillymodel to silly_rename",
out.getvalue(),
)
def test_makemigrations_handle_merge(self):
"""
makemigrations properly merges the conflicting migrations with
--noinput.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
merge=True,
interactive=False,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertTrue(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertIn("Created new merge migration", output)
def test_makemigration_merge_dry_run(self):
"""
        makemigrations respects the --dry-run option when fixing migration
        conflicts (#24427).
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
dry_run=True,
merge=True,
interactive=False,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
def test_makemigration_merge_dry_run_verbosity_3(self):
"""
`makemigrations --merge --dry-run` writes the merge migration file to
stdout with `verbosity == 3` (#24427).
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
dry_run=True,
merge=True,
interactive=False,
stdout=out,
verbosity=3,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
# Additional output caused by verbosity 3
# The complete merge migration file that would be written
self.assertIn("class Migration(migrations.Migration):", output)
self.assertIn("dependencies = [", output)
self.assertIn("('migrations', '0002_second')", output)
self.assertIn("('migrations', '0002_conflicting_second')", output)
self.assertIn("operations = [", output)
self.assertIn("]", output)
def test_makemigrations_dry_run(self):
"""
`makemigrations --dry-run` should not ask for defaults.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_date = models.DateField() # Added field without a default
silly_auto_now = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command("makemigrations", "migrations", dry_run=True, stdout=out)
# Output the expected changes directly, without asking for defaults
self.assertIn("Add field silly_date to sillymodel", out.getvalue())
def test_makemigrations_dry_run_verbosity_3(self):
"""
Allow `makemigrations --dry-run` to output the migrations file to
stdout (with verbosity == 3).
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_char = models.CharField(default="")
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command(
"makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3
)
# Normal --dry-run output
self.assertIn("+ Add field silly_char to sillymodel", out.getvalue())
# Additional output caused by verbosity 3
# The complete migrations file that would be written
self.assertIn("class Migration(migrations.Migration):", out.getvalue())
self.assertIn("dependencies = [", out.getvalue())
self.assertIn("('migrations', '0001_initial'),", out.getvalue())
self.assertIn("migrations.AddField(", out.getvalue())
self.assertIn("model_name='sillymodel',", out.getvalue())
self.assertIn("name='silly_char',", out.getvalue())
def test_makemigrations_scriptable(self):
"""
With scriptable=True, log output is diverted to stderr, and only the
paths of generated migration files are written to stdout.
"""
out = io.StringIO()
err = io.StringIO()
with self.temporary_migration_module(
module="migrations.migrations.test_migrations",
) as migration_dir:
call_command(
"makemigrations",
"migrations",
scriptable=True,
stdout=out,
stderr=err,
)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertEqual(out.getvalue(), f"{initial_file}\n")
self.assertIn(" + Create model ModelWithCustomBase\n", err.getvalue())
@mock.patch("builtins.input", return_value="Y")
def test_makemigrations_scriptable_merge(self, mock_input):
out = io.StringIO()
err = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict",
) as migration_dir:
call_command(
"makemigrations",
"migrations",
merge=True,
name="merge",
scriptable=True,
stdout=out,
stderr=err,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertEqual(out.getvalue(), f"{merge_file}\n")
self.assertIn(f"Created new merge migration {merge_file}", err.getvalue())
def test_makemigrations_failure_to_format_code(self):
self.assertFormatterFailureCaught("makemigrations", "migrations")
def test_merge_makemigrations_failure_to_format_code(self):
self.assertFormatterFailureCaught("makemigrations", "migrations", empty=True)
self.assertFormatterFailureCaught(
"makemigrations",
"migrations",
merge=True,
interactive=False,
module="migrations.test_migrations_conflict",
)
def test_makemigrations_migrations_modules_path_not_exist(self):
"""
makemigrations creates migrations when specifying a custom location
for migration files using MIGRATION_MODULES if the custom path
doesn't already exist.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar"
with self.temporary_migration_module(module=migration_module) as migration_dir:
call_command("makemigrations", "migrations", stdout=out)
            # The migration file is actually created at the expected path.
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
# Command output indicates the migration is created.
self.assertIn(" + Create model SillyModel", out.getvalue())
@override_settings(MIGRATION_MODULES={"migrations": "some.nonexistent.path"})
def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self):
msg = (
"Could not locate an appropriate location to create migrations "
"package some.nonexistent.path. Make sure the toplevel package "
"exists and can be imported."
)
with self.assertRaisesMessage(ValueError, msg):
call_command("makemigrations", "migrations", empty=True, verbosity=0)
def test_makemigrations_interactive_by_default(self):
"""
The user is prompted to merge by default if there are conflicts and
        merge is True. Answering 'N' here distinguishes this behavior from the
        case where --noinput is specified.
"""
# Monkeypatch interactive questioner to auto reject
out = io.StringIO()
with mock.patch("builtins.input", mock.Mock(return_value="N")):
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations", "migrations", name="merge", merge=True, stdout=out
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
# This will fail if interactive is False by default
self.assertFalse(os.path.exists(merge_file))
self.assertNotIn("Created new merge migration", out.getvalue())
@override_settings(
INSTALLED_APPS=[
"migrations",
"migrations.migrations_test_apps.unspecified_app_with_conflict",
]
)
def test_makemigrations_unspecified_app_with_conflict_no_merge(self):
"""
makemigrations does not raise a CommandError when an unspecified app
has conflicting migrations.
"""
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
call_command("makemigrations", "migrations", merge=False, verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.unspecified_app_with_conflict",
]
)
def test_makemigrations_unspecified_app_with_conflict_merge(self):
"""
makemigrations does not create a merge for an unspecified app even if
it has conflicting migrations.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch("builtins.input", mock.Mock(return_value="y")):
out = io.StringIO()
with self.temporary_migration_module(
app_label="migrated_app"
) as migration_dir:
call_command(
"makemigrations",
"migrated_app",
name="merge",
merge=True,
interactive=True,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
self.assertIn("No conflicts detected to merge.", out.getvalue())
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.conflicting_app_with_dependencies",
]
)
def test_makemigrations_merge_dont_output_dependency_operations(self):
"""
        makemigrations --merge does not output any operations from apps other
        than the given app.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch("builtins.input", mock.Mock(return_value="N")):
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: False
):
call_command(
"makemigrations",
"conflicting_app_with_dependencies",
merge=True,
interactive=True,
stdout=out,
)
self.assertEqual(
out.getvalue().lower(),
"merging conflicting_app_with_dependencies\n"
" branch 0002_conflicting_second\n"
" + create model something\n"
" branch 0002_second\n"
" - delete model tribble\n"
" - remove field silly_field from author\n"
" + add field rating to author\n"
" + create model book\n"
"\n"
"merging will only work if the operations printed above do not "
"conflict\n"
"with each other (working on different fields or models)\n"
"should these migration branches be merged? [y/n] ",
)
def test_makemigrations_with_custom_name(self):
"""
        makemigrations --name generates a custom migration name.
"""
with self.temporary_migration_module() as migration_dir:
def cmd(migration_count, migration_name, *args):
call_command(
"makemigrations",
"migrations",
"--verbosity",
"0",
"--name",
migration_name,
*args,
)
migration_file = os.path.join(
migration_dir, "%s_%s.py" % (migration_count, migration_name)
)
# Check for existing migration file in migration folder
self.assertTrue(os.path.exists(migration_file))
with open(migration_file, encoding="utf-8") as fp:
content = fp.read()
content = content.replace(" ", "")
return content
# generate an initial migration
migration_name_0001 = "my_initial_migration"
content = cmd("0001", migration_name_0001)
self.assertIn(
"dependencies=[]" if HAS_BLACK else "dependencies=[\n]", content
)
# importlib caches os.listdir() on some platforms like macOS
# (#23850).
if hasattr(importlib, "invalidate_caches"):
importlib.invalidate_caches()
# generate an empty migration
migration_name_0002 = "my_custom_migration"
content = cmd("0002", migration_name_0002, "--empty")
if HAS_BLACK:
template_str = 'dependencies=[\n("migrations","0001_%s"),\n]'
else:
template_str = "dependencies=[\n('migrations','0001_%s'),\n]"
self.assertIn(
template_str % migration_name_0001,
content,
)
self.assertIn("operations=[]" if HAS_BLACK else "operations=[\n]", content)
def test_makemigrations_with_invalid_custom_name(self):
msg = "The migration name must be a valid Python identifier."
with self.assertRaisesMessage(CommandError, msg):
call_command(
"makemigrations", "migrations", "--name", "invalid name", "--empty"
)
def test_makemigrations_check_with_changes(self):
"""
makemigrations --check should exit with a non-zero status when
there are changes to an app requiring migrations.
"""
out = io.StringIO()
with self.temporary_migration_module() as tmpdir:
with self.assertRaises(SystemExit) as cm:
call_command(
"makemigrations",
"--check",
"migrations",
stdout=out,
)
self.assertEqual(os.listdir(tmpdir), ["__init__.py"])
self.assertEqual(cm.exception.code, 1)
self.assertIn("Migrations for 'migrations':", out.getvalue())
def test_makemigrations_check_no_changes(self):
"""
makemigrations --check should exit with a zero status when there are no
changes.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
call_command("makemigrations", "--check", "migrations", stdout=out)
self.assertEqual("No changes detected in app 'migrations'\n", out.getvalue())
def test_makemigrations_migration_path_output(self):
"""
makemigrations should print the relative paths to the migrations unless
they are outside of the current tree, in which case the absolute path
should be shown.
"""
out = io.StringIO()
apps.register_model("migrations", UnicodeModel)
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", stdout=out)
self.assertIn(
os.path.join(migration_dir, "0001_initial.py"), out.getvalue()
)
def test_makemigrations_migration_path_output_valueerror(self):
"""
makemigrations prints the absolute path if os.path.relpath() raises a
ValueError when it's impossible to obtain a relative path, e.g. on
Windows if Django is installed on a different drive than where the
migration files are created.
"""
out = io.StringIO()
with self.temporary_migration_module() as migration_dir:
with mock.patch("os.path.relpath", side_effect=ValueError):
call_command("makemigrations", "migrations", stdout=out)
self.assertIn(os.path.join(migration_dir, "0001_initial.py"), out.getvalue())
def test_makemigrations_inconsistent_history(self):
"""
        makemigrations raises an InconsistentMigrationHistory exception if
        some migrations are applied before their dependencies.
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0002_second")
msg = (
"Migration migrations.0002_second is applied before its dependency "
"migrations.0001_initial"
)
with self.temporary_migration_module(module="migrations.test_migrations"):
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
call_command("makemigrations")
def test_makemigrations_inconsistent_history_db_failure(self):
msg = (
"Got an error checking a consistent migration history performed "
"for database connection 'default': could not connect to server"
)
with mock.patch(
"django.db.migrations.loader.MigrationLoader.check_consistent_history",
side_effect=OperationalError("could not connect to server"),
):
with self.temporary_migration_module():
with self.assertWarns(RuntimeWarning) as cm:
call_command("makemigrations", verbosity=0)
self.assertEqual(str(cm.warning), msg)
@mock.patch("builtins.input", return_value="1")
@mock.patch(
"django.db.migrations.questioner.sys.stdin",
mock.MagicMock(encoding=sys.getdefaultencoding()),
)
def test_makemigrations_auto_now_add_interactive(self, *args):
"""
makemigrations prompts the user when adding auto_now_add to an existing
model.
"""
class Entry(models.Model):
title = models.CharField(max_length=255)
creation_date = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "migrations"
input_msg = (
"It is impossible to add the field 'creation_date' with "
"'auto_now_add=True' to entry without providing a default. This "
"is because the database needs something to populate existing "
"rows.\n"
" 1) Provide a one-off default now which will be set on all "
"existing rows\n"
" 2) Quit and manually define a default value in models.py."
)
# Monkeypatch interactive questioner to auto accept
prompt_stdout = io.StringIO()
with self.temporary_migration_module(module="migrations.test_auto_now_add"):
call_command(
"makemigrations", "migrations", interactive=True, stdout=prompt_stdout
)
prompt_output = prompt_stdout.getvalue()
self.assertIn(input_msg, prompt_output)
self.assertIn("Please enter the default value as valid Python.", prompt_output)
self.assertIn(
"Accept the default 'timezone.now' by pressing 'Enter' or provide "
"another value.",
prompt_output,
)
self.assertIn("Type 'exit' to exit this prompt", prompt_output)
self.assertIn("Add field creation_date to entry", prompt_output)
@mock.patch("builtins.input", return_value="2")
def test_makemigrations_auto_now_add_interactive_quit(self, mock_input):
class Author(models.Model):
publishing_date = models.DateField(auto_now_add=True)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_migrations"):
with captured_stdout():
with self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
def test_makemigrations_non_interactive_auto_now_add_addition(self):
"""
Non-interactive makemigrations fails when a default is missing on a
new field when auto_now_add=True.
"""
class Entry(models.Model):
creation_date = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_auto_now_add"):
with self.assertRaises(SystemExit), captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
self.assertIn(
"Field 'creation_date' on model 'entry' not migrated: it is "
"impossible to add a field with 'auto_now_add=True' without "
"specifying a default.",
out.getvalue(),
)
def test_makemigrations_interactive_unique_callable_default_addition(self):
"""
makemigrations prompts the user when adding a unique field with
a callable default.
"""
class Book(models.Model):
created = models.DateTimeField(unique=True, default=timezone.now)
class Meta:
app_label = "migrations"
version = get_docs_version()
input_msg = (
f"Callable default on unique field book.created will not generate "
f"unique values upon migrating.\n"
f"Please choose how to proceed:\n"
f" 1) Continue making this migration as the first step in writing "
f"a manual migration to generate unique values described here: "
f"https://docs.djangoproject.com/en/{version}/howto/"
f"writing-migrations/#migrations-that-add-unique-fields.\n"
f" 2) Quit and edit field options in models.py.\n"
)
with self.temporary_migration_module(module="migrations.test_migrations"):
# 2 - quit.
with mock.patch("builtins.input", return_value="2"):
with captured_stdout() as out, self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
out_value = out.getvalue()
self.assertIn(input_msg, out_value)
self.assertNotIn("Add field created to book", out_value)
# 1 - continue.
with mock.patch("builtins.input", return_value="1"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
out_value = out.getvalue()
self.assertIn(input_msg, out_value)
self.assertIn("Add field created to book", out_value)
def test_makemigrations_non_interactive_unique_callable_default_addition(self):
class Book(models.Model):
created = models.DateTimeField(unique=True, default=timezone.now)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_migrations"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
out_value = out.getvalue()
self.assertIn("Add field created to book", out_value)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"},
)
def test_makemigrations_continues_number_sequence_after_squash(self):
with self.temporary_migration_module(
module="migrations.test_migrations_squashed"
):
with captured_stdout() as out:
call_command(
"makemigrations",
"migrations",
interactive=False,
empty=True,
)
out_value = out.getvalue()
self.assertIn("0003_auto", out_value)
def test_makemigrations_update(self):
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
migration_file = os.path.join(migration_dir, "0002_second.py")
with open(migration_file) as fp:
initial_content = fp.read()
with captured_stdout() as out:
call_command("makemigrations", "migrations", update=True)
self.assertFalse(
any(
filename.startswith("0003")
for filename in os.listdir(migration_dir)
)
)
self.assertIs(os.path.exists(migration_file), False)
new_migration_file = os.path.join(
migration_dir,
"0002_delete_tribble_author_rating_modelwithcustombase_and_more.py",
)
with open(new_migration_file) as fp:
self.assertNotEqual(initial_content, fp.read())
self.assertIn(f"Deleted {migration_file}", out.getvalue())
def test_makemigrations_update_existing_name(self):
with self.temporary_migration_module(
module="migrations.test_auto_now_add"
) as migration_dir:
migration_file = os.path.join(migration_dir, "0001_initial.py")
with open(migration_file) as fp:
initial_content = fp.read()
with captured_stdout() as out:
call_command("makemigrations", "migrations", update=True)
self.assertIs(os.path.exists(migration_file), False)
new_migration_file = os.path.join(
migration_dir,
"0001_initial_updated.py",
)
with open(new_migration_file) as fp:
self.assertNotEqual(initial_content, fp.read())
self.assertIn(f"Deleted {migration_file}", out.getvalue())
def test_makemigrations_update_custom_name(self):
custom_name = "delete_something"
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
old_migration_file = os.path.join(migration_dir, "0002_second.py")
with open(old_migration_file) as fp:
initial_content = fp.read()
with captured_stdout() as out:
call_command(
"makemigrations", "migrations", update=True, name=custom_name
)
self.assertFalse(
any(
filename.startswith("0003")
for filename in os.listdir(migration_dir)
)
)
self.assertIs(os.path.exists(old_migration_file), False)
new_migration_file = os.path.join(migration_dir, f"0002_{custom_name}.py")
self.assertIs(os.path.exists(new_migration_file), True)
with open(new_migration_file) as fp:
self.assertNotEqual(initial_content, fp.read())
self.assertIn(f"Deleted {old_migration_file}", out.getvalue())
def test_makemigrations_update_applied_migration(self):
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
with self.temporary_migration_module(module="migrations.test_migrations"):
msg = "Cannot update applied migration 'migrations.0002_second'."
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", "migrations", update=True)
def test_makemigrations_update_no_migration(self):
with self.temporary_migration_module(module="migrations.test_migrations_empty"):
msg = "App migrations has no migration, cannot update last migration."
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", "migrations", update=True)
def test_makemigrations_update_squash_migration(self):
with self.temporary_migration_module(
module="migrations.test_migrations_squashed"
):
msg = "Cannot update squash migration 'migrations.0001_squashed_0002'."
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", "migrations", update=True)
def test_makemigrations_update_manual_porting(self):
with self.temporary_migration_module(
module="migrations.test_migrations_plan"
) as migration_dir:
with captured_stdout() as out:
call_command("makemigrations", "migrations", update=True)
# Previous migration exists.
previous_migration_file = os.path.join(migration_dir, "0005_fifth.py")
self.assertIs(os.path.exists(previous_migration_file), True)
# New updated migration exists.
files = [f for f in os.listdir(migration_dir) if f.startswith("0005_auto")]
updated_migration_file = os.path.join(migration_dir, files[0])
self.assertIs(os.path.exists(updated_migration_file), True)
self.assertIn(
f"Updated migration {updated_migration_file} requires manual porting.\n"
f"Previous migration {previous_migration_file} was kept and must be "
f"deleted after porting functions manually.",
out.getvalue(),
)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_makemigrations_update_dependency_migration(self):
with self.temporary_migration_module(app_label="book_app"):
msg = (
"Cannot update migration 'book_app.0001_initial' that migrations "
"'author_app.0002_alter_id' depend on."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", "book_app", update=True)
class SquashMigrationsTests(MigrationTestBase):
"""
Tests running the squashmigrations command.
"""
def test_squashmigrations_squashes(self):
"""
squashmigrations squashes migrations.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
stdout=out,
no_color=True,
)
squashed_migration_file = os.path.join(
migration_dir, "0001_squashed_0002_second.py"
)
self.assertTrue(os.path.exists(squashed_migration_file))
self.assertEqual(
out.getvalue(),
"Will squash the following migrations:\n"
" - 0001_initial\n"
" - 0002_second\n"
"Optimizing...\n"
" Optimized from 8 operations to 2 operations.\n"
"Created new squashed migration %s\n"
" You should commit this migration but leave the old ones in place;\n"
" the new migration will be used for new installs. Once you are sure\n"
" all instances of the codebase have applied the migrations you "
"squashed,\n"
" you can delete them.\n" % squashed_migration_file,
)
def test_squashmigrations_replacement_cycle(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_squashed_loop"
):
# Hits a squash replacement cycle check error, but the actual
# failure is dependent on the order in which the files are read on
# disk.
with self.assertRaisesRegex(
CommandError,
r"Cyclical squash replacement found, starting at"
r" \('migrations', '2_(squashed|auto)'\)",
):
call_command(
"migrate", "migrations", "--plan", interactive=False, stdout=out
)
def test_squashmigrations_squashes_already_squashed(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_squashed_complex"
):
call_command(
"squashmigrations",
"migrations",
"3_squashed_5",
"--squashed-name",
"double_squash",
stdout=out,
interactive=False,
)
loader = MigrationLoader(connection)
migration = loader.disk_migrations[("migrations", "0001_double_squash")]
            # Confirm that `replaces` lists the already-squashed migration
            # itself (not the migrations it squashed), since the squashed
            # operations are what end up being used.
self.assertEqual(
migration.replaces,
[
("migrations", "1_auto"),
("migrations", "2_auto"),
("migrations", "3_squashed_5"),
],
)
out = io.StringIO()
call_command(
"migrate", "migrations", "--plan", interactive=False, stdout=out
)
migration_plan = re.findall("migrations.(.+)\n", out.getvalue())
self.assertEqual(migration_plan, ["0001_double_squash", "6_auto", "7_auto"])
def test_squash_partially_applied(self):
"""
        After the replacement migrations are partially applied, squash again
        and verify that "migrate" applies only the still-unapplied migrations.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_squashed_partially_applied"
):
# Apply first 2 migrations.
call_command("migrate", "migrations", "0002", interactive=False, stdout=out)
            # Squash the two migrations just applied, plus one more.
call_command(
"squashmigrations",
"migrations",
"0001",
"0003",
"--squashed-name",
"squashed_0001_0003",
stdout=out,
interactive=False,
)
# Update the 4th migration to depend on the squash(replacement)
# migration.
loader = MigrationLoader(connection)
migration = loader.disk_migrations[
("migrations", "0004_remove_mymodel1_field_1_mymodel1_field_3_and_more")
]
migration.dependencies = [("migrations", "0001_squashed_0001_0003")]
writer = MigrationWriter(migration)
with open(writer.path, "w", encoding="utf-8") as fh:
fh.write(writer.as_string())
# Squash the squash(replacement) migration with the 4th migration.
call_command(
"squashmigrations",
"migrations",
"0001_squashed_0001_0003",
"0004",
"--squashed-name",
"squashed_0001_0004",
stdout=out,
interactive=False,
)
loader = MigrationLoader(connection)
migration = loader.disk_migrations[
("migrations", "0001_squashed_0001_0004")
]
self.assertEqual(
migration.replaces,
[
("migrations", "0001_squashed_0001_0003"),
(
"migrations",
"0004_remove_mymodel1_field_1_mymodel1_field_3_and_more",
),
],
)
# Verify that only unapplied migrations will be applied.
out = io.StringIO()
call_command(
"migrate", "migrations", "--plan", interactive=False, stdout=out
)
migration_plan = re.findall("migrations.(.+)\n", out.getvalue())
self.assertEqual(
migration_plan,
[
"0003_alter_mymodel2_unique_together",
"0004_remove_mymodel1_field_1_mymodel1_field_3_and_more",
],
)
def test_double_replaced_migrations_are_recorded(self):
"""
        All recursively replaced migrations should be recorded/unrecorded when
migrating an app with double squashed migrations.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_squashed_double"
):
recorder = MigrationRecorder(connection)
applied_app_labels = [
app_label for app_label, _ in recorder.applied_migrations()
]
self.assertNotIn("migrations", applied_app_labels)
call_command(
"migrate", "migrations", "--plan", interactive=False, stdout=out
)
migration_plan = re.findall("migrations.(.+)\n", out.getvalue())
# Only the top-level replacement migration should be applied.
self.assertEqual(migration_plan, ["0005_squashed_0003_and_0004"])
call_command("migrate", "migrations", interactive=False, verbosity=0)
applied_migrations = recorder.applied_migrations()
# Make sure all replaced migrations are recorded.
self.assertIn(("migrations", "0001_initial"), applied_migrations)
self.assertIn(("migrations", "0002_auto"), applied_migrations)
self.assertIn(
("migrations", "0003_squashed_0001_and_0002"), applied_migrations
)
self.assertIn(("migrations", "0004_auto"), applied_migrations)
self.assertIn(
("migrations", "0005_squashed_0003_and_0004"), applied_migrations
)
# Unapply all migrations from this app.
call_command(
"migrate", "migrations", "zero", interactive=False, verbosity=0
)
applied_app_labels = [
app_label for app_label, _ in recorder.applied_migrations()
]
self.assertNotIn("migrations", applied_app_labels)
def test_double_replaced_migrations_are_checked_correctly(self):
"""
If replaced migrations are already applied and replacing migrations
are not, then migrate should not fail with
InconsistentMigrationHistory.
"""
with self.temporary_migration_module():
call_command(
"makemigrations",
"migrations",
"--empty",
interactive=False,
verbosity=0,
)
call_command(
"makemigrations",
"migrations",
"--empty",
interactive=False,
verbosity=0,
)
call_command(
"makemigrations",
"migrations",
"--empty",
interactive=False,
verbosity=0,
)
call_command(
"makemigrations",
"migrations",
"--empty",
interactive=False,
verbosity=0,
)
call_command("migrate", "migrations", interactive=False, verbosity=0)
call_command(
"squashmigrations",
"migrations",
"0001",
"0002",
interactive=False,
verbosity=0,
)
call_command(
"squashmigrations",
"migrations",
"0001_initial_squashed",
"0003",
interactive=False,
verbosity=0,
)
call_command("migrate", "migrations", interactive=False, verbosity=0)
def test_squashmigrations_initial_attribute(self):
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations", "migrations", "0002", interactive=False, verbosity=0
)
squashed_migration_file = os.path.join(
migration_dir, "0001_squashed_0002_second.py"
)
with open(squashed_migration_file, encoding="utf-8") as fp:
content = fp.read()
self.assertIn("initial = True", content)
def test_squashmigrations_optimizes(self):
"""
squashmigrations optimizes operations.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
verbosity=1,
stdout=out,
)
self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue())
def test_ticket_23799_squashmigrations_no_optimize(self):
"""
squashmigrations --no-optimize doesn't optimize operations.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
verbosity=1,
no_optimize=True,
stdout=out,
)
self.assertIn("Skipping optimization", out.getvalue())
def test_squashmigrations_valid_start(self):
"""
squashmigrations accepts a starting migration.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0002",
"0003",
interactive=False,
verbosity=1,
stdout=out,
)
squashed_migration_file = os.path.join(
migration_dir, "0002_second_squashed_0003_third.py"
)
with open(squashed_migration_file, encoding="utf-8") as fp:
content = fp.read()
if HAS_BLACK:
test_str = ' ("migrations", "0001_initial")'
else:
test_str = " ('migrations', '0001_initial')"
self.assertIn(test_str, content)
self.assertNotIn("initial = True", content)
out = out.getvalue()
self.assertNotIn(" - 0001_initial", out)
self.assertIn(" - 0002_second", out)
self.assertIn(" - 0003_third", out)
def test_squashmigrations_invalid_start(self):
"""
squashmigrations doesn't accept a starting migration after the ending
migration.
"""
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
msg = (
"The migration 'migrations.0003_third' cannot be found. Maybe "
"it comes after the migration 'migrations.0002_second'"
)
with self.assertRaisesMessage(CommandError, msg):
call_command(
"squashmigrations",
"migrations",
"0003",
"0002",
interactive=False,
verbosity=0,
)
def test_squashed_name_with_start_migration_name(self):
"""--squashed-name specifies the new migration's name."""
squashed_name = "squashed_name"
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0001",
"0002",
squashed_name=squashed_name,
interactive=False,
verbosity=0,
)
squashed_migration_file = os.path.join(
migration_dir, "0001_%s.py" % squashed_name
)
self.assertTrue(os.path.exists(squashed_migration_file))
def test_squashed_name_without_start_migration_name(self):
"""--squashed-name also works if a start migration is omitted."""
squashed_name = "squashed_name"
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0001",
squashed_name=squashed_name,
interactive=False,
verbosity=0,
)
squashed_migration_file = os.path.join(
migration_dir, "0001_%s.py" % squashed_name
)
self.assertTrue(os.path.exists(squashed_migration_file))
def test_squashed_name_exists(self):
msg = "Migration 0001_initial already exists. Use a different name."
with self.temporary_migration_module(module="migrations.test_migrations"):
with self.assertRaisesMessage(CommandError, msg):
call_command(
"squashmigrations",
"migrations",
"0001",
"0002",
squashed_name="initial",
interactive=False,
verbosity=0,
)
def test_squashmigrations_manual_porting(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_manual_porting",
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
stdout=out,
no_color=True,
)
squashed_migration_file = os.path.join(
migration_dir,
"0001_squashed_0002_second.py",
)
self.assertTrue(os.path.exists(squashed_migration_file))
black_warning = ""
if HAS_BLACK:
black_warning = (
"Squashed migration couldn't be formatted using the "
'"black" command. You can call it manually.\n'
)
self.assertEqual(
out.getvalue(),
f"Will squash the following migrations:\n"
f" - 0001_initial\n"
f" - 0002_second\n"
f"Optimizing...\n"
f" No optimizations possible.\n"
f"Created new squashed migration {squashed_migration_file}\n"
f" You should commit this migration but leave the old ones in place;\n"
f" the new migration will be used for new installs. Once you are sure\n"
f" all instances of the codebase have applied the migrations you "
f"squashed,\n"
f" you can delete them.\n"
f"Manual porting required\n"
f" Your migrations contained functions that must be manually copied "
f"over,\n"
f" as we could not safely copy their implementation.\n"
f" See the comment at the top of the squashed migration for details.\n"
+ black_warning,
)
def test_failure_to_format_code(self):
self.assertFormatterFailureCaught(
"squashmigrations", "migrations", "0002", interactive=False
)
class AppLabelErrorTests(TestCase):
"""
This class inherits TestCase because MigrationTestBase uses
`available_apps = ['migrations']` which means that it's the only installed
app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these
tests.
"""
nonexistent_app_error = "No installed app with label 'nonexistent_app'."
did_you_mean_auth_error = (
"No installed app with label 'django.contrib.auth'. Did you mean 'auth'?"
)
def test_makemigrations_nonexistent_app_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("makemigrations", "nonexistent_app", stderr=err)
self.assertIn(self.nonexistent_app_error, err.getvalue())
def test_makemigrations_app_name_specified_as_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("makemigrations", "django.contrib.auth", stderr=err)
self.assertIn(self.did_you_mean_auth_error, err.getvalue())
def test_migrate_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("migrate", "nonexistent_app")
def test_migrate_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("migrate", "django.contrib.auth")
def test_showmigrations_nonexistent_app_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("showmigrations", "nonexistent_app", stderr=err)
self.assertIn(self.nonexistent_app_error, err.getvalue())
def test_showmigrations_app_name_specified_as_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("showmigrations", "django.contrib.auth", stderr=err)
self.assertIn(self.did_you_mean_auth_error, err.getvalue())
def test_sqlmigrate_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("sqlmigrate", "nonexistent_app", "0002")
def test_sqlmigrate_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("sqlmigrate", "django.contrib.auth", "0002")
def test_squashmigrations_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("squashmigrations", "nonexistent_app", "0002")
def test_squashmigrations_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("squashmigrations", "django.contrib.auth", "0002")
def test_optimizemigration_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("optimizemigration", "nonexistent_app", "0002")
def test_optimizemigration_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("optimizemigration", "django.contrib.auth", "0002")
class OptimizeMigrationTests(MigrationTestBase):
def test_no_optimization_possible(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"optimizemigration", "migrations", "0002", stdout=out, no_color=True
)
migration_file = os.path.join(migration_dir, "0002_second.py")
self.assertTrue(os.path.exists(migration_file))
call_command(
"optimizemigration",
"migrations",
"0002",
stdout=out,
no_color=True,
verbosity=0,
)
self.assertEqual(out.getvalue(), "No optimizations possible.\n")
def test_optimization(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"optimizemigration", "migrations", "0001", stdout=out, no_color=True
)
initial_migration_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_migration_file))
with open(initial_migration_file) as fp:
content = fp.read()
self.assertIn(
(
'("bool", models.BooleanField'
if HAS_BLACK
else "('bool', models.BooleanField"
),
content,
)
self.assertEqual(
out.getvalue(),
f"Optimizing from 4 operations to 2 operations.\n"
f"Optimized migration {initial_migration_file}\n",
)
def test_optimization_no_verbosity(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"optimizemigration",
"migrations",
"0001",
stdout=out,
no_color=True,
verbosity=0,
)
initial_migration_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_migration_file))
with open(initial_migration_file) as fp:
content = fp.read()
self.assertIn(
(
'("bool", models.BooleanField'
if HAS_BLACK
else "('bool', models.BooleanField"
),
content,
)
self.assertEqual(out.getvalue(), "")
def test_creates_replace_migration_manual_porting(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_manual_porting"
) as migration_dir:
call_command(
"optimizemigration", "migrations", "0003", stdout=out, no_color=True
)
optimized_migration_file = os.path.join(
migration_dir, "0003_third_optimized.py"
)
self.assertTrue(os.path.exists(optimized_migration_file))
with open(optimized_migration_file) as fp:
content = fp.read()
self.assertIn("replaces = [", content)
black_warning = ""
if HAS_BLACK:
black_warning = (
"Optimized migration couldn't be formatted using the "
'"black" command. You can call it manually.\n'
)
self.assertEqual(
out.getvalue(),
"Optimizing from 3 operations to 2 operations.\n"
"Manual porting required\n"
" Your migrations contained functions that must be manually copied over,\n"
" as we could not safely copy their implementation.\n"
" See the comment at the top of the optimized migration for details.\n"
+ black_warning
+ f"Optimized migration {optimized_migration_file}\n",
)
def test_fails_squash_migration_manual_porting(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_manual_porting"
) as migration_dir:
version = get_docs_version()
msg = (
f"Migration will require manual porting but is already a squashed "
f"migration.\nTransition to a normal migration first: "
f"https://docs.djangoproject.com/en/{version}/topics/migrations/"
f"#squashing-migrations"
)
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "migrations", "0004", stdout=out)
optimized_migration_file = os.path.join(
migration_dir, "0004_fourth_optimized.py"
)
self.assertFalse(os.path.exists(optimized_migration_file))
self.assertEqual(
out.getvalue(), "Optimizing from 3 operations to 2 operations.\n"
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_optimizemigration_check(self):
with self.assertRaises(SystemExit):
call_command(
"optimizemigration", "--check", "migrations", "0001", verbosity=0
)
call_command("optimizemigration", "--check", "migrations", "0002", verbosity=0)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app_simple"],
)
def test_app_without_migrations(self):
msg = "App 'unmigrated_app_simple' does not have migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "unmigrated_app_simple", "0001")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_clashing_prefix"},
)
    def test_ambiguous_prefix(self):
msg = (
"More than one migration matches 'a' in app 'migrations'. Please "
"be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "migrations", "a")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_unknown_prefix(self):
msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "migrations", "nonexistent")
def test_failure_to_format_code(self):
self.assertFormatterFailureCaught("optimizemigration", "migrations", "0001")
class CustomMigrationCommandTests(MigrationTestBase):
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
INSTALLED_APPS=["migrations.migrations_test_apps.migrated_app"],
)
@isolate_apps("migrations.migrations_test_apps.migrated_app")
def test_makemigrations_custom_autodetector(self):
class CustomAutodetector(MigrationAutodetector):
def changes(self, *args, **kwargs):
return []
class CustomMakeMigrationsCommand(MakeMigrationsCommand):
autodetector = CustomAutodetector
class NewModel(models.Model):
class Meta:
app_label = "migrated_app"
out = io.StringIO()
command = CustomMakeMigrationsCommand(stdout=out)
call_command(command, "migrated_app", stdout=out)
self.assertIn("No changes detected", out.getvalue())
@override_settings(INSTALLED_APPS=["migrations.migrations_test_apps.migrated_app"])
@isolate_apps("migrations.migrations_test_apps.migrated_app")
def test_migrate_custom_autodetector(self):
class CustomAutodetector(MigrationAutodetector):
def changes(self, *args, **kwargs):
return []
class CustomMigrateCommand(MigrateCommand):
autodetector = CustomAutodetector
class NewModel(models.Model):
class Meta:
app_label = "migrated_app"
out = io.StringIO()
command = CustomMigrateCommand(stdout=out)
out = io.StringIO()
try:
call_command(command, verbosity=0)
call_command(command, stdout=out, no_color=True)
command_stdout = out.getvalue().lower()
self.assertEqual(
"operations to perform:\n"
" apply all migrations: migrated_app\n"
"running migrations:\n"
" no migrations to apply.\n",
command_stdout,
)
finally:
call_command(command, "migrated_app", "zero", verbosity=0)
|
./temp_repos/django/django/core/checks/commands.py
|
./temp_repos/django/tests/migrations/test_commands.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.core.checks, django.core.management
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
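A minimal sketch of a test meeting these requirements follows. Since the
target class is 'Unknown' (no concrete class was detected), the subject under
test below is a hypothetical stand-in around django.core.management; only the
mocking pattern is meant to carry over, not the class itself.

import unittest
from unittest import mock


class UnknownSubject:
    """Hypothetical placeholder for the undetected class."""

    def run_checks(self):
        # Imported at call time so a patch on the module attribute takes
        # effect inside this method.
        from django.core.management import call_command

        call_command("check")
        return True


class UnknownSubjectTests(unittest.TestCase):
    @mock.patch("django.core.management.call_command")
    def test_run_checks_success(self, mock_call_command):
        # Arrange
        subject = UnknownSubject()
        # Act
        result = subject.run_checks()
        # Assert
        self.assertTrue(result)
        mock_call_command.assert_called_once_with("check")

    @mock.patch("django.core.management.call_command")
    def test_run_checks_failure(self, mock_call_command):
        # Arrange: simulate the management command failing.
        mock_call_command.side_effect = RuntimeError("check failed")
        subject = UnknownSubject()
        # Act / Assert: the failure propagates to the caller.
        with self.assertRaises(RuntimeError):
            subject.run_checks()


if __name__ == "__main__":
    unittest.main()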
|
Unknown
|
python
|
from django.apps.registry import apps as global_apps
from django.db import migrations, router
from .exceptions import InvalidMigrationPlan
from .loader import MigrationLoader
from .recorder import MigrationRecorder
from .state import ProjectState
class MigrationExecutor:
"""
End-to-end migration execution - load migrations and run them up or down
to a specified set of targets.
"""
def __init__(self, connection, progress_callback=None):
self.connection = connection
self.loader = MigrationLoader(self.connection)
self.recorder = MigrationRecorder(self.connection)
self.progress_callback = progress_callback
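    # Usage sketch (illustrative, not part of this module): progress_callback
    # receives event names such as "render_start"/"render_success" and
    # "apply_start" along with the migration being applied, e.g.
    #
    #   def report(action, migration=None, fake=False):
    #       print(action, migration)
    #
    #   executor = MigrationExecutor(connection, progress_callback=report)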
def migration_plan(self, targets, clean_start=False):
"""
Given a set of targets, return a list of (Migration instance,
backwards?).
"""
plan = []
if clean_start:
applied = {}
else:
applied = dict(self.loader.applied_migrations)
for target in targets:
# If the target is (app_label, None), that means unmigrate
# everything
if target[1] is None:
for root in self.loader.graph.root_nodes():
if root[0] == target[0]:
for migration in self.loader.graph.backwards_plan(root):
if migration in applied:
plan.append((self.loader.graph.nodes[migration], True))
applied.pop(migration)
# If the target is missing, it's likely a replaced migration.
# Reload the graph without replacements.
elif (
self.loader.replace_migrations
and target not in self.loader.graph.node_map
):
self.loader.replace_migrations = False
self.loader.build_graph()
return self.migration_plan(targets, clean_start=clean_start)
# If the migration is already applied, do backwards mode,
# otherwise do forwards mode.
elif target in applied:
# Don't migrate backwards all the way to the target node (that
# may roll back dependencies in other apps that don't need to
# be rolled back); instead roll back through target's immediate
# child(ren) in the same app, and no further.
next_in_app = sorted(
n
for n in self.loader.graph.node_map[target].children
if n[0] == target[0]
)
for node in next_in_app:
for migration in self.loader.graph.backwards_plan(node):
if migration in applied:
plan.append((self.loader.graph.nodes[migration], True))
applied.pop(migration)
else:
for migration in self.loader.graph.forwards_plan(target):
if migration not in applied:
plan.append((self.loader.graph.nodes[migration], False))
applied[migration] = self.loader.graph.nodes[migration]
return plan
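    # Usage sketch (illustrative, not part of this module): the plan is a list
    # of (Migration, backwards) pairs, e.g.
    #
    #   executor = MigrationExecutor(connection)
    #   plan = executor.migration_plan([("migrations", "0002_second")])
    #   for migration, backwards in plan:
    #       ...  # backwards is True when the step unapplies the migration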
def _create_project_state(self, with_applied_migrations=False):
"""
Create a project state including all the applications without
migrations and applied migrations if with_applied_migrations=True.
"""
state = ProjectState(real_apps=self.loader.unmigrated_apps)
if with_applied_migrations:
# Create the forwards plan Django would follow on an empty database
full_plan = self.migration_plan(
self.loader.graph.leaf_nodes(), clean_start=True
)
applied_migrations = {
self.loader.graph.nodes[key]
for key in self.loader.applied_migrations
if key in self.loader.graph.nodes
}
for migration, _ in full_plan:
if migration in applied_migrations:
migration.mutate_state(state, preserve=False)
return state
def migrate(self, targets, plan=None, state=None, fake=False, fake_initial=False):
"""
Migrate the database up to the given targets.
Django first needs to create all project states before a migration is
(un)applied and in a second step run all the database operations.
"""
# The django_migrations table must be present to record applied
# migrations, but don't create it if there are no migrations to apply.
if plan == []:
if not self.recorder.has_table():
return self._create_project_state(with_applied_migrations=False)
else:
self.recorder.ensure_schema()
if plan is None:
plan = self.migration_plan(targets)
# Create the forwards plan Django would follow on an empty database
full_plan = self.migration_plan(
self.loader.graph.leaf_nodes(), clean_start=True
)
all_forwards = all(not backwards for mig, backwards in plan)
all_backwards = all(backwards for mig, backwards in plan)
if not plan:
if state is None:
# The resulting state should include applied migrations.
state = self._create_project_state(with_applied_migrations=True)
elif all_forwards == all_backwards:
# This should only happen if there's a mixed plan
raise InvalidMigrationPlan(
"Migration plans with both forwards and backwards migrations "
"are not supported. Please split your migration process into "
"separate plans of only forwards OR backwards migrations.",
plan,
)
elif all_forwards:
if state is None:
# The resulting state should still include applied migrations.
state = self._create_project_state(with_applied_migrations=True)
state = self._migrate_all_forwards(
state, plan, full_plan, fake=fake, fake_initial=fake_initial
)
else:
# No need to check for `elif all_backwards` here, as that condition
# would always evaluate to true.
state = self._migrate_all_backwards(plan, full_plan, fake=fake)
self.check_replacements()
return state
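    # Usage sketch (illustrative, not part of this module):
    #
    #   executor = MigrationExecutor(connection)
    #   executor.migrate([("migrations", "0002_second")])  # forwards to 0002
    #   executor.migrate([("migrations", None)])           # unapply everything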
def _migrate_all_forwards(self, state, plan, full_plan, fake, fake_initial):
"""
Take a list of 2-tuples of the form (migration instance, False) and
apply them in the order they occur in the full_plan.
"""
migrations_to_run = {m[0] for m in plan}
for migration, _ in full_plan:
if not migrations_to_run:
                # We remove every migration that we applied from this set so
# that we can bail out once the last migration has been applied
# and don't always run until the very end of the migration
# process.
break
if migration in migrations_to_run:
if "apps" not in state.__dict__:
if self.progress_callback:
self.progress_callback("render_start")
state.apps # Render all -- performance critical
if self.progress_callback:
self.progress_callback("render_success")
state = self.apply_migration(
state, migration, fake=fake, fake_initial=fake_initial
)
migrations_to_run.remove(migration)
return state
def _migrate_all_backwards(self, plan, full_plan, fake):
"""
Take a list of 2-tuples of the form (migration instance, True) and
        unapply them in the reverse of the order they occur in the full_plan.
Since unapplying a migration requires the project state prior to that
migration, Django will compute the migration states before each of them
in a first run over the plan and then unapply them in a second run over
the plan.
"""
migrations_to_run = {m[0] for m in plan}
# Holds all migration states prior to the migrations being unapplied
states = {}
state = self._create_project_state()
applied_migrations = {
self.loader.graph.nodes[key]
for key in self.loader.applied_migrations
if key in self.loader.graph.nodes
}
if self.progress_callback:
self.progress_callback("render_start")
for migration, _ in full_plan:
if not migrations_to_run:
# We remove every migration that we applied from this set so
# that we can bail out once the last migration has been applied
# and don't always run until the very end of the migration
# process.
break
if migration in migrations_to_run:
if "apps" not in state.__dict__:
state.apps # Render all -- performance critical
# The state before this migration
states[migration] = state
                # The old state is kept as-is; we continue with the new state.
state = migration.mutate_state(state, preserve=True)
migrations_to_run.remove(migration)
elif migration in applied_migrations:
# Only mutate the state if the migration is actually applied
# to make sure the resulting state doesn't include changes
# from unrelated migrations.
migration.mutate_state(state, preserve=False)
if self.progress_callback:
self.progress_callback("render_success")
for migration, _ in plan:
self.unapply_migration(states[migration], migration, fake=fake)
applied_migrations.remove(migration)
# Generate the post migration state by starting from the state before
# the last migration is unapplied and mutating it to include all the
# remaining applied migrations.
last_unapplied_migration = plan[-1][0]
state = states[last_unapplied_migration]
# Avoid mutating state with apps rendered as it's an expensive
# operation.
del state.apps
for index, (migration, _) in enumerate(full_plan):
if migration == last_unapplied_migration:
for migration, _ in full_plan[index:]:
if migration in applied_migrations:
migration.mutate_state(state, preserve=False)
break
return state
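    # Illustrative shape of the bookkeeping above (not part of Django):
    # ``states`` maps each migration to the project state just before it,
    # so unapply_migration() can start from the correct schema snapshot:
    #   states = {<Migration a.0002>: <state after a.0001>, ...}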
def apply_migration(self, state, migration, fake=False, fake_initial=False):
"""Run a migration forwards."""
migration_recorded = False
if self.progress_callback:
self.progress_callback("apply_start", migration, fake)
if not fake:
if fake_initial:
# Test to see if this is an already-applied initial migration
applied, state = self.detect_soft_applied(state, migration)
if applied:
fake = True
if not fake:
# Alright, do it normally
with self.connection.schema_editor(
atomic=migration.atomic
) as schema_editor:
state = migration.apply(state, schema_editor)
if not schema_editor.deferred_sql:
self.record_migration(migration.app_label, migration.name)
migration_recorded = True
if not migration_recorded:
self.record_migration(migration.app_label, migration.name)
# Report progress
if self.progress_callback:
self.progress_callback("apply_success", migration, fake)
return state
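    # Note (descriptive, not part of Django): when the schema editor exits
    # with deferred SQL pending, recording happens only after that SQL has
    # run, so a failure in a deferred statement leaves the migration
    # unrecorded (see test_migrations_not_applied_on_deferred_sql_failure).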
def record_migration(self, app_label, name, forward=True):
migration = self.loader.disk_migrations.get((app_label, name))
# For replacement migrations, record individual statuses
if migration and migration.replaces:
for replaced_app_label, replaced_name in migration.replaces:
self.record_migration(replaced_app_label, replaced_name, forward)
if forward:
self.recorder.record_applied(app_label, name)
else:
self.recorder.record_unapplied(app_label, name)
def unapply_migration(self, state, migration, fake=False):
"""Run a migration backwards."""
if self.progress_callback:
self.progress_callback("unapply_start", migration, fake)
if not fake:
with self.connection.schema_editor(
atomic=migration.atomic
) as schema_editor:
state = migration.unapply(state, schema_editor)
self.record_migration(migration.app_label, migration.name, forward=False)
# Report progress
if self.progress_callback:
self.progress_callback("unapply_success", migration, fake)
return state
def check_replacements(self):
"""
        Mark replacement migrations applied if all the migrations they replace are.
Do this unconditionally on every migrate, rather than just when
migrations are applied or unapplied, to correctly handle the case
when a new squash migration is pushed to a deployment that already had
all its replaced migrations applied. In this case no new migration will
be applied, but the applied state of the squashed migration must be
maintained.
"""
applied = self.recorder.applied_migrations()
for key, migration in self.loader.replacements.items():
if key not in applied and self.loader.all_replaced_applied(key, applied):
self.recorder.record_applied(*key)
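    # Example scenario (illustrative): if 0001_squashed_0002 replaces
    # 0001_initial and 0002_second and both replaced migrations are already
    # recorded as applied, this records the squash itself as applied even
    # though no operation ran during this migrate call.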
def detect_soft_applied(self, project_state, migration):
"""
Test whether a migration has been implicitly applied - that the
tables or columns it would create exist. This is intended only for use
on initial migrations (as it only looks for CreateModel and AddField).
"""
def should_skip_detecting_model(migration, model):
"""
No need to detect tables for proxy models, unmanaged models, or
models that can't be migrated on the current database.
"""
return (
model._meta.proxy
or not model._meta.managed
or not router.allow_migrate(
self.connection.alias,
migration.app_label,
model_name=model._meta.model_name,
)
)
if migration.initial is None:
# Bail if the migration isn't the first one in its app
if any(app == migration.app_label for app, name in migration.dependencies):
return False, project_state
elif migration.initial is False:
# Bail if it's NOT an initial migration
return False, project_state
if project_state is None:
after_state = self.loader.project_state(
(migration.app_label, migration.name), at_end=True
)
else:
after_state = migration.mutate_state(project_state)
apps = after_state.apps
found_create_model_migration = False
found_add_field_migration = False
fold_identifier_case = self.connection.features.ignores_table_name_case
with self.connection.cursor() as cursor:
existing_table_names = set(
self.connection.introspection.table_names(cursor)
)
if fold_identifier_case:
existing_table_names = {
name.casefold() for name in existing_table_names
}
# Make sure all create model and add field operations are done
for operation in migration.operations:
if isinstance(operation, migrations.CreateModel):
model = apps.get_model(migration.app_label, operation.name)
if model._meta.swapped:
# We have to fetch the model to test with from the
# main app cache, as it's not a direct dependency.
model = global_apps.get_model(model._meta.swapped)
if should_skip_detecting_model(migration, model):
continue
db_table = model._meta.db_table
if fold_identifier_case:
db_table = db_table.casefold()
if db_table not in existing_table_names:
return False, project_state
found_create_model_migration = True
elif isinstance(operation, migrations.AddField):
model = apps.get_model(migration.app_label, operation.model_name)
if model._meta.swapped:
# We have to fetch the model to test with from the
# main app cache, as it's not a direct dependency.
model = global_apps.get_model(model._meta.swapped)
if should_skip_detecting_model(migration, model):
continue
table = model._meta.db_table
field = model._meta.get_field(operation.name)
# Handle implicit many-to-many tables created by AddField.
if field.many_to_many:
through_db_table = field.remote_field.through._meta.db_table
if fold_identifier_case:
through_db_table = through_db_table.casefold()
if through_db_table not in existing_table_names:
return False, project_state
else:
found_add_field_migration = True
continue
with self.connection.cursor() as cursor:
columns = self.connection.introspection.get_table_description(
cursor, table
)
for column in columns:
field_column = field.column
column_name = column.name
if fold_identifier_case:
column_name = column_name.casefold()
field_column = field_column.casefold()
if column_name == field_column:
found_add_field_migration = True
break
else:
return False, project_state
# If we get this far and we found at least one CreateModel or AddField
# migration, the migration is considered implicitly applied.
return (found_create_model_migration or found_add_field_migration), after_state
|
from unittest import mock
from django.apps.registry import apps as global_apps
from django.db import DatabaseError, connection, migrations, models
from django.db.migrations.exceptions import InvalidMigrationPlan
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.graph import MigrationGraph
from django.db.migrations.recorder import MigrationRecorder
from django.db.migrations.state import ProjectState
from django.test import (
SimpleTestCase,
modify_settings,
override_settings,
skipUnlessDBFeature,
)
from django.test.utils import isolate_lru_cache
from .test_base import MigrationTestBase
@modify_settings(INSTALLED_APPS={"append": "migrations2"})
class ExecutorTests(MigrationTestBase):
"""
Tests the migration executor (full end-to-end running).
Bear in mind that if these are failing you should fix the other
test failures first, as they may be propagating into here.
"""
available_apps = [
"migrations",
"migrations2",
"django.contrib.auth",
"django.contrib.contenttypes",
]
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_run(self):
"""
Tests running a simple set of migrations.
"""
executor = MigrationExecutor(connection)
# Let's look at the plan first and make sure it's up to scratch
plan = executor.migration_plan([("migrations", "0002_second")])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
(executor.loader.graph.nodes["migrations", "0002_second"], False),
],
)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
# Alright, let's try running it
executor.migrate([("migrations", "0002_second")])
# Are the tables there now?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_book")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Alright, let's undo what we did
plan = executor.migration_plan([("migrations", None)])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0002_second"], True),
(executor.loader.graph.nodes["migrations", "0001_initial"], True),
],
)
executor.migrate([("migrations", None)])
# Are the tables gone?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_run_with_squashed(self):
"""
Tests running a squashed migration from zero (should ignore what it
replaces)
"""
executor = MigrationExecutor(connection)
# Check our leaf node is the squashed one
leaves = [
key for key in executor.loader.graph.leaf_nodes() if key[0] == "migrations"
]
self.assertEqual(leaves, [("migrations", "0001_squashed_0002")])
# Check the plan
plan = executor.migration_plan([("migrations", "0001_squashed_0002")])
self.assertEqual(
plan,
[
(
executor.loader.graph.nodes["migrations", "0001_squashed_0002"],
False,
),
],
)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
# Alright, let's try running it
executor.migrate([("migrations", "0001_squashed_0002")])
# Are the tables there now?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_book")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Alright, let's undo what we did. Should also just use squashed.
plan = executor.migration_plan([("migrations", None)])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_squashed_0002"], True),
],
)
executor.migrate([("migrations", None)])
# Are the tables gone?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"},
)
def test_migrate_backward_to_squashed_migration(self):
executor = MigrationExecutor(connection)
try:
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
executor.migrate([("migrations", "0001_squashed_0002")])
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_book")
executor.loader.build_graph()
# Migrate backward to a squashed migration.
executor.migrate([("migrations", "0001_initial")])
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_book")
finally:
# Unmigrate everything.
executor = MigrationExecutor(connection)
executor.migrate([("migrations", None)])
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}
)
def test_non_atomic_migration(self):
"""
Applying a non-atomic migration works as expected.
"""
executor = MigrationExecutor(connection)
with self.assertRaisesMessage(RuntimeError, "Abort migration"):
executor.migrate([("migrations", "0001_initial")])
self.assertTableExists("migrations_publisher")
migrations_apps = executor.loader.project_state(
("migrations", "0001_initial")
).apps
Publisher = migrations_apps.get_model("migrations", "Publisher")
self.assertTrue(Publisher.objects.exists())
self.assertTableNotExists("migrations_book")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_atomic_operation"}
)
def test_atomic_operation_in_non_atomic_migration(self):
"""
An atomic operation is properly rolled back inside a non-atomic
migration.
"""
executor = MigrationExecutor(connection)
with self.assertRaisesMessage(RuntimeError, "Abort migration"):
executor.migrate([("migrations", "0001_initial")])
migrations_apps = executor.loader.project_state(
("migrations", "0001_initial")
).apps
Editor = migrations_apps.get_model("migrations", "Editor")
self.assertFalse(Editor.objects.exists())
# Record previous migration as successful.
executor.migrate([("migrations", "0001_initial")], fake=True)
# Rebuild the graph to reflect the new DB state.
executor.loader.build_graph()
# Migrating backwards is also atomic.
with self.assertRaisesMessage(RuntimeError, "Abort migration"):
executor.migrate([("migrations", None)])
self.assertFalse(Editor.objects.exists())
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
"migrations2": "migrations2.test_migrations_2",
}
)
def test_empty_plan(self):
"""
Re-planning a full migration of a fully-migrated set doesn't
perform spurious unmigrations and remigrations.
There was previously a bug where the executor just always performed the
backwards plan for applied migrations - which even for the most recent
migration in an app, might include other, dependent apps, and these
were being unmigrated.
"""
# Make the initial plan, check it
executor = MigrationExecutor(connection)
plan = executor.migration_plan(
[
("migrations", "0002_second"),
("migrations2", "0001_initial"),
]
)
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
(executor.loader.graph.nodes["migrations", "0002_second"], False),
(executor.loader.graph.nodes["migrations2", "0001_initial"], False),
],
)
# Fake-apply all migrations
executor.migrate(
[("migrations", "0002_second"), ("migrations2", "0001_initial")], fake=True
)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Now plan a second time and make sure it's empty
plan = executor.migration_plan(
[
("migrations", "0002_second"),
("migrations2", "0001_initial"),
]
)
self.assertEqual(plan, [])
# The resulting state should include applied migrations.
state = executor.migrate(
[
("migrations", "0002_second"),
("migrations2", "0001_initial"),
]
)
self.assertIn(("migrations", "book"), state.models)
self.assertIn(("migrations", "author"), state.models)
self.assertIn(("migrations2", "otherauthor"), state.models)
# Erase all the fake records
executor.recorder.record_unapplied("migrations2", "0001_initial")
executor.recorder.record_unapplied("migrations", "0002_second")
executor.recorder.record_unapplied("migrations", "0001_initial")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
"migrations2": "migrations2.test_migrations_2_no_deps",
}
)
def test_mixed_plan_not_supported(self):
"""
        Although the MigrationExecutor interface allows for mixed migration
        plans (combined forwards and backwards migrations), this is not
        supported.
"""
# Prepare for mixed plan
executor = MigrationExecutor(connection)
plan = executor.migration_plan([("migrations", "0002_second")])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
(executor.loader.graph.nodes["migrations", "0002_second"], False),
],
)
executor.migrate(None, plan)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
self.assertIn(
("migrations", "0001_initial"), executor.loader.applied_migrations
)
self.assertIn(("migrations", "0002_second"), executor.loader.applied_migrations)
self.assertNotIn(
("migrations2", "0001_initial"), executor.loader.applied_migrations
)
# Generate mixed plan
plan = executor.migration_plan(
[
("migrations", None),
("migrations2", "0001_initial"),
]
)
msg = (
"Migration plans with both forwards and backwards migrations are "
"not supported. Please split your migration process into separate "
"plans of only forwards OR backwards migrations."
)
with self.assertRaisesMessage(InvalidMigrationPlan, msg) as cm:
executor.migrate(None, plan)
self.assertEqual(
cm.exception.args[1],
[
(executor.loader.graph.nodes["migrations", "0002_second"], True),
(executor.loader.graph.nodes["migrations", "0001_initial"], True),
(executor.loader.graph.nodes["migrations2", "0001_initial"], False),
],
)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
executor.migrate(
[
("migrations", None),
("migrations2", None),
]
)
# Are the tables gone?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
self.assertTableNotExists("migrations2_otherauthor")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_soft_apply(self):
"""
Tests detection of initial migrations already having been applied.
"""
state = {"faked": None}
def fake_storer(phase, migration=None, fake=None):
state["faked"] = fake
executor = MigrationExecutor(connection, progress_callback=fake_storer)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run it normally
self.assertEqual(
executor.migration_plan([("migrations", "0001_initial")]),
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
],
)
executor.migrate([("migrations", "0001_initial")])
# Are the tables there now?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# We shouldn't have faked that one
self.assertIs(state["faked"], False)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Fake-reverse that
executor.migrate([("migrations", None)], fake=True)
# Are the tables still there?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Make sure that was faked
self.assertIs(state["faked"], True)
# Finally, migrate forwards; this should fake-apply our initial
# migration
executor.loader.build_graph()
self.assertEqual(
executor.migration_plan([("migrations", "0001_initial")]),
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
],
)
# Applying the migration should raise a database level error
# because we haven't given the --fake-initial option
with self.assertRaises(DatabaseError):
executor.migrate([("migrations", "0001_initial")])
# Reset the faked state
state = {"faked": None}
# Allow faking of initial CreateModel operations
executor.migrate([("migrations", "0001_initial")], fake_initial=True)
self.assertIs(state["faked"], True)
# And migrate back to clean up the database
executor.loader.build_graph()
executor.migrate([("migrations", None)])
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_custom_user",
"django.contrib.auth": "django.contrib.auth.migrations",
},
AUTH_USER_MODEL="migrations.Author",
)
def test_custom_user(self):
"""
Regression test for #22325 - references to a custom user model defined
in the same app are not resolved correctly.
"""
with isolate_lru_cache(global_apps.get_swappable_settings_name):
executor = MigrationExecutor(connection)
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Migrate forwards
executor.migrate([("migrations", "0001_initial")])
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# The soft-application detection works.
# Change table_names to not return auth_user during this as it
# wouldn't be there in a normal run, and ensure migrations.Author
# exists in the global app registry temporarily.
old_table_names = connection.introspection.table_names
connection.introspection.table_names = lambda c: [
x for x in old_table_names(c) if x != "auth_user"
]
migrations_apps = executor.loader.project_state(
("migrations", "0001_initial"),
).apps
global_apps.get_app_config("migrations").models["author"] = (
migrations_apps.get_model("migrations", "author")
)
try:
migration = executor.loader.get_migration("auth", "0001_initial")
self.assertIs(executor.detect_soft_applied(None, migration)[0], True)
finally:
connection.introspection.table_names = old_table_names
del global_apps.get_app_config("migrations").models["author"]
# Migrate back to clean up the database.
executor.loader.build_graph()
executor.migrate([("migrations", None)])
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_add_many_to_many_field_initial",
},
)
def test_detect_soft_applied_add_field_manytomanyfield(self):
"""
executor.detect_soft_applied() detects ManyToManyField tables from an
AddField operation. This checks the case of AddField in a migration
with other operations (0001) and the case of AddField in its own
migration (0002).
"""
tables = [
# from 0001
"migrations_project",
"migrations_task",
"migrations_project_tasks",
# from 0002
"migrations_task_projects",
]
executor = MigrationExecutor(connection)
# Create the tables for 0001 but make it look like the migration hasn't
# been applied.
executor.migrate([("migrations", "0001_initial")])
executor.migrate([("migrations", None)], fake=True)
for table in tables[:3]:
self.assertTableExists(table)
# Table detection sees 0001 is applied but not 0002.
migration = executor.loader.get_migration("migrations", "0001_initial")
self.assertIs(executor.detect_soft_applied(None, migration)[0], True)
migration = executor.loader.get_migration("migrations", "0002_initial")
self.assertIs(executor.detect_soft_applied(None, migration)[0], False)
# Create the tables for both migrations but make it look like neither
# has been applied.
executor.loader.build_graph()
executor.migrate([("migrations", "0001_initial")], fake=True)
executor.migrate([("migrations", "0002_initial")])
executor.loader.build_graph()
executor.migrate([("migrations", None)], fake=True)
# Table detection sees 0002 is applied.
migration = executor.loader.get_migration("migrations", "0002_initial")
self.assertIs(executor.detect_soft_applied(None, migration)[0], True)
# Leave the tables for 0001 except the many-to-many table. That missing
# table should cause detect_soft_applied() to return False.
with connection.schema_editor() as editor:
for table in tables[2:]:
editor.execute(editor.sql_delete_table % {"table": table})
migration = executor.loader.get_migration("migrations", "0001_initial")
self.assertIs(executor.detect_soft_applied(None, migration)[0], False)
# Cleanup by removing the remaining tables.
with connection.schema_editor() as editor:
for table in tables[:2]:
editor.execute(editor.sql_delete_table % {"table": table})
for table in tables:
self.assertTableNotExists(table)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.lookuperror_a",
"migrations.migrations_test_apps.lookuperror_b",
"migrations.migrations_test_apps.lookuperror_c",
]
)
def test_unrelated_model_lookups_forwards(self):
"""
#24123 - All models of apps already applied which are
unrelated to the first app being applied are part of the initial model
state.
"""
try:
executor = MigrationExecutor(connection)
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
executor.migrate([("lookuperror_b", "0003_b3")])
self.assertTableExists("lookuperror_b_b3")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
            # Migrate forwards -- this used to raise LookupErrors because
            # lookuperror_b.B2 is already applied.
executor.migrate(
[
("lookuperror_a", "0004_a4"),
("lookuperror_c", "0003_c3"),
]
)
self.assertTableExists("lookuperror_a_a4")
self.assertTableExists("lookuperror_c_c3")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
finally:
# Cleanup
executor.migrate(
[
("lookuperror_a", None),
("lookuperror_b", None),
("lookuperror_c", None),
]
)
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.lookuperror_a",
"migrations.migrations_test_apps.lookuperror_b",
"migrations.migrations_test_apps.lookuperror_c",
]
)
def test_unrelated_model_lookups_backwards(self):
"""
#24123 - All models of apps being unapplied which are
unrelated to the first app being unapplied are part of the initial
model state.
"""
try:
executor = MigrationExecutor(connection)
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
executor.migrate(
[
("lookuperror_a", "0004_a4"),
("lookuperror_b", "0003_b3"),
("lookuperror_c", "0003_c3"),
]
)
self.assertTableExists("lookuperror_b_b3")
self.assertTableExists("lookuperror_a_a4")
self.assertTableExists("lookuperror_c_c3")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
            # Migrate backwards -- this used to raise LookupErrors because
            # lookuperror_b.B2 is not in the initial state (unrelated to app c).
executor.migrate([("lookuperror_a", None)])
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
finally:
# Cleanup
executor.migrate([("lookuperror_b", None), ("lookuperror_c", None)])
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.mutate_state_a",
"migrations.migrations_test_apps.mutate_state_b",
]
)
def test_unrelated_applied_migrations_mutate_state(self):
"""
#26647 - Unrelated applied migrations should be part of the final
state in both directions.
"""
executor = MigrationExecutor(connection)
executor.migrate(
[
("mutate_state_b", "0002_add_field"),
]
)
# Migrate forward.
executor.loader.build_graph()
state = executor.migrate(
[
("mutate_state_a", "0001_initial"),
]
)
self.assertIn("added", state.models["mutate_state_b", "b"].fields)
executor.loader.build_graph()
# Migrate backward.
state = executor.migrate(
[
("mutate_state_a", None),
]
)
self.assertIn("added", state.models["mutate_state_b", "b"].fields)
executor.migrate(
[
("mutate_state_b", None),
]
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_process_callback(self):
"""
        #24129 - Tests the progress callback process.
"""
call_args_list = []
def callback(*args):
call_args_list.append(args)
executor = MigrationExecutor(connection, progress_callback=callback)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
executor.migrate(
[
("migrations", "0001_initial"),
("migrations", "0002_second"),
]
)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
executor.migrate(
[
("migrations", None),
("migrations", None),
]
)
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
migrations = executor.loader.graph.nodes
expected = [
("render_start",),
("render_success",),
("apply_start", migrations["migrations", "0001_initial"], False),
("apply_success", migrations["migrations", "0001_initial"], False),
("apply_start", migrations["migrations", "0002_second"], False),
("apply_success", migrations["migrations", "0002_second"], False),
("render_start",),
("render_success",),
("unapply_start", migrations["migrations", "0002_second"], False),
("unapply_success", migrations["migrations", "0002_second"], False),
("unapply_start", migrations["migrations", "0001_initial"], False),
("unapply_success", migrations["migrations", "0001_initial"], False),
]
self.assertEqual(call_args_list, expected)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_alter_id_type_with_fk(self):
try:
executor = MigrationExecutor(connection)
self.assertTableNotExists("author_app_author")
self.assertTableNotExists("book_app_book")
# Apply initial migrations
executor.migrate(
[
("author_app", "0001_initial"),
("book_app", "0001_initial"),
]
)
self.assertTableExists("author_app_author")
self.assertTableExists("book_app_book")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Apply PK type alteration
executor.migrate([("author_app", "0002_alter_id")])
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
finally:
# We can't simply unapply the migrations here because there is no
# implicit cast from VARCHAR to INT on the database level.
with connection.schema_editor() as editor:
editor.execute(editor.sql_delete_table % {"table": "book_app_book"})
editor.execute(editor.sql_delete_table % {"table": "author_app_author"})
self.assertTableNotExists("author_app_author")
self.assertTableNotExists("book_app_book")
executor.migrate([("author_app", None)], fake=True)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_apply_all_replaced_marks_replacement_as_applied(self):
"""
Applying all replaced migrations marks replacement as applied (#24628).
"""
recorder = MigrationRecorder(connection)
# Place the database in a state where the replaced migrations are
# partially applied: 0001 is applied, 0002 is not.
recorder.record_applied("migrations", "0001_initial")
executor = MigrationExecutor(connection)
# Use fake because we don't actually have the first migration
# applied, so the second will fail. And there's no need to actually
        # create/modify tables here; we're just testing the migration
        # recording, which works the same with or without fake.
executor.migrate([("migrations", "0002_second")], fake=True)
# Because we've now applied 0001 and 0002 both, their squashed
# replacement should be marked as applied.
self.assertIn(
("migrations", "0001_squashed_0002"),
recorder.applied_migrations(),
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_marks_replacement_applied_even_if_it_did_nothing(self):
"""
A new squash migration will be marked as applied even if all its
replaced migrations were previously already applied (#24628).
"""
recorder = MigrationRecorder(connection)
# Record all replaced migrations as applied
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
executor = MigrationExecutor(connection)
executor.migrate([("migrations", "0001_squashed_0002")])
# Because 0001 and 0002 are both applied, even though this migrate run
# didn't apply anything new, their squashed replacement should be
# marked as applied.
self.assertIn(
("migrations", "0001_squashed_0002"),
recorder.applied_migrations(),
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_marks_replacement_unapplied(self):
executor = MigrationExecutor(connection)
executor.migrate([("migrations", "0001_squashed_0002")])
try:
self.assertIn(
("migrations", "0001_squashed_0002"),
executor.recorder.applied_migrations(),
)
finally:
executor.loader.build_graph()
executor.migrate([("migrations", None)])
self.assertNotIn(
("migrations", "0001_squashed_0002"),
executor.recorder.applied_migrations(),
)
# When the feature is False, the operation and the record won't be
# performed in a transaction and the test will systematically pass.
@skipUnlessDBFeature("can_rollback_ddl")
def test_migrations_applied_and_recorded_atomically(self):
"""Migrations are applied and recorded atomically."""
class Migration(migrations.Migration):
operations = [
migrations.CreateModel(
"model",
[
("id", models.AutoField(primary_key=True)),
],
),
]
executor = MigrationExecutor(connection)
with mock.patch(
"django.db.migrations.executor.MigrationExecutor.record_migration"
) as record_migration:
record_migration.side_effect = RuntimeError("Recording migration failed.")
with self.assertRaisesMessage(RuntimeError, "Recording migration failed."):
executor.apply_migration(
ProjectState(),
Migration("0001_initial", "record_migration"),
)
executor.migrate([("migrations", "0001_initial")])
# The migration isn't recorded as applied since it failed.
migration_recorder = MigrationRecorder(connection)
self.assertIs(
migration_recorder.migration_qs.filter(
app="record_migration",
name="0001_initial",
).exists(),
False,
)
self.assertTableNotExists("record_migration_model")
def test_migrations_not_applied_on_deferred_sql_failure(self):
"""Migrations are not recorded if deferred SQL application fails."""
class DeferredSQL:
def __str__(self):
raise DatabaseError("Failed to apply deferred SQL")
class Migration(migrations.Migration):
atomic = False
def apply(self, project_state, schema_editor, collect_sql=False):
schema_editor.deferred_sql.append(DeferredSQL())
executor = MigrationExecutor(connection)
with self.assertRaisesMessage(DatabaseError, "Failed to apply deferred SQL"):
executor.apply_migration(
ProjectState(),
Migration("0001_initial", "deferred_sql"),
)
# The migration isn't recorded as applied since it failed.
migration_recorder = MigrationRecorder(connection)
self.assertIs(
migration_recorder.migration_qs.filter(
app="deferred_sql",
name="0001_initial",
).exists(),
False,
)
@mock.patch.object(MigrationRecorder, "has_table", return_value=False)
def test_migrate_skips_schema_creation(self, mocked_has_table):
"""
The django_migrations table is not created if there are no migrations
to record.
"""
executor = MigrationExecutor(connection)
# 0 queries, since the query for has_table is being mocked.
with self.assertNumQueries(0):
executor.migrate([], plan=[])
class FakeLoader:
def __init__(self, graph, applied):
self.graph = graph
self.applied_migrations = applied
self.replace_migrations = True
class FakeMigration:
"""Really all we need is any object with a debug-useful repr."""
def __init__(self, name):
self.name = name
def __repr__(self):
return "M<%s>" % self.name
class ExecutorUnitTests(SimpleTestCase):
"""(More) isolated unit tests for executor methods."""
def test_minimize_rollbacks(self):
"""
Minimize unnecessary rollbacks in connected apps.
When you say "./manage.py migrate appA 0001", rather than migrating to
just after appA-0001 in the linearized migration plan (which could roll
back migrations in other apps that depend on appA 0001, but don't need
to be rolled back since we're not rolling back appA 0001), we migrate
to just before appA-0002.
"""
a1_impl = FakeMigration("a1")
a1 = ("a", "1")
a2_impl = FakeMigration("a2")
a2 = ("a", "2")
b1_impl = FakeMigration("b1")
b1 = ("b", "1")
graph = MigrationGraph()
graph.add_node(a1, a1_impl)
graph.add_node(a2, a2_impl)
graph.add_node(b1, b1_impl)
graph.add_dependency(None, b1, a1)
graph.add_dependency(None, a2, a1)
executor = MigrationExecutor(None)
executor.loader = FakeLoader(
graph,
{
a1: a1_impl,
b1: b1_impl,
a2: a2_impl,
},
)
plan = executor.migration_plan({a1})
self.assertEqual(plan, [(a2_impl, True)])
def test_minimize_rollbacks_branchy(self):
r"""
Minimize rollbacks when target has multiple in-app children.
a: 1 <---- 3 <--\
\ \- 2 <--- 4
\ \
b: \- 1 <--- 2
"""
a1_impl = FakeMigration("a1")
a1 = ("a", "1")
a2_impl = FakeMigration("a2")
a2 = ("a", "2")
a3_impl = FakeMigration("a3")
a3 = ("a", "3")
a4_impl = FakeMigration("a4")
a4 = ("a", "4")
b1_impl = FakeMigration("b1")
b1 = ("b", "1")
b2_impl = FakeMigration("b2")
b2 = ("b", "2")
graph = MigrationGraph()
graph.add_node(a1, a1_impl)
graph.add_node(a2, a2_impl)
graph.add_node(a3, a3_impl)
graph.add_node(a4, a4_impl)
graph.add_node(b1, b1_impl)
graph.add_node(b2, b2_impl)
graph.add_dependency(None, a2, a1)
graph.add_dependency(None, a3, a1)
graph.add_dependency(None, a4, a2)
graph.add_dependency(None, a4, a3)
graph.add_dependency(None, b2, b1)
graph.add_dependency(None, b1, a1)
graph.add_dependency(None, b2, a2)
executor = MigrationExecutor(None)
executor.loader = FakeLoader(
graph,
{
a1: a1_impl,
b1: b1_impl,
a2: a2_impl,
b2: b2_impl,
a3: a3_impl,
a4: a4_impl,
},
)
plan = executor.migration_plan({a1})
should_be_rolled_back = [b2_impl, a4_impl, a2_impl, a3_impl]
exp = [(m, True) for m in should_be_rolled_back]
self.assertEqual(plan, exp)
def test_backwards_nothing_to_do(self):
r"""
If the current state satisfies the given target, do nothing.
a: 1 <--- 2
b: \- 1
c: \- 1
If a1 is applied already and a2 is not, and we're asked to migrate to
a1, don't apply or unapply b1 or c1, regardless of their current state.
"""
a1_impl = FakeMigration("a1")
a1 = ("a", "1")
a2_impl = FakeMigration("a2")
a2 = ("a", "2")
b1_impl = FakeMigration("b1")
b1 = ("b", "1")
c1_impl = FakeMigration("c1")
c1 = ("c", "1")
graph = MigrationGraph()
graph.add_node(a1, a1_impl)
graph.add_node(a2, a2_impl)
graph.add_node(b1, b1_impl)
graph.add_node(c1, c1_impl)
graph.add_dependency(None, a2, a1)
graph.add_dependency(None, b1, a1)
graph.add_dependency(None, c1, a1)
executor = MigrationExecutor(None)
executor.loader = FakeLoader(
graph,
{
a1: a1_impl,
b1: b1_impl,
},
)
plan = executor.migration_plan({a1})
self.assertEqual(plan, [])
|
./temp_repos/django/django/db/migrations/executor.py
|
./temp_repos/django/tests/migrations/test_executor.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'MigrationExecutor'.
Context:
- Class Name: MigrationExecutor
- Dependencies to Mock: progress_callback, connection
- Key Imports: recorder, state, django.db, exceptions, django.apps.registry, loader
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
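A minimal sketch that satisfies these requirements (hypothetical test and
variable names; it bypasses __init__ so no real connection or
progress_callback is needed):
import unittest
from unittest import mock
from django.db.migrations.executor import MigrationExecutor
class RecordMigrationTests(unittest.TestCase):
    def test_record_migration_forward_and_backward(self):
        # Arrange: skip __init__ to avoid touching a real database.
        executor = MigrationExecutor.__new__(MigrationExecutor)
        executor.loader = mock.MagicMock()
        executor.loader.disk_migrations = {}  # no squash replacements
        executor.recorder = mock.MagicMock()
        # Act: record the same migration as applied, then as unapplied.
        executor.record_migration("app", "0001_initial")
        executor.record_migration("app", "0001_initial", forward=False)
        # Assert: forward records as applied, backward as unapplied.
        executor.recorder.record_applied.assert_called_once_with("app", "0001_initial")
        executor.recorder.record_unapplied.assert_called_once_with("app", "0001_initial")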
|
MigrationExecutor
|
python
|
"""
Helpers to manipulate deferred DDL statements that might need to be adjusted or
discarded within when executing a migration.
"""
from copy import deepcopy
class Reference:
"""Base class that defines the reference interface."""
def references_table(self, table):
"""
Return whether or not this instance references the specified table.
"""
return False
def references_column(self, table, column):
"""
Return whether or not this instance references the specified column.
"""
return False
def references_index(self, table, index):
"""
Return whether or not this instance references the specified index.
"""
return False
def rename_table_references(self, old_table, new_table):
"""
        Rename all references to old_table to new_table.
"""
pass
def rename_column_references(self, table, old_column, new_column):
"""
        Rename all references to old_column to new_column.
"""
pass
def __repr__(self):
return "<%s %r>" % (self.__class__.__name__, str(self))
def __str__(self):
raise NotImplementedError(
"Subclasses must define how they should be converted to string."
)
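# Illustrative subclass sketch (not part of Django): concrete references
# override only the hooks they need and must define __str__, e.g.
#
#   class RawSQL(Reference):
#       def __init__(self, sql):
#           self.sql = sql
#       def __str__(self):
#           return self.sql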
class Table(Reference):
"""Hold a reference to a table."""
def __init__(self, table, quote_name):
self.table = table
self.quote_name = quote_name
def references_table(self, table):
return self.table == table
def references_index(self, table, index):
return self.references_table(table) and str(self) == index
def rename_table_references(self, old_table, new_table):
if self.table == old_table:
self.table = new_table
def __str__(self):
return self.quote_name(self.table)
class TableColumns(Table):
"""Base class for references to multiple columns of a table."""
def __init__(self, table, columns):
self.table = table
self.columns = columns
def references_column(self, table, column):
return self.table == table and column in self.columns
def rename_column_references(self, table, old_column, new_column):
if self.table == table:
for index, column in enumerate(self.columns):
if column == old_column:
self.columns[index] = new_column
class Columns(TableColumns):
"""Hold a reference to one or many columns."""
def __init__(self, table, columns, quote_name, col_suffixes=()):
self.quote_name = quote_name
self.col_suffixes = col_suffixes
super().__init__(table, columns)
def __str__(self):
def col_str(column, idx):
col = self.quote_name(column)
try:
suffix = self.col_suffixes[idx]
if suffix:
col = "{} {}".format(col, suffix)
except IndexError:
pass
return col
return ", ".join(
col_str(column, idx) for idx, column in enumerate(self.columns)
)
class IndexName(TableColumns):
"""Hold a reference to an index name."""
def __init__(self, table, columns, suffix, create_index_name):
self.suffix = suffix
self.create_index_name = create_index_name
super().__init__(table, columns)
def __str__(self):
return self.create_index_name(self.table, self.columns, self.suffix)
class IndexColumns(Columns):
def __init__(self, table, columns, quote_name, col_suffixes=(), opclasses=()):
self.opclasses = opclasses
super().__init__(table, columns, quote_name, col_suffixes)
def __str__(self):
def col_str(column, idx):
# Index.__init__() guarantees that self.opclasses is the same
# length as self.columns.
col = "{} {}".format(self.quote_name(column), self.opclasses[idx])
try:
suffix = self.col_suffixes[idx]
if suffix:
col = "{} {}".format(col, suffix)
except IndexError:
pass
return col
return ", ".join(
col_str(column, idx) for idx, column in enumerate(self.columns)
)
class ForeignKeyName(TableColumns):
"""Hold a reference to a foreign key name."""
def __init__(
self,
from_table,
from_columns,
to_table,
to_columns,
suffix_template,
create_fk_name,
):
self.to_reference = TableColumns(to_table, to_columns)
self.suffix_template = suffix_template
self.create_fk_name = create_fk_name
super().__init__(
from_table,
from_columns,
)
def references_table(self, table):
return super().references_table(table) or self.to_reference.references_table(
table
)
def references_column(self, table, column):
return super().references_column(
table, column
) or self.to_reference.references_column(table, column)
def rename_table_references(self, old_table, new_table):
super().rename_table_references(old_table, new_table)
self.to_reference.rename_table_references(old_table, new_table)
def rename_column_references(self, table, old_column, new_column):
super().rename_column_references(table, old_column, new_column)
self.to_reference.rename_column_references(table, old_column, new_column)
def __str__(self):
suffix = self.suffix_template % {
"to_table": self.to_reference.table,
"to_column": self.to_reference.columns[0],
}
return self.create_fk_name(self.table, self.columns, suffix)
class Statement(Reference):
"""
Statement template and formatting parameters container.
Allows keeping a reference to a statement without interpolating identifiers
    that might have to be adjusted if they reference a table or column
    that is renamed or removed.
"""
def __init__(self, template, **parts):
self.template = template
self.parts = parts
def references_table(self, table):
return any(
hasattr(part, "references_table") and part.references_table(table)
for part in self.parts.values()
)
def references_column(self, table, column):
return any(
hasattr(part, "references_column") and part.references_column(table, column)
for part in self.parts.values()
)
def references_index(self, table, index):
return any(
hasattr(part, "references_index") and part.references_index(table, index)
for part in self.parts.values()
)
def rename_table_references(self, old_table, new_table):
for part in self.parts.values():
if hasattr(part, "rename_table_references"):
part.rename_table_references(old_table, new_table)
def rename_column_references(self, table, old_column, new_column):
for part in self.parts.values():
if hasattr(part, "rename_column_references"):
part.rename_column_references(table, old_column, new_column)
def __str__(self):
return self.template % self.parts
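# Illustrative use (hypothetical identifiers): keeping deferred DDL as a
# Statement lets later renames patch parts before the SQL is rendered:
#   stmt = Statement("ALTER TABLE %(table)s ...", table=Table("app_model", quote_name))
#   stmt.rename_table_references("app_model", "app_renamed")
#   str(stmt)  # interpolates the updated table name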
class Expressions(TableColumns):
def __init__(self, table, expressions, compiler, quote_value):
self.compiler = compiler
self.expressions = expressions
self.quote_value = quote_value
columns = [
col.target.column
for col in self.compiler.query._gen_cols([self.expressions])
]
super().__init__(table, columns)
def rename_table_references(self, old_table, new_table):
if self.table != old_table:
return
self.expressions = self.expressions.relabeled_clone({old_table: new_table})
super().rename_table_references(old_table, new_table)
def rename_column_references(self, table, old_column, new_column):
if self.table != table:
return
expressions = deepcopy(self.expressions)
self.columns = []
for col in self.compiler.query._gen_cols([expressions]):
if col.target.column == old_column:
col.target.column = new_column
self.columns.append(col.target.column)
self.expressions = expressions
def __str__(self):
sql, params = self.compiler.compile(self.expressions)
params = map(self.quote_value, params)
return sql % tuple(params)
|
from django.db import connection
from django.db.backends.ddl_references import (
Columns,
Expressions,
ForeignKeyName,
IndexName,
Statement,
Table,
)
from django.db.models import ExpressionList, F
from django.db.models.functions import Upper
from django.db.models.indexes import IndexExpression
from django.db.models.sql import Query
from django.test import SimpleTestCase, TransactionTestCase
from .models import Person
class TableTests(SimpleTestCase):
def setUp(self):
self.reference = Table("table", lambda table: table.upper())
def test_references_table(self):
self.assertIs(self.reference.references_table("table"), True)
self.assertIs(self.reference.references_table("other"), False)
def test_rename_table_references(self):
self.reference.rename_table_references("other", "table")
self.assertIs(self.reference.references_table("table"), True)
self.assertIs(self.reference.references_table("other"), False)
self.reference.rename_table_references("table", "other")
self.assertIs(self.reference.references_table("table"), False)
self.assertIs(self.reference.references_table("other"), True)
def test_repr(self):
self.assertEqual(repr(self.reference), "<Table 'TABLE'>")
def test_str(self):
self.assertEqual(str(self.reference), "TABLE")
class ColumnsTests(TableTests):
def setUp(self):
self.reference = Columns(
"table", ["first_column", "second_column"], lambda column: column.upper()
)
def test_references_column(self):
self.assertIs(self.reference.references_column("other", "first_column"), False)
self.assertIs(self.reference.references_column("table", "third_column"), False)
self.assertIs(self.reference.references_column("table", "first_column"), True)
def test_rename_column_references(self):
self.reference.rename_column_references("other", "first_column", "third_column")
self.assertIs(self.reference.references_column("table", "first_column"), True)
self.assertIs(self.reference.references_column("table", "third_column"), False)
self.assertIs(self.reference.references_column("other", "third_column"), False)
self.reference.rename_column_references("table", "third_column", "first_column")
self.assertIs(self.reference.references_column("table", "first_column"), True)
self.assertIs(self.reference.references_column("table", "third_column"), False)
self.reference.rename_column_references("table", "first_column", "third_column")
self.assertIs(self.reference.references_column("table", "first_column"), False)
self.assertIs(self.reference.references_column("table", "third_column"), True)
def test_repr(self):
self.assertEqual(
repr(self.reference), "<Columns 'FIRST_COLUMN, SECOND_COLUMN'>"
)
def test_str(self):
self.assertEqual(str(self.reference), "FIRST_COLUMN, SECOND_COLUMN")
class IndexNameTests(ColumnsTests):
def setUp(self):
def create_index_name(table_name, column_names, suffix):
return ", ".join(
"%s_%s_%s" % (table_name, column_name, suffix)
for column_name in column_names
)
self.reference = IndexName(
"table", ["first_column", "second_column"], "suffix", create_index_name
)
def test_repr(self):
self.assertEqual(
repr(self.reference),
"<IndexName 'table_first_column_suffix, table_second_column_suffix'>",
)
def test_str(self):
self.assertEqual(
str(self.reference), "table_first_column_suffix, table_second_column_suffix"
)
class ForeignKeyNameTests(IndexNameTests):
def setUp(self):
def create_foreign_key_name(table_name, column_names, suffix):
return ", ".join(
"%s_%s_%s" % (table_name, column_name, suffix)
for column_name in column_names
)
self.reference = ForeignKeyName(
"table",
["first_column", "second_column"],
"to_table",
["to_first_column", "to_second_column"],
"%(to_table)s_%(to_column)s_fk",
create_foreign_key_name,
)
def test_references_table(self):
super().test_references_table()
self.assertIs(self.reference.references_table("to_table"), True)
def test_references_column(self):
super().test_references_column()
self.assertIs(
self.reference.references_column("to_table", "second_column"), False
)
self.assertIs(
self.reference.references_column("to_table", "to_second_column"), True
)
def test_rename_table_references(self):
super().test_rename_table_references()
self.reference.rename_table_references("to_table", "other_to_table")
self.assertIs(self.reference.references_table("other_to_table"), True)
self.assertIs(self.reference.references_table("to_table"), False)
def test_rename_column_references(self):
super().test_rename_column_references()
self.reference.rename_column_references(
"to_table", "second_column", "third_column"
)
self.assertIs(self.reference.references_column("table", "second_column"), True)
self.assertIs(
self.reference.references_column("to_table", "to_second_column"), True
)
self.reference.rename_column_references(
"to_table", "to_first_column", "to_third_column"
)
self.assertIs(
self.reference.references_column("to_table", "to_first_column"), False
)
self.assertIs(
self.reference.references_column("to_table", "to_third_column"), True
)
def test_repr(self):
self.assertEqual(
repr(self.reference),
"<ForeignKeyName 'table_first_column_to_table_to_first_column_fk, "
"table_second_column_to_table_to_first_column_fk'>",
)
def test_str(self):
self.assertEqual(
str(self.reference),
"table_first_column_to_table_to_first_column_fk, "
"table_second_column_to_table_to_first_column_fk",
)
class MockReference:
def __init__(
self, representation, referenced_tables, referenced_columns, referenced_indexes
):
self.representation = representation
self.referenced_tables = referenced_tables
self.referenced_columns = referenced_columns
self.referenced_indexes = referenced_indexes
def references_table(self, table):
return table in self.referenced_tables
def references_column(self, table, column):
return (table, column) in self.referenced_columns
def references_index(self, table, index):
return (table, index) in self.referenced_indexes
def rename_table_references(self, old_table, new_table):
if old_table in self.referenced_tables:
self.referenced_tables.remove(old_table)
self.referenced_tables.add(new_table)
def rename_column_references(self, table, old_column, new_column):
column = (table, old_column)
if column in self.referenced_columns:
self.referenced_columns.remove(column)
self.referenced_columns.add((table, new_column))
def __str__(self):
return self.representation
class StatementTests(SimpleTestCase):
def test_references_table(self):
statement = Statement(
"", reference=MockReference("", {"table"}, {}, {}), non_reference=""
)
self.assertIs(statement.references_table("table"), True)
self.assertIs(statement.references_table("other"), False)
def test_references_column(self):
statement = Statement(
"",
reference=MockReference("", {}, {("table", "column")}, {}),
non_reference="",
)
self.assertIs(statement.references_column("table", "column"), True)
self.assertIs(statement.references_column("other", "column"), False)
def test_references_index(self):
statement = Statement(
"",
reference=MockReference("", {}, {}, {("table", "index")}),
non_reference="",
)
self.assertIs(statement.references_index("table", "index"), True)
self.assertIs(statement.references_index("other", "index"), False)
def test_rename_table_references(self):
reference = MockReference("", {"table"}, {}, {})
statement = Statement("", reference=reference, non_reference="")
statement.rename_table_references("table", "other")
self.assertEqual(reference.referenced_tables, {"other"})
def test_rename_column_references(self):
reference = MockReference("", {}, {("table", "column")}, {})
statement = Statement("", reference=reference, non_reference="")
statement.rename_column_references("table", "column", "other")
self.assertEqual(reference.referenced_columns, {("table", "other")})
def test_repr(self):
reference = MockReference("reference", {}, {}, {})
statement = Statement(
"%(reference)s - %(non_reference)s",
reference=reference,
non_reference="non_reference",
)
self.assertEqual(repr(statement), "<Statement 'reference - non_reference'>")
def test_str(self):
reference = MockReference("reference", {}, {}, {})
statement = Statement(
"%(reference)s - %(non_reference)s",
reference=reference,
non_reference="non_reference",
)
self.assertEqual(str(statement), "reference - non_reference")
class ExpressionsTests(TransactionTestCase):
available_apps = []
def setUp(self):
compiler = Person.objects.all().query.get_compiler(connection.alias)
self.editor = connection.schema_editor()
self.expressions = Expressions(
table=Person._meta.db_table,
expressions=ExpressionList(
IndexExpression(F("first_name")),
IndexExpression(F("last_name").desc()),
IndexExpression(Upper("last_name")),
).resolve_expression(compiler.query),
compiler=compiler,
quote_value=self.editor.quote_value,
)
def test_references_table(self):
self.assertIs(self.expressions.references_table(Person._meta.db_table), True)
self.assertIs(self.expressions.references_table("other"), False)
def test_references_column(self):
table = Person._meta.db_table
self.assertIs(self.expressions.references_column(table, "first_name"), True)
self.assertIs(self.expressions.references_column(table, "last_name"), True)
self.assertIs(self.expressions.references_column(table, "other"), False)
def test_rename_table_references(self):
table = Person._meta.db_table
self.expressions.rename_table_references(table, "other")
self.assertIs(self.expressions.references_table(table), False)
self.assertIs(self.expressions.references_table("other"), True)
self.assertIn(
"%s.%s"
% (
self.editor.quote_name("other"),
self.editor.quote_name("first_name"),
),
str(self.expressions),
)
def test_rename_table_references_without_alias(self):
compiler = Query(Person, alias_cols=False).get_compiler(connection=connection)
table = Person._meta.db_table
expressions = Expressions(
table=table,
expressions=ExpressionList(
IndexExpression(Upper("last_name")),
IndexExpression(F("first_name")),
).resolve_expression(compiler.query),
compiler=compiler,
quote_value=self.editor.quote_value,
)
expressions.rename_table_references(table, "other")
self.assertIs(expressions.references_table(table), False)
self.assertIs(expressions.references_table("other"), True)
expected_str = "(UPPER(%s)), %s" % (
self.editor.quote_name("last_name"),
self.editor.quote_name("first_name"),
)
self.assertEqual(str(expressions), expected_str)
def test_rename_column_references(self):
table = Person._meta.db_table
self.expressions.rename_column_references(table, "first_name", "other")
self.assertIs(self.expressions.references_column(table, "other"), True)
self.assertIs(self.expressions.references_column(table, "first_name"), False)
self.assertIn(
"%s.%s" % (self.editor.quote_name(table), self.editor.quote_name("other")),
str(self.expressions),
)
def test_str(self):
table_name = self.editor.quote_name(Person._meta.db_table)
expected_str = "%s.%s, %s.%s DESC, (UPPER(%s.%s))" % (
table_name,
self.editor.quote_name("first_name"),
table_name,
self.editor.quote_name("last_name"),
table_name,
self.editor.quote_name("last_name"),
)
self.assertEqual(str(self.expressions), expected_str)
|
./temp_repos/django/django/db/backends/ddl_references.py
|
./temp_repos/django/tests/backends/test_ddl_references.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Reference'.
Context:
- Class Name: Reference
- Dependencies to Mock: template, col_suffixes, expressions, suffix, table, create_index_name, from_table, quote_value, opclasses, to_columns, to_table, suffix_template, create_fk_name, from_columns, quote_name, columns, compiler
- Key Imports: copy
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
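A minimal sketch that satisfies these requirements, assuming Django is importable and that the base Reference class keeps the null-interface behaviour relied on elsewhere in this file (the references_* methods return False and __str__ raises NotImplementedError):
import unittest
from unittest import mock
from django.db.backends.ddl_references import Reference


class ReferenceTests(unittest.TestCase):
    def test_references_nothing_by_default(self):
        # Arrange
        reference = Reference()
        # Act / Assert: the base class never claims to reference anything.
        self.assertIs(reference.references_table("table"), False)
        self.assertIs(reference.references_column("table", "column"), False)

    def test_str_is_abstract(self):
        # Arrange
        reference = Reference()
        # Act / Assert: subclasses must provide the string conversion.
        with self.assertRaises(NotImplementedError):
            str(reference)

    def test_repr_uses_subclass_str(self):
        # Arrange: a throwaway subclass stands in for a concrete reference.
        rendered = mock.MagicMock(return_value="ref")
        FakeReference = type(
            "FakeReference", (Reference,), {"__str__": lambda self: rendered()}
        )
        # Act / Assert
        self.assertEqual(repr(FakeReference()), "<FakeReference 'ref'>")
        rendered.assert_called_once_with()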
|
Reference
|
python
|
import os
import subprocess
import sys
from django.db.backends.base.creation import BaseDatabaseCreation
from .client import DatabaseClient
class DatabaseCreation(BaseDatabaseCreation):
def sql_table_creation_suffix(self):
suffix = []
test_settings = self.connection.settings_dict["TEST"]
if test_settings["CHARSET"]:
suffix.append("CHARACTER SET %s" % test_settings["CHARSET"])
if test_settings["COLLATION"]:
suffix.append("COLLATE %s" % test_settings["COLLATION"])
return " ".join(suffix)
def _execute_create_test_db(self, cursor, parameters, keepdb=False):
try:
super()._execute_create_test_db(cursor, parameters, keepdb)
except Exception as e:
if len(e.args) < 1 or e.args[0] != 1007:
# All errors except "database exists" (1007) cancel tests.
self.log("Got an error creating the test database: %s" % e)
sys.exit(2)
else:
raise
def _clone_test_db(self, suffix, verbosity, keepdb=False):
source_database_name = self.connection.settings_dict["NAME"]
target_database_name = self.get_test_db_clone_settings(suffix)["NAME"]
test_db_params = {
"dbname": self.connection.ops.quote_name(target_database_name),
"suffix": self.sql_table_creation_suffix(),
}
with self._nodb_cursor() as cursor:
try:
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception:
if keepdb:
# If the database should be kept, skip everything else.
return
try:
if verbosity >= 1:
self.log(
"Destroying old test database for alias %s..."
% (
self._get_database_display_str(
verbosity, target_database_name
),
)
)
cursor.execute("DROP DATABASE %(dbname)s" % test_db_params)
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception as e:
self.log("Got an error recreating the test database: %s" % e)
sys.exit(2)
self._clone_db(source_database_name, target_database_name)
def _clone_db(self, source_database_name, target_database_name):
cmd_args, cmd_env = DatabaseClient.settings_to_cmd_args_env(
self.connection.settings_dict, []
)
dump_cmd = [
"mysqldump",
*cmd_args[1:-1],
"--routines",
"--events",
source_database_name,
]
dump_env = load_env = {**os.environ, **cmd_env} if cmd_env else None
load_cmd = cmd_args
load_cmd[-1] = target_database_name
with (
subprocess.Popen(
dump_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=dump_env
) as dump_proc,
subprocess.Popen(
load_cmd,
stdin=dump_proc.stdout,
stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE,
env=load_env,
) as load_proc,
):
# Allow dump_proc to receive a SIGPIPE if the load process exits.
dump_proc.stdout.close()
dump_err = dump_proc.stderr.read().decode(errors="replace")
load_err = load_proc.stderr.read().decode(errors="replace")
if dump_proc.returncode != 0:
self.log(
f"Got an error on mysqldump when cloning the test database: {dump_err}"
)
sys.exit(dump_proc.returncode)
if load_proc.returncode != 0:
self.log(f"Got an error cloning the test database: {load_err}")
sys.exit(load_proc.returncode)
|
import subprocess
import unittest
from io import BytesIO, StringIO
from unittest import mock
from django.db import DatabaseError, connection
from django.db.backends.base.creation import BaseDatabaseCreation
from django.db.backends.mysql.creation import DatabaseCreation
from django.test import SimpleTestCase
from django.test.utils import captured_stderr
@unittest.skipUnless(connection.vendor == "mysql", "MySQL tests")
class DatabaseCreationTests(SimpleTestCase):
def _execute_raise_database_exists(self, cursor, parameters, keepdb=False):
raise DatabaseError(
1007, "Can't create database '%s'; database exists" % parameters["dbname"]
)
def _execute_raise_access_denied(self, cursor, parameters, keepdb=False):
raise DatabaseError(1044, "Access denied for user")
def patch_test_db_creation(self, execute_create_test_db):
return mock.patch.object(
BaseDatabaseCreation, "_execute_create_test_db", execute_create_test_db
)
@mock.patch("sys.stdout", new_callable=StringIO)
@mock.patch("sys.stderr", new_callable=StringIO)
def test_create_test_db_database_exists(self, *mocked_objects):
# Simulate test database creation raising "database exists"
creation = DatabaseCreation(connection)
with self.patch_test_db_creation(self._execute_raise_database_exists):
with mock.patch("builtins.input", return_value="no"):
with self.assertRaises(SystemExit):
# SystemExit is raised if the user answers "no" to the
# prompt asking if it's okay to delete the test database.
creation._create_test_db(
verbosity=0, autoclobber=False, keepdb=False
)
# "Database exists" shouldn't appear when keepdb is on
creation._create_test_db(verbosity=0, autoclobber=False, keepdb=True)
@mock.patch("sys.stdout", new_callable=StringIO)
@mock.patch("sys.stderr", new_callable=StringIO)
def test_create_test_db_unexpected_error(self, *mocked_objects):
# Simulate test database creation raising unexpected error
creation = DatabaseCreation(connection)
with self.patch_test_db_creation(self._execute_raise_access_denied):
with self.assertRaises(SystemExit):
creation._create_test_db(verbosity=0, autoclobber=False, keepdb=False)
def test_clone_test_db_database_exists(self):
creation = DatabaseCreation(connection)
with self.patch_test_db_creation(self._execute_raise_database_exists):
with mock.patch.object(DatabaseCreation, "_clone_db") as _clone_db:
creation._clone_test_db("suffix", verbosity=0, keepdb=True)
_clone_db.assert_not_called()
def test_clone_test_db_options_ordering(self):
creation = DatabaseCreation(connection)
mock_subprocess_call = mock.MagicMock()
mock_subprocess_call.returncode = 0
try:
saved_settings = connection.settings_dict
connection.settings_dict = {
"NAME": "source_db",
"USER": "",
"PASSWORD": "",
"PORT": "",
"HOST": "",
"ENGINE": "django.db.backends.mysql",
"OPTIONS": {
"read_default_file": "my.cnf",
},
}
with mock.patch.object(subprocess, "Popen") as mocked_popen:
mocked_popen.return_value.__enter__.return_value = mock_subprocess_call
creation._clone_db("source_db", "target_db")
mocked_popen.assert_has_calls(
[
mock.call(
[
"mysqldump",
"--defaults-file=my.cnf",
"--routines",
"--events",
"source_db",
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=None,
),
]
)
finally:
connection.settings_dict = saved_settings
def test_clone_test_db_subprocess_mysqldump_error(self):
creation = DatabaseCreation(connection)
mock_subprocess_call = mock.MagicMock()
mock_subprocess_call.returncode = 0
# Simulate mysqldump in test database cloning raises an error.
msg = "Couldn't execute 'SELECT ...'"
mock_subprocess_call_error = mock.MagicMock()
mock_subprocess_call_error.returncode = 2
mock_subprocess_call_error.stderr = BytesIO(msg.encode())
with mock.patch.object(subprocess, "Popen") as mocked_popen:
mocked_popen.return_value.__enter__.side_effect = [
mock_subprocess_call_error, # mysqldump mock
mock_subprocess_call, # load mock
]
with captured_stderr() as err, self.assertRaises(SystemExit) as cm:
creation._clone_db("source_db", "target_db")
self.assertEqual(cm.exception.code, 2)
self.assertIn(
f"Got an error on mysqldump when cloning the test database: {msg}",
err.getvalue(),
)
def test_clone_test_db_subprocess_mysql_error(self):
creation = DatabaseCreation(connection)
mock_subprocess_call = mock.MagicMock()
mock_subprocess_call.returncode = 0
# Simulate load in test database cloning raises an error.
msg = "Some error"
mock_subprocess_call_error = mock.MagicMock()
mock_subprocess_call_error.returncode = 3
mock_subprocess_call_error.stderr = BytesIO(msg.encode())
with mock.patch.object(subprocess, "Popen") as mocked_popen:
mocked_popen.return_value.__enter__.side_effect = [
mock_subprocess_call, # mysqldump mock
mock_subprocess_call_error, # load mock
]
with captured_stderr() as err, self.assertRaises(SystemExit) as cm:
creation._clone_db("source_db", "target_db")
self.assertEqual(cm.exception.code, 3)
self.assertIn(f"Got an error cloning the test database: {msg}", err.getvalue())
|
./temp_repos/django/django/db/backends/mysql/creation.py
|
./temp_repos/django/tests/backends/mysql/test_creation.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DatabaseCreation'.
Context:
- Class Name: DatabaseCreation
- Dependencies to Mock: None detected
- Key Imports: client, subprocess, django.db.backends.base.creation, os, sys
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
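A minimal sketch along these lines, assuming django.db.backends.mysql.creation is importable; the connection is a MagicMock, so no MySQL server is touched:
import unittest
from unittest import mock
from django.db.backends.base.creation import BaseDatabaseCreation
from django.db.backends.mysql.creation import DatabaseCreation


class MySQLCreationTests(unittest.TestCase):
    def _make_creation(self, charset=None, collation=None):
        # Arrange: a mocked connection carrying only the settings the SUT reads.
        connection = mock.MagicMock()
        connection.settings_dict = {"TEST": {"CHARSET": charset, "COLLATION": collation}}
        return DatabaseCreation(connection)

    def test_sql_table_creation_suffix_success(self):
        creation = self._make_creation(charset="utf8mb4", collation="utf8mb4_bin")
        # Act
        suffix = creation.sql_table_creation_suffix()
        # Assert
        self.assertEqual(suffix, "CHARACTER SET utf8mb4 COLLATE utf8mb4_bin")

    def test_execute_create_test_db_unexpected_error_exits(self):
        creation = self._make_creation()
        with mock.patch.object(
            BaseDatabaseCreation,
            "_execute_create_test_db",
            side_effect=Exception(1044, "Access denied"),
        ), mock.patch.object(creation, "log"):
            # Act / Assert: any error other than 1007 aborts the test run.
            with self.assertRaises(SystemExit) as cm:
                creation._execute_create_test_db(mock.MagicMock(), {})
        self.assertEqual(cm.exception.code, 2)

    def test_execute_create_test_db_database_exists_reraises(self):
        creation = self._make_creation()
        with mock.patch.object(
            BaseDatabaseCreation,
            "_execute_create_test_db",
            side_effect=Exception(1007, "database exists"),
        ):
            # Act / Assert: error 1007 is re-raised for the caller to handle.
            with self.assertRaises(Exception):
                creation._execute_create_test_db(mock.MagicMock(), {})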
|
DatabaseCreation
|
python
|
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.mysql.features import DatabaseFeatures as MySQLDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseSpatialFeatures, MySQLDatabaseFeatures):
empty_intersection_returns_none = False
has_spatialrefsys_table = False
supports_add_srs_entry = False
supports_distance_geodetic = False
supports_length_geodetic = False
supports_area_geodetic = False
supports_transform = False
supports_null_geometries = False
supports_num_points_poly = False
unsupported_geojson_options = {"crs"}
@cached_property
def supports_geometry_field_unique_index(self):
# Not supported in MySQL since
# https://dev.mysql.com/worklog/task/?id=11808
return self.connection.mysql_is_mariadb
|
from unittest import mock, skipUnless
from django.db import connection
from django.db.backends.mysql.features import DatabaseFeatures
from django.test import TestCase
@skipUnless(connection.vendor == "mysql", "MySQL tests")
class TestFeatures(TestCase):
def test_supports_transactions(self):
"""
All storage engines except MyISAM support transactions.
"""
del connection.features.supports_transactions
with mock.patch(
"django.db.connection.features._mysql_storage_engine", "InnoDB"
):
self.assertTrue(connection.features.supports_transactions)
del connection.features.supports_transactions
with mock.patch(
"django.db.connection.features._mysql_storage_engine", "MyISAM"
):
self.assertFalse(connection.features.supports_transactions)
del connection.features.supports_transactions
def test_allows_auto_pk_0(self):
with mock.MagicMock() as _connection:
_connection.sql_mode = {"NO_AUTO_VALUE_ON_ZERO"}
database_features = DatabaseFeatures(_connection)
self.assertIs(database_features.allows_auto_pk_0, True)
def test_allows_group_by_selected_pks(self):
with mock.MagicMock() as _connection:
_connection.mysql_is_mariadb = False
database_features = DatabaseFeatures(_connection)
self.assertIs(database_features.allows_group_by_selected_pks, True)
with mock.MagicMock() as _connection:
_connection.mysql_is_mariadb = False
_connection.sql_mode = {}
database_features = DatabaseFeatures(_connection)
self.assertIs(database_features.allows_group_by_selected_pks, True)
with mock.MagicMock() as _connection:
_connection.mysql_is_mariadb = True
_connection.sql_mode = {"ONLY_FULL_GROUP_BY"}
database_features = DatabaseFeatures(_connection)
self.assertIs(database_features.allows_group_by_selected_pks, False)
|
./temp_repos/django/django/contrib/gis/db/backends/mysql/features.py
|
./temp_repos/django/tests/backends/mysql/test_features.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DatabaseFeatures'.
Context:
- Class Name: DatabaseFeatures
- Dependencies to Mock: None detected
- Key Imports: django.utils.functional, django.contrib.gis.db.backends.base.features, django.db.backends.mysql.features
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
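A compact sketch in that spirit, assuming the GeoDjango prerequisites (GDAL/GEOS) are installed so the module imports; the connection itself is mocked, so no MySQL or MariaDB instance is needed:
import unittest
from unittest import mock
from django.contrib.gis.db.backends.mysql.features import DatabaseFeatures


class GISMySQLFeaturesTests(unittest.TestCase):
    def test_supports_geometry_field_unique_index(self):
        for is_mariadb, expected in [(True, True), (False, False)]:
            with self.subTest(mariadb=is_mariadb):
                # Arrange
                connection = mock.MagicMock()
                connection.mysql_is_mariadb = is_mariadb
                features = DatabaseFeatures(connection)
                # Act / Assert: only MariaDB supports the unique index.
                self.assertIs(
                    features.supports_geometry_field_unique_index, expected
                )

    def test_static_capability_flags(self):
        # Arrange
        features = DatabaseFeatures(mock.MagicMock())
        # Act / Assert: these flags are plain class attributes.
        self.assertIs(features.supports_transform, False)
        self.assertEqual(features.unsupported_geojson_options, {"crs"})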
|
DatabaseFeatures
|
python
|
import os
import subprocess
import sys
from django.db.backends.base.creation import BaseDatabaseCreation
from .client import DatabaseClient
class DatabaseCreation(BaseDatabaseCreation):
def sql_table_creation_suffix(self):
suffix = []
test_settings = self.connection.settings_dict["TEST"]
if test_settings["CHARSET"]:
suffix.append("CHARACTER SET %s" % test_settings["CHARSET"])
if test_settings["COLLATION"]:
suffix.append("COLLATE %s" % test_settings["COLLATION"])
return " ".join(suffix)
def _execute_create_test_db(self, cursor, parameters, keepdb=False):
try:
super()._execute_create_test_db(cursor, parameters, keepdb)
except Exception as e:
if len(e.args) < 1 or e.args[0] != 1007:
# All errors except "database exists" (1007) cancel tests.
self.log("Got an error creating the test database: %s" % e)
sys.exit(2)
else:
raise
def _clone_test_db(self, suffix, verbosity, keepdb=False):
source_database_name = self.connection.settings_dict["NAME"]
target_database_name = self.get_test_db_clone_settings(suffix)["NAME"]
test_db_params = {
"dbname": self.connection.ops.quote_name(target_database_name),
"suffix": self.sql_table_creation_suffix(),
}
with self._nodb_cursor() as cursor:
try:
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception:
if keepdb:
# If the database should be kept, skip everything else.
return
try:
if verbosity >= 1:
self.log(
"Destroying old test database for alias %s..."
% (
self._get_database_display_str(
verbosity, target_database_name
),
)
)
cursor.execute("DROP DATABASE %(dbname)s" % test_db_params)
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception as e:
self.log("Got an error recreating the test database: %s" % e)
sys.exit(2)
self._clone_db(source_database_name, target_database_name)
def _clone_db(self, source_database_name, target_database_name):
cmd_args, cmd_env = DatabaseClient.settings_to_cmd_args_env(
self.connection.settings_dict, []
)
dump_cmd = [
"mysqldump",
*cmd_args[1:-1],
"--routines",
"--events",
source_database_name,
]
dump_env = load_env = {**os.environ, **cmd_env} if cmd_env else None
load_cmd = cmd_args
load_cmd[-1] = target_database_name
with (
subprocess.Popen(
dump_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=dump_env
) as dump_proc,
subprocess.Popen(
load_cmd,
stdin=dump_proc.stdout,
stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE,
env=load_env,
) as load_proc,
):
# Allow dump_proc to receive a SIGPIPE if the load process exits.
dump_proc.stdout.close()
dump_err = dump_proc.stderr.read().decode(errors="replace")
load_err = load_proc.stderr.read().decode(errors="replace")
if dump_proc.returncode != 0:
self.log(
f"Got an error on mysqldump when cloning the test database: {dump_err}"
)
sys.exit(dump_proc.returncode)
if load_proc.returncode != 0:
self.log(f"Got an error cloning the test database: {load_err}")
sys.exit(load_proc.returncode)
|
import unittest
from contextlib import contextmanager
from io import StringIO
from unittest import mock
from django.core.exceptions import ImproperlyConfigured
from django.db import DatabaseError, connection
from django.db.backends.base.creation import BaseDatabaseCreation
from django.test import SimpleTestCase
try:
from django.db.backends.postgresql.psycopg_any import errors
except ImportError:
pass
else:
from django.db.backends.postgresql.creation import DatabaseCreation
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL tests")
class DatabaseCreationTests(SimpleTestCase):
@contextmanager
def changed_test_settings(self, **kwargs):
settings = connection.settings_dict["TEST"]
saved_values = {}
for name in kwargs:
if name in settings:
saved_values[name] = settings[name]
for name, value in kwargs.items():
settings[name] = value
try:
yield
finally:
for name in kwargs:
if name in saved_values:
settings[name] = saved_values[name]
else:
del settings[name]
def check_sql_table_creation_suffix(self, settings, expected):
with self.changed_test_settings(**settings):
creation = DatabaseCreation(connection)
suffix = creation.sql_table_creation_suffix()
self.assertEqual(suffix, expected)
def test_sql_table_creation_suffix_with_none_settings(self):
settings = {"CHARSET": None, "TEMPLATE": None}
self.check_sql_table_creation_suffix(settings, "")
def test_sql_table_creation_suffix_with_encoding(self):
settings = {"CHARSET": "UTF8"}
self.check_sql_table_creation_suffix(settings, "WITH ENCODING 'UTF8'")
def test_sql_table_creation_suffix_with_template(self):
settings = {"TEMPLATE": "template0"}
self.check_sql_table_creation_suffix(settings, 'WITH TEMPLATE "template0"')
def test_sql_table_creation_suffix_with_encoding_and_template(self):
settings = {"CHARSET": "UTF8", "TEMPLATE": "template0"}
self.check_sql_table_creation_suffix(
settings, '''WITH ENCODING 'UTF8' TEMPLATE "template0"'''
)
def test_sql_table_creation_raises_with_collation(self):
settings = {"COLLATION": "test"}
msg = (
"PostgreSQL does not support collation setting at database "
"creation time."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.check_sql_table_creation_suffix(settings, None)
def _execute_raise_database_already_exists(self, cursor, parameters, keepdb=False):
error = errors.DuplicateDatabase(
"database %s already exists" % parameters["dbname"]
)
raise DatabaseError() from error
def _execute_raise_permission_denied(self, cursor, parameters, keepdb=False):
error = errors.InsufficientPrivilege("permission denied to create database")
raise DatabaseError() from error
def patch_test_db_creation(self, execute_create_test_db):
return mock.patch.object(
BaseDatabaseCreation, "_execute_create_test_db", execute_create_test_db
)
@mock.patch("sys.stdout", new_callable=StringIO)
@mock.patch("sys.stderr", new_callable=StringIO)
def test_create_test_db(self, *mocked_objects):
creation = DatabaseCreation(connection)
# Simulate test database creation raising "database already exists"
with self.patch_test_db_creation(self._execute_raise_database_already_exists):
with mock.patch("builtins.input", return_value="no"):
with self.assertRaises(SystemExit):
# SystemExit is raised if the user answers "no" to the
# prompt asking if it's okay to delete the test database.
creation._create_test_db(
verbosity=0, autoclobber=False, keepdb=False
)
# "Database already exists" error is ignored when keepdb is on
creation._create_test_db(verbosity=0, autoclobber=False, keepdb=True)
# Simulate test database creation raising unexpected error
with self.patch_test_db_creation(self._execute_raise_permission_denied):
with mock.patch.object(
DatabaseCreation, "_database_exists", return_value=False
):
with self.assertRaises(SystemExit):
creation._create_test_db(
verbosity=0, autoclobber=False, keepdb=False
)
with self.assertRaises(SystemExit):
creation._create_test_db(
verbosity=0, autoclobber=False, keepdb=True
)
# Simulate test database creation raising "insufficient privileges".
# An error shouldn't appear when keepdb is on and the database already
# exists.
with self.patch_test_db_creation(self._execute_raise_permission_denied):
with mock.patch.object(
DatabaseCreation, "_database_exists", return_value=True
):
creation._create_test_db(verbosity=0, autoclobber=False, keepdb=True)
|
./temp_repos/django/django/db/backends/mysql/creation.py
|
./temp_repos/django/tests/backends/postgresql/test_creation.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DatabaseCreation'.
Context:
- Class Name: DatabaseCreation
- Dependencies to Mock: None detected
- Key Imports: client, subprocess, django.db.backends.base.creation, os, sys
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
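A complementary sketch focused on _clone_test_db's keepdb short-circuit, again with a fully mocked connection (assumes the mysql backend module is importable):
import unittest
from unittest import mock
from django.db.backends.mysql.creation import DatabaseCreation


class CloneTestDbKeepdbTests(unittest.TestCase):
    def test_keepdb_skips_clone_when_creation_fails(self):
        # Arrange
        connection = mock.MagicMock()
        connection.settings_dict = {
            "NAME": "source_db",
            "TEST": {"CHARSET": None, "COLLATION": None},
        }
        creation = DatabaseCreation(connection)
        with mock.patch.object(
            creation, "_nodb_cursor", mock.MagicMock()
        ), mock.patch.object(
            DatabaseCreation, "_execute_create_test_db", side_effect=Exception("exists")
        ), mock.patch.object(
            DatabaseCreation, "_clone_db"
        ) as clone_db:
            # Act: with keepdb=True a failed CREATE DATABASE is simply ignored.
            creation._clone_test_db("1", verbosity=0, keepdb=True)
        # Assert: the early return means the database is never cloned.
        clone_db.assert_not_called()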
|
DatabaseCreation
|
python
|
import re
from django.contrib.auth.views import (
INTERNAL_RESET_SESSION_TOKEN,
PasswordResetConfirmView,
)
from django.test import Client
def extract_token_from_url(url):
token_search = re.search(r"/reset/.*/(.+?)/", url)
if token_search:
return token_search[1]
class PasswordResetConfirmClient(Client):
"""
This client eases testing the password reset flow by emulating the
PasswordResetConfirmView's redirect and saving of the reset token in the
user's session. This request puts 'my-token' in the session and redirects
to '/reset/bla/set-password/':
>>> client = PasswordResetConfirmClient()
>>> client.get('/reset/bla/my-token/')
"""
reset_url_token = PasswordResetConfirmView.reset_url_token
def _get_password_reset_confirm_redirect_url(self, url):
token = extract_token_from_url(url)
if not token:
return url
# Add the token to the session
session = self.session
session[INTERNAL_RESET_SESSION_TOKEN] = token
session.save()
return url.replace(token, self.reset_url_token)
def get(self, path, *args, **kwargs):
redirect_url = self._get_password_reset_confirm_redirect_url(path)
return super().get(redirect_url, *args, **kwargs)
def post(self, path, *args, **kwargs):
redirect_url = self._get_password_reset_confirm_redirect_url(path)
return super().post(redirect_url, *args, **kwargs)
|
from unittest import mock
from django.db import connection
from django.db.backends.base.client import BaseDatabaseClient
from django.test import SimpleTestCase
class SimpleDatabaseClientTests(SimpleTestCase):
def setUp(self):
self.client = BaseDatabaseClient(connection=connection)
def test_settings_to_cmd_args_env(self):
msg = (
"subclasses of BaseDatabaseClient must provide a "
"settings_to_cmd_args_env() method or override a runshell()."
)
with self.assertRaisesMessage(NotImplementedError, msg):
self.client.settings_to_cmd_args_env(None, None)
def test_runshell_use_environ(self):
for env in [None, {}]:
with self.subTest(env=env):
with mock.patch("subprocess.run") as run:
with mock.patch.object(
BaseDatabaseClient,
"settings_to_cmd_args_env",
return_value=([], env),
):
self.client.runshell(None)
run.assert_called_once_with([], env=None, check=True)
|
./temp_repos/django/tests/auth_tests/client.py
|
./temp_repos/django/tests/backends/base/test_client.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'PasswordResetConfirmClient'.
Context:
- Class Name: PasswordResetConfirmClient
- Dependencies to Mock: None detected
- Key Imports: django.contrib.auth.views, django.test, re
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
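A hedged sketch; the import path auth_tests.client mirrors Django's own test-suite layout and is an assumption here, and a configured Django settings module is required because importing the module pulls in django.contrib.auth.views:
import unittest
from unittest import mock


class PasswordResetConfirmClientTests(unittest.TestCase):
    def test_extract_token_from_url(self):
        from auth_tests.client import extract_token_from_url  # assumed path
        # Act / Assert: success and failure of the regex helper.
        self.assertEqual(extract_token_from_url("/reset/uidb64/my-token/"), "my-token")
        self.assertIsNone(extract_token_from_url("/not-a-reset-url/"))

    def test_get_saves_token_and_rewrites_url(self):
        from django.test import Client
        from auth_tests.client import PasswordResetConfirmClient  # assumed path
        # Arrange: mock out the session property and the parent class' get().
        client = PasswordResetConfirmClient()
        session = mock.MagicMock()
        with mock.patch.object(
            PasswordResetConfirmClient,
            "session",
            new_callable=mock.PropertyMock,
            return_value=session,
        ), mock.patch.object(Client, "get") as super_get:
            # Act
            client.get("/reset/uidb64/my-token/")
        # Assert: the token went into the session and left the URL.
        session.save.assert_called_once_with()
        requested_url = super_get.call_args[0][0]
        self.assertNotIn("my-token", requested_url)
        self.assertIn(client.reset_url_token, requested_url)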
|
PasswordResetConfirmClient
|
python
|
import os
import subprocess
import sys
from django.db.backends.base.creation import BaseDatabaseCreation
from .client import DatabaseClient
class DatabaseCreation(BaseDatabaseCreation):
def sql_table_creation_suffix(self):
suffix = []
test_settings = self.connection.settings_dict["TEST"]
if test_settings["CHARSET"]:
suffix.append("CHARACTER SET %s" % test_settings["CHARSET"])
if test_settings["COLLATION"]:
suffix.append("COLLATE %s" % test_settings["COLLATION"])
return " ".join(suffix)
def _execute_create_test_db(self, cursor, parameters, keepdb=False):
try:
super()._execute_create_test_db(cursor, parameters, keepdb)
except Exception as e:
if len(e.args) < 1 or e.args[0] != 1007:
# All errors except "database exists" (1007) cancel tests.
self.log("Got an error creating the test database: %s" % e)
sys.exit(2)
else:
raise
def _clone_test_db(self, suffix, verbosity, keepdb=False):
source_database_name = self.connection.settings_dict["NAME"]
target_database_name = self.get_test_db_clone_settings(suffix)["NAME"]
test_db_params = {
"dbname": self.connection.ops.quote_name(target_database_name),
"suffix": self.sql_table_creation_suffix(),
}
with self._nodb_cursor() as cursor:
try:
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception:
if keepdb:
# If the database should be kept, skip everything else.
return
try:
if verbosity >= 1:
self.log(
"Destroying old test database for alias %s..."
% (
self._get_database_display_str(
verbosity, target_database_name
),
)
)
cursor.execute("DROP DATABASE %(dbname)s" % test_db_params)
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception as e:
self.log("Got an error recreating the test database: %s" % e)
sys.exit(2)
self._clone_db(source_database_name, target_database_name)
def _clone_db(self, source_database_name, target_database_name):
cmd_args, cmd_env = DatabaseClient.settings_to_cmd_args_env(
self.connection.settings_dict, []
)
dump_cmd = [
"mysqldump",
*cmd_args[1:-1],
"--routines",
"--events",
source_database_name,
]
dump_env = load_env = {**os.environ, **cmd_env} if cmd_env else None
load_cmd = cmd_args
load_cmd[-1] = target_database_name
with (
subprocess.Popen(
dump_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=dump_env
) as dump_proc,
subprocess.Popen(
load_cmd,
stdin=dump_proc.stdout,
stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE,
env=load_env,
) as load_proc,
):
# Allow dump_proc to receive a SIGPIPE if the load process exits.
dump_proc.stdout.close()
dump_err = dump_proc.stderr.read().decode(errors="replace")
load_err = load_proc.stderr.read().decode(errors="replace")
if dump_proc.returncode != 0:
self.log(
f"Got an error on mysqldump when cloning the test database: {dump_err}"
)
sys.exit(dump_proc.returncode)
if load_proc.returncode != 0:
self.log(f"Got an error cloning the test database: {load_err}")
sys.exit(load_proc.returncode)
|
import copy
import datetime
import os
from unittest import mock
from django.db import DEFAULT_DB_ALIAS, connection, connections
from django.db.backends.base.creation import TEST_DATABASE_PREFIX, BaseDatabaseCreation
from django.test import SimpleTestCase, TransactionTestCase
from django.test.utils import override_settings
from django.utils.deprecation import RemovedInDjango70Warning
from ..models import (
CircularA,
CircularB,
Object,
ObjectReference,
ObjectSelfReference,
SchoolBus,
SchoolClass,
)
def get_connection_copy():
# Get a copy of the default connection. (Can't use django.db.connection
# because it'll modify the default connection itself.)
test_connection = copy.copy(connections[DEFAULT_DB_ALIAS])
test_connection.settings_dict = copy.deepcopy(
connections[DEFAULT_DB_ALIAS].settings_dict
)
return test_connection
class TestDbSignatureTests(SimpleTestCase):
def test_default_name(self):
# A test db name isn't set.
prod_name = "hodor"
test_connection = get_connection_copy()
test_connection.settings_dict["NAME"] = prod_name
test_connection.settings_dict["TEST"] = {"NAME": None}
signature = BaseDatabaseCreation(test_connection).test_db_signature()
self.assertEqual(signature[3], TEST_DATABASE_PREFIX + prod_name)
def test_custom_test_name(self):
# A regular test db name is set.
test_name = "hodor"
test_connection = get_connection_copy()
test_connection.settings_dict["TEST"] = {"NAME": test_name}
signature = BaseDatabaseCreation(test_connection).test_db_signature()
self.assertEqual(signature[3], test_name)
def test_custom_test_name_with_test_prefix(self):
# A test db name prefixed with TEST_DATABASE_PREFIX is set.
test_name = TEST_DATABASE_PREFIX + "hodor"
test_connection = get_connection_copy()
test_connection.settings_dict["TEST"] = {"NAME": test_name}
signature = BaseDatabaseCreation(test_connection).test_db_signature()
self.assertEqual(signature[3], test_name)
@override_settings(INSTALLED_APPS=["backends.base.app_unmigrated"])
@mock.patch.object(connection, "ensure_connection")
@mock.patch.object(connection, "prepare_database")
@mock.patch(
"django.db.migrations.recorder.MigrationRecorder.has_table", return_value=False
)
@mock.patch("django.core.management.commands.migrate.Command.sync_apps")
class TestDbCreationTests(SimpleTestCase):
available_apps = ["backends.base.app_unmigrated"]
@mock.patch("django.db.migrations.executor.MigrationExecutor.migrate")
def test_migrate_test_setting_false(
self, mocked_migrate, mocked_sync_apps, *mocked_objects
):
test_connection = get_connection_copy()
test_connection.settings_dict["TEST"]["MIGRATE"] = False
creation = test_connection.creation_class(test_connection)
if connection.vendor == "oracle":
# Don't close connection on Oracle.
creation.connection.close = mock.Mock()
old_database_name = test_connection.settings_dict["NAME"]
try:
with mock.patch.object(creation, "_create_test_db"):
creation.create_test_db(verbosity=0, autoclobber=True)
# Migrations don't run.
mocked_migrate.assert_called()
args, kwargs = mocked_migrate.call_args
self.assertEqual(args, ([],))
self.assertEqual(kwargs["plan"], [])
# App is synced.
mocked_sync_apps.assert_called()
mocked_args, _ = mocked_sync_apps.call_args
self.assertEqual(mocked_args[1], {"app_unmigrated"})
finally:
with mock.patch.object(creation, "_destroy_test_db"):
creation.destroy_test_db(old_database_name, verbosity=0)
@mock.patch("django.db.migrations.executor.MigrationRecorder.ensure_schema")
def test_migrate_test_setting_false_ensure_schema(
self,
mocked_ensure_schema,
mocked_sync_apps,
*mocked_objects,
):
test_connection = get_connection_copy()
test_connection.settings_dict["TEST"]["MIGRATE"] = False
creation = test_connection.creation_class(test_connection)
if connection.vendor == "oracle":
# Don't close connection on Oracle.
creation.connection.close = mock.Mock()
old_database_name = test_connection.settings_dict["NAME"]
try:
with mock.patch.object(creation, "_create_test_db"):
creation.create_test_db(verbosity=0, autoclobber=True)
# The django_migrations table is not created.
mocked_ensure_schema.assert_not_called()
# App is synced.
mocked_sync_apps.assert_called()
mocked_args, _ = mocked_sync_apps.call_args
self.assertEqual(mocked_args[1], {"app_unmigrated"})
finally:
with mock.patch.object(creation, "_destroy_test_db"):
creation.destroy_test_db(old_database_name, verbosity=0)
@mock.patch("django.db.migrations.executor.MigrationExecutor.migrate")
def test_migrate_test_setting_true(
self, mocked_migrate, mocked_sync_apps, *mocked_objects
):
test_connection = get_connection_copy()
test_connection.settings_dict["TEST"]["MIGRATE"] = True
creation = test_connection.creation_class(test_connection)
if connection.vendor == "oracle":
# Don't close connection on Oracle.
creation.connection.close = mock.Mock()
old_database_name = test_connection.settings_dict["NAME"]
try:
with mock.patch.object(creation, "_create_test_db"):
creation.create_test_db(verbosity=0, autoclobber=True)
# Migrations run.
mocked_migrate.assert_called()
args, kwargs = mocked_migrate.call_args
self.assertEqual(args, ([("app_unmigrated", "0001_initial")],))
self.assertEqual(len(kwargs["plan"]), 1)
# App is not synced.
mocked_sync_apps.assert_not_called()
finally:
with mock.patch.object(creation, "_destroy_test_db"):
creation.destroy_test_db(old_database_name, verbosity=0)
@mock.patch.dict(os.environ, {"RUNNING_DJANGOS_TEST_SUITE": ""})
@mock.patch("django.db.migrations.executor.MigrationExecutor.migrate")
@mock.patch.object(BaseDatabaseCreation, "mark_expected_failures_and_skips")
def test_mark_expected_failures_and_skips_call(
self, mark_expected_failures_and_skips, *mocked_objects
):
"""
mark_expected_failures_and_skips() isn't called unless
RUNNING_DJANGOS_TEST_SUITE is 'true'.
"""
test_connection = get_connection_copy()
creation = test_connection.creation_class(test_connection)
if connection.vendor == "oracle":
# Don't close connection on Oracle.
creation.connection.close = mock.Mock()
old_database_name = test_connection.settings_dict["NAME"]
try:
with mock.patch.object(creation, "_create_test_db"):
creation.create_test_db(verbosity=0, autoclobber=True)
self.assertIs(mark_expected_failures_and_skips.called, False)
finally:
with mock.patch.object(creation, "_destroy_test_db"):
creation.destroy_test_db(old_database_name, verbosity=0)
@mock.patch("django.db.migrations.executor.MigrationExecutor.migrate")
@mock.patch.object(BaseDatabaseCreation, "serialize_db_to_string")
def test_serialize_deprecation(self, serialize_db_to_string, *mocked_objects):
test_connection = get_connection_copy()
creation = test_connection.creation_class(test_connection)
if connection.vendor == "oracle":
# Don't close connection on Oracle.
creation.connection.close = mock.Mock()
old_database_name = test_connection.settings_dict["NAME"]
msg = (
"DatabaseCreation.create_test_db(serialize) is deprecated. Call "
"DatabaseCreation.serialize_test_db() once all test databases are set up "
"instead if you need fixtures persistence between tests."
)
try:
with (
self.assertWarnsMessage(RemovedInDjango70Warning, msg) as ctx,
mock.patch.object(creation, "_create_test_db"),
):
creation.create_test_db(verbosity=0, serialize=True)
self.assertEqual(ctx.filename, __file__)
serialize_db_to_string.assert_called_once_with()
finally:
with mock.patch.object(creation, "_destroy_test_db"):
creation.destroy_test_db(old_database_name, verbosity=0)
# Now with `serialize` False.
serialize_db_to_string.reset_mock()
try:
with (
self.assertWarnsMessage(RemovedInDjango70Warning, msg) as ctx,
mock.patch.object(creation, "_create_test_db"),
):
creation.create_test_db(verbosity=0, serialize=False)
self.assertEqual(ctx.filename, __file__)
serialize_db_to_string.assert_not_called()
finally:
with mock.patch.object(creation, "_destroy_test_db"):
creation.destroy_test_db(old_database_name, verbosity=0)
class TestDeserializeDbFromString(TransactionTestCase):
available_apps = ["backends"]
def test_circular_reference(self):
# deserialize_db_from_string() handles circular references.
data = """
[
{
"model": "backends.object",
"pk": 1,
"fields": {"obj_ref": 1, "related_objects": []}
},
{
"model": "backends.objectreference",
"pk": 1,
"fields": {"obj": 1}
}
]
"""
connection.creation.deserialize_db_from_string(data)
obj = Object.objects.get()
obj_ref = ObjectReference.objects.get()
self.assertEqual(obj.obj_ref, obj_ref)
self.assertEqual(obj_ref.obj, obj)
def test_self_reference(self):
# serialize_db_to_string() and deserialize_db_from_string() handles
# self references.
obj_1 = ObjectSelfReference.objects.create(key="X")
obj_2 = ObjectSelfReference.objects.create(key="Y", obj=obj_1)
obj_1.obj = obj_2
obj_1.save()
# Serialize objects.
with mock.patch("django.db.migrations.loader.MigrationLoader") as loader:
# serialize_db_to_string() serializes only migrated apps, so mark
# the backends app as migrated.
loader_instance = loader.return_value
loader_instance.migrated_apps = {"backends"}
data = connection.creation.serialize_db_to_string()
ObjectSelfReference.objects.all().delete()
# Deserialize objects.
connection.creation.deserialize_db_from_string(data)
obj_1 = ObjectSelfReference.objects.get(key="X")
obj_2 = ObjectSelfReference.objects.get(key="Y")
self.assertEqual(obj_1.obj, obj_2)
self.assertEqual(obj_2.obj, obj_1)
def test_circular_reference_with_natural_key(self):
# serialize_db_to_string() and deserialize_db_from_string() handles
# circular references for models with natural keys.
obj_a = CircularA.objects.create(key="A")
obj_b = CircularB.objects.create(key="B", obj=obj_a)
obj_a.obj = obj_b
obj_a.save()
# Serialize objects.
with mock.patch("django.db.migrations.loader.MigrationLoader") as loader:
# serialize_db_to_string() serializes only migrated apps, so mark
# the backends app as migrated.
loader_instance = loader.return_value
loader_instance.migrated_apps = {"backends"}
data = connection.creation.serialize_db_to_string()
CircularA.objects.all().delete()
CircularB.objects.all().delete()
# Deserialize objects.
connection.creation.deserialize_db_from_string(data)
obj_a = CircularA.objects.get()
obj_b = CircularB.objects.get()
self.assertEqual(obj_a.obj, obj_b)
self.assertEqual(obj_b.obj, obj_a)
def test_serialize_db_to_string_base_manager(self):
SchoolClass.objects.create(year=1000, last_updated=datetime.datetime.now())
with mock.patch("django.db.migrations.loader.MigrationLoader") as loader:
# serialize_db_to_string() serializes only migrated apps, so mark
# the backends app as migrated.
loader_instance = loader.return_value
loader_instance.migrated_apps = {"backends"}
data = connection.creation.serialize_db_to_string()
self.assertIn('"model": "backends.schoolclass"', data)
self.assertIn('"year": 1000', data)
def test_serialize_db_to_string_base_manager_with_prefetch_related(self):
sclass = SchoolClass.objects.create(
year=2000, last_updated=datetime.datetime.now()
)
bus = SchoolBus.objects.create(number=1)
bus.schoolclasses.add(sclass)
with mock.patch("django.db.migrations.loader.MigrationLoader") as loader:
# serialize_db_to_string() serializes only migrated apps, so mark
# the backends app as migrated.
loader_instance = loader.return_value
loader_instance.migrated_apps = {"backends"}
data = connection.creation.serialize_db_to_string()
self.assertIn('"model": "backends.schoolbus"', data)
self.assertIn('"model": "backends.schoolclass"', data)
self.assertIn(f'"schoolclasses": [{sclass.pk}]', data)
class SkipTestClass:
def skip_function(self):
pass
def skip_test_function():
pass
def expected_failure_test_function():
pass
class TestMarkTests(SimpleTestCase):
def test_mark_expected_failures_and_skips(self):
test_connection = get_connection_copy()
creation = BaseDatabaseCreation(test_connection)
creation.connection.features.django_test_expected_failures = {
"backends.base.test_creation.expected_failure_test_function",
}
creation.connection.features.django_test_skips = {
"skip test class": {
"backends.base.test_creation.SkipTestClass",
},
"skip test function": {
"backends.base.test_creation.skip_test_function",
},
}
creation.mark_expected_failures_and_skips()
self.assertIs(
expected_failure_test_function.__unittest_expecting_failure__,
True,
)
self.assertIs(SkipTestClass.__unittest_skip__, True)
self.assertEqual(
SkipTestClass.__unittest_skip_why__,
"skip test class",
)
self.assertIs(skip_test_function.__unittest_skip__, True)
self.assertEqual(
skip_test_function.__unittest_skip_why__,
"skip test function",
)
|
./temp_repos/django/django/db/backends/mysql/creation.py
|
./temp_repos/django/tests/backends/base/test_creation.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DatabaseCreation'.
Context:
- Class Name: DatabaseCreation
- Dependencies to Mock: None detected
- Key Imports: client, subprocess, django.db.backends.base.creation, os, sys
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
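And a sketch of the happy path through _clone_db itself, with subprocess.Popen and the client's settings_to_cmd_args_env both mocked (assumes the mysql backend module is importable):
import subprocess
import unittest
from unittest import mock
from django.db.backends.mysql.creation import DatabaseCreation


class CloneDbSuccessTests(unittest.TestCase):
    def test_clone_db_spawns_dump_and_load(self):
        # Arrange
        creation = DatabaseCreation(mock.MagicMock())
        proc = mock.MagicMock()
        proc.returncode = 0
        proc.stderr.read.return_value = b""
        with mock.patch(
            "django.db.backends.mysql.creation.DatabaseClient.settings_to_cmd_args_env",
            return_value=(["mysql", "source_db"], None),
        ), mock.patch.object(subprocess, "Popen") as popen:
            popen.return_value.__enter__.return_value = proc
            # Act
            creation._clone_db("source_db", "target_db")
        # Assert: one mysqldump process and one load process were started.
        self.assertEqual(popen.call_count, 2)
        dump_cmd = popen.call_args_list[0][0][0]
        load_cmd = popen.call_args_list[1][0][0]
        self.assertEqual(dump_cmd, ["mysqldump", "--routines", "--events", "source_db"])
        self.assertEqual(load_cmd, ["mysql", "target_db"])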
|
DatabaseCreation
|
python
|
from django.apps import apps
from django.contrib.sites.models import Site
from django.core.cache import cache
from django.test import TestCase, modify_settings, override_settings
from .models import I18nTestModel, TestModel
@modify_settings(INSTALLED_APPS={"append": "django.contrib.sitemaps"})
@override_settings(ROOT_URLCONF="sitemaps_tests.urls.http")
class SitemapTestsBase(TestCase):
protocol = "http"
sites_installed = apps.is_installed("django.contrib.sites")
domain = "example.com" if sites_installed else "testserver"
@classmethod
def setUpTestData(cls):
# Create an object for sitemap content.
TestModel.objects.create(name="Test Object")
cls.i18n_model = I18nTestModel.objects.create(name="Test Object")
def setUp(self):
self.base_url = "%s://%s" % (self.protocol, self.domain)
cache.clear()
@classmethod
def setUpClass(cls):
super().setUpClass()
# This cleanup is necessary because contrib.sites cache
# makes tests interfere with each other, see #11505
Site.objects.clear_cache()
|
import gc
from unittest.mock import MagicMock, patch
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.db.backends.base.base import BaseDatabaseWrapper
from django.test import (
SimpleTestCase,
TestCase,
TransactionTestCase,
skipUnlessDBFeature,
)
from django.test.runner import DebugSQLTextTestResult
from django.test.utils import CaptureQueriesContext, override_settings
from ..models import Person, Square
class DatabaseWrapperTests(SimpleTestCase):
def test_repr(self):
conn = connections[DEFAULT_DB_ALIAS]
self.assertEqual(
repr(conn),
f"<DatabaseWrapper vendor={connection.vendor!r} alias='default'>",
)
def test_initialization_class_attributes(self):
"""
The "initialization" class attributes like client_class and
creation_class should be set on the class and reflected in the
corresponding instance attributes of the instantiated backend.
"""
conn = connections[DEFAULT_DB_ALIAS]
conn_class = type(conn)
attr_names = [
("client_class", "client"),
("creation_class", "creation"),
("features_class", "features"),
("introspection_class", "introspection"),
("ops_class", "ops"),
("validation_class", "validation"),
]
for class_attr_name, instance_attr_name in attr_names:
class_attr_value = getattr(conn_class, class_attr_name)
self.assertIsNotNone(class_attr_value)
instance_attr_value = getattr(conn, instance_attr_name)
self.assertIsInstance(instance_attr_value, class_attr_value)
def test_initialization_display_name(self):
self.assertEqual(BaseDatabaseWrapper.display_name, "unknown")
self.assertNotEqual(connection.display_name, "unknown")
def test_get_database_version(self):
with patch.object(BaseDatabaseWrapper, "__init__", return_value=None):
msg = (
"subclasses of BaseDatabaseWrapper may require a "
"get_database_version() method."
)
with self.assertRaisesMessage(NotImplementedError, msg):
BaseDatabaseWrapper().get_database_version()
def test_check_database_version_supported_with_none_as_database_version(self):
with patch.object(connection.features, "minimum_database_version", None):
connection.check_database_version_supported()
def test_release_memory_without_garbage_collection(self):
# Schedule the restore of the garbage collection settings.
self.addCleanup(gc.set_debug, 0)
self.addCleanup(gc.enable)
# Disable automatic garbage collection to control when it's triggered,
# then run a full collection cycle to ensure `gc.garbage` is empty.
gc.disable()
gc.collect()
# The garbage list isn't automatically populated to avoid CPU overhead,
# so debugging needs to be enabled to track all unreachable items and
# have them stored in `gc.garbage`.
gc.set_debug(gc.DEBUG_SAVEALL)
# Create a new connection that will be closed during the test, and also
# ensure that a `DatabaseErrorWrapper` is created for this connection.
test_connection = connection.copy()
with test_connection.wrap_database_errors:
self.assertEqual(test_connection.queries, [])
# Close the connection and remove references to it. This will mark all
# objects related to the connection as garbage to be collected.
test_connection.close()
test_connection = None
# Enforce garbage collection to populate `gc.garbage` for inspection.
gc.collect()
self.assertEqual(gc.garbage, [])
class DatabaseWrapperLoggingTests(TransactionTestCase):
available_apps = ["backends"]
@override_settings(DEBUG=True)
def test_commit_debug_log(self):
conn = connections[DEFAULT_DB_ALIAS]
with CaptureQueriesContext(conn):
with self.assertLogs("django.db.backends", "DEBUG") as cm:
with transaction.atomic():
Person.objects.create(first_name="first", last_name="last")
self.assertGreaterEqual(len(conn.queries_log), 3)
self.assertEqual(conn.queries_log[-3]["sql"], "BEGIN")
self.assertRegex(
cm.output[0],
r"DEBUG:django.db.backends:\(\d+.\d{3}\) "
rf"BEGIN; args=None; alias={DEFAULT_DB_ALIAS}",
)
self.assertEqual(conn.queries_log[-1]["sql"], "COMMIT")
self.assertRegex(
cm.output[-1],
r"DEBUG:django.db.backends:\(\d+.\d{3}\) "
rf"COMMIT; args=None; alias={DEFAULT_DB_ALIAS}",
)
@override_settings(DEBUG=True)
def test_rollback_debug_log(self):
conn = connections[DEFAULT_DB_ALIAS]
with CaptureQueriesContext(conn):
with self.assertLogs("django.db.backends", "DEBUG") as cm:
with self.assertRaises(Exception), transaction.atomic():
Person.objects.create(first_name="first", last_name="last")
raise Exception("Force rollback")
self.assertEqual(conn.queries_log[-1]["sql"], "ROLLBACK")
self.assertRegex(
cm.output[-1],
r"DEBUG:django.db.backends:\(\d+.\d{3}\) "
rf"ROLLBACK; args=None; alias={DEFAULT_DB_ALIAS}",
)
def test_no_logs_without_debug(self):
if isinstance(self._outcome.result, DebugSQLTextTestResult):
self.skipTest("--debug-sql interferes with this test")
with self.assertNoLogs("django.db.backends", "DEBUG"):
with self.assertRaises(Exception), transaction.atomic():
Person.objects.create(first_name="first", last_name="last")
raise Exception("Force rollback")
conn = connections[DEFAULT_DB_ALIAS]
self.assertEqual(len(conn.queries_log), 0)
class ExecuteWrapperTests(TestCase):
@staticmethod
def call_execute(connection, params=None):
ret_val = "1" if params is None else "%s"
sql = "SELECT " + ret_val + connection.features.bare_select_suffix
with connection.cursor() as cursor:
cursor.execute(sql, params)
def call_executemany(self, connection, params=None):
# executemany() must use an update query. Make sure it does nothing
# by putting a false condition in the WHERE clause.
sql = "DELETE FROM {} WHERE 0=1 AND 0=%s".format(Square._meta.db_table)
if params is None:
params = [(i,) for i in range(3)]
with connection.cursor() as cursor:
cursor.executemany(sql, params)
@staticmethod
def mock_wrapper():
return MagicMock(side_effect=lambda execute, *args: execute(*args))
def test_wrapper_invoked(self):
wrapper = self.mock_wrapper()
with connection.execute_wrapper(wrapper):
self.call_execute(connection)
self.assertTrue(wrapper.called)
(_, sql, params, many, context), _ = wrapper.call_args
self.assertIn("SELECT", sql)
self.assertIsNone(params)
self.assertIs(many, False)
self.assertEqual(context["connection"], connection)
def test_wrapper_invoked_many(self):
wrapper = self.mock_wrapper()
with connection.execute_wrapper(wrapper):
self.call_executemany(connection)
self.assertTrue(wrapper.called)
(_, sql, param_list, many, context), _ = wrapper.call_args
self.assertIn("DELETE", sql)
self.assertIsInstance(param_list, (list, tuple))
self.assertIs(many, True)
self.assertEqual(context["connection"], connection)
def test_database_queried(self):
wrapper = self.mock_wrapper()
with connection.execute_wrapper(wrapper):
with connection.cursor() as cursor:
sql = "SELECT 17" + connection.features.bare_select_suffix
cursor.execute(sql)
seventeen = cursor.fetchall()
self.assertEqual(list(seventeen), [(17,)])
self.call_executemany(connection)
def test_nested_wrapper_invoked(self):
outer_wrapper = self.mock_wrapper()
inner_wrapper = self.mock_wrapper()
with (
connection.execute_wrapper(outer_wrapper),
connection.execute_wrapper(inner_wrapper),
):
self.call_execute(connection)
self.assertEqual(inner_wrapper.call_count, 1)
self.call_executemany(connection)
self.assertEqual(inner_wrapper.call_count, 2)
def test_outer_wrapper_blocks(self):
def blocker(*args):
pass
wrapper = self.mock_wrapper()
c = connection # This alias shortens the next line.
with (
c.execute_wrapper(wrapper),
c.execute_wrapper(blocker),
c.execute_wrapper(wrapper),
):
with c.cursor() as cursor:
cursor.execute("The database never sees this")
self.assertEqual(wrapper.call_count, 1)
cursor.executemany("The database never sees this %s", [("either",)])
self.assertEqual(wrapper.call_count, 2)
def test_wrapper_gets_sql(self):
wrapper = self.mock_wrapper()
sql = "SELECT 'aloha'" + connection.features.bare_select_suffix
with connection.execute_wrapper(wrapper), connection.cursor() as cursor:
cursor.execute(sql)
(_, reported_sql, _, _, _), _ = wrapper.call_args
self.assertEqual(reported_sql, sql)
def test_wrapper_connection_specific(self):
wrapper = self.mock_wrapper()
with connections["other"].execute_wrapper(wrapper):
self.assertEqual(connections["other"].execute_wrappers, [wrapper])
self.call_execute(connection)
self.assertFalse(wrapper.called)
self.assertEqual(connection.execute_wrappers, [])
self.assertEqual(connections["other"].execute_wrappers, [])
def test_wrapper_debug(self):
def wrap_with_comment(execute, sql, params, many, context):
return execute(f"/* My comment */ {sql}", params, many, context)
with CaptureQueriesContext(connection) as ctx:
with connection.execute_wrapper(wrap_with_comment):
list(Person.objects.all())
last_query = ctx.captured_queries[-1]["sql"]
self.assertTrue(last_query.startswith("/* My comment */"))
class ConnectionHealthChecksTests(SimpleTestCase):
databases = {"default"}
def setUp(self):
# All test cases here need newly configured and created connections.
# Use the default db connection for convenience.
connection.close()
self.addCleanup(connection.close)
def patch_settings_dict(self, conn_health_checks):
self.settings_dict_patcher = patch.dict(
connection.settings_dict,
{
**connection.settings_dict,
"CONN_MAX_AGE": None,
"CONN_HEALTH_CHECKS": conn_health_checks,
},
)
self.settings_dict_patcher.start()
self.addCleanup(self.settings_dict_patcher.stop)
def run_query(self):
with connection.cursor() as cursor:
cursor.execute("SELECT 42" + connection.features.bare_select_suffix)
@skipUnlessDBFeature("test_db_allows_multiple_connections")
def test_health_checks_enabled(self):
self.patch_settings_dict(conn_health_checks=True)
self.assertIsNone(connection.connection)
# Newly created connections are considered healthy without performing
# the health check.
with patch.object(connection, "is_usable", side_effect=AssertionError):
self.run_query()
old_connection = connection.connection
# Simulate request_finished.
connection.close_if_unusable_or_obsolete()
self.assertIs(old_connection, connection.connection)
# Simulate connection health check failing.
with patch.object(
connection, "is_usable", return_value=False
) as mocked_is_usable:
self.run_query()
new_connection = connection.connection
# A new connection is established.
self.assertIsNot(new_connection, old_connection)
            # Only one health check per "request" is performed, so the next
            # query carries on even if the health check fails. It succeeds
            # because the real connection is healthy and only the health
            # check failure is mocked.
self.run_query()
self.assertIs(new_connection, connection.connection)
self.assertEqual(mocked_is_usable.call_count, 1)
# Simulate request_finished.
connection.close_if_unusable_or_obsolete()
# The underlying connection is being reused further with health checks
# succeeding.
self.run_query()
self.run_query()
self.assertIs(new_connection, connection.connection)
@skipUnlessDBFeature("test_db_allows_multiple_connections")
def test_health_checks_enabled_errors_occurred(self):
self.patch_settings_dict(conn_health_checks=True)
self.assertIsNone(connection.connection)
# Newly created connections are considered healthy without performing
# the health check.
with patch.object(connection, "is_usable", side_effect=AssertionError):
self.run_query()
old_connection = connection.connection
# Simulate errors_occurred.
connection.errors_occurred = True
# Simulate request_started (the connection is healthy).
connection.close_if_unusable_or_obsolete()
# Persistent connections are enabled.
self.assertIs(old_connection, connection.connection)
# No additional health checks after the one in
# close_if_unusable_or_obsolete() are executed during this "request"
# when running queries.
with patch.object(connection, "is_usable", side_effect=AssertionError):
self.run_query()
@skipUnlessDBFeature("test_db_allows_multiple_connections")
def test_health_checks_disabled(self):
self.patch_settings_dict(conn_health_checks=False)
self.assertIsNone(connection.connection)
# Newly created connections are considered healthy without performing
# the health check.
with patch.object(connection, "is_usable", side_effect=AssertionError):
self.run_query()
old_connection = connection.connection
# Simulate request_finished.
connection.close_if_unusable_or_obsolete()
        # Persistent connections are enabled (the connection is not closed).
self.assertIs(old_connection, connection.connection)
# Health checks are not performed.
with patch.object(connection, "is_usable", side_effect=AssertionError):
self.run_query()
# Health check wasn't performed and the connection is unchanged.
self.assertIs(old_connection, connection.connection)
self.run_query()
        # The connection also remains unchanged after the next query
        # during the current "request".
self.assertIs(old_connection, connection.connection)
@skipUnlessDBFeature("test_db_allows_multiple_connections")
def test_set_autocommit_health_checks_enabled(self):
self.patch_settings_dict(conn_health_checks=True)
self.assertIsNone(connection.connection)
# Newly created connections are considered healthy without performing
# the health check.
with patch.object(connection, "is_usable", side_effect=AssertionError):
# Simulate outermost atomic block: changing autocommit for
# a connection.
connection.set_autocommit(False)
self.run_query()
connection.commit()
connection.set_autocommit(True)
old_connection = connection.connection
# Simulate request_finished.
connection.close_if_unusable_or_obsolete()
# Persistent connections are enabled.
self.assertIs(old_connection, connection.connection)
# Simulate connection health check failing.
with patch.object(
connection, "is_usable", return_value=False
) as mocked_is_usable:
# Simulate outermost atomic block: changing autocommit for
# a connection.
connection.set_autocommit(False)
new_connection = connection.connection
self.assertIsNot(new_connection, old_connection)
# Only one health check per "request" is performed, so a query will
# carry on even if the health check fails. This query succeeds
# because the real connection is healthy and only the health check
# failure is mocked.
self.run_query()
connection.commit()
connection.set_autocommit(True)
# The connection is unchanged.
self.assertIs(new_connection, connection.connection)
self.assertEqual(mocked_is_usable.call_count, 1)
# Simulate request_finished.
connection.close_if_unusable_or_obsolete()
# The underlying connection is being reused further with health checks
# succeeding.
connection.set_autocommit(False)
self.run_query()
connection.commit()
connection.set_autocommit(True)
self.assertIs(new_connection, connection.connection)
class MultiDatabaseTests(TestCase):
databases = {"default", "other"}
def test_multi_database_init_connection_state_called_once(self):
for db in self.databases:
with self.subTest(database=db):
with patch.object(connections[db], "commit", return_value=None):
with patch.object(
connections[db],
"check_database_version_supported",
) as mocked_check_database_version_supported:
connections[db].init_connection_state()
after_first_calls = len(
mocked_check_database_version_supported.mock_calls
)
connections[db].init_connection_state()
self.assertEqual(
len(mocked_check_database_version_supported.mock_calls),
after_first_calls,
)
|
./temp_repos/django/tests/sitemaps_tests/base.py
|
./temp_repos/django/tests/backends/base/test_base.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'SitemapTestsBase'.
Context:
- Class Name: SitemapTestsBase
- Dependencies to Mock: None detected
- Key Imports: django.test, django.contrib.sites.models, django.core.cache, models, django.apps
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
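A hedged sketch; the import path sitemaps_tests.base matches Django's test-suite layout and is an assumption, and a configured settings module is needed because the module imports models at load time. The cache is mocked so setUp() can run on a bare instance:
import unittest
from unittest import mock


class SitemapTestsBaseConfigTests(unittest.TestCase):
    def test_setup_builds_base_url_and_clears_cache(self):
        from sitemaps_tests.base import SitemapTestsBase  # assumed path
        # Arrange: bypass TestCase machinery; only setUp() is exercised.
        instance = SitemapTestsBase.__new__(SitemapTestsBase)
        with mock.patch("sitemaps_tests.base.cache") as mocked_cache:
            # Act
            instance.setUp()
        # Assert
        self.assertEqual(
            instance.base_url,
            "%s://%s" % (SitemapTestsBase.protocol, SitemapTestsBase.domain),
        )
        mocked_cache.clear.assert_called_once_with()

    def test_domain_matches_sites_availability(self):
        from sitemaps_tests.base import SitemapTestsBase  # assumed path
        # Act / Assert: domain was chosen from apps.is_installed at class
        # definition time, so both branches stay consistent with that flag.
        expected = "example.com" if SitemapTestsBase.sites_installed else "testserver"
        self.assertEqual(SitemapTestsBase.domain, expected)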
|
SitemapTestsBase
|
python
|
import os
import subprocess
import sys
from django.db.backends.base.creation import BaseDatabaseCreation
from .client import DatabaseClient
class DatabaseCreation(BaseDatabaseCreation):
def sql_table_creation_suffix(self):
suffix = []
test_settings = self.connection.settings_dict["TEST"]
if test_settings["CHARSET"]:
suffix.append("CHARACTER SET %s" % test_settings["CHARSET"])
if test_settings["COLLATION"]:
suffix.append("COLLATE %s" % test_settings["COLLATION"])
return " ".join(suffix)
def _execute_create_test_db(self, cursor, parameters, keepdb=False):
try:
super()._execute_create_test_db(cursor, parameters, keepdb)
except Exception as e:
if len(e.args) < 1 or e.args[0] != 1007:
# All errors except "database exists" (1007) cancel tests.
self.log("Got an error creating the test database: %s" % e)
sys.exit(2)
else:
raise
def _clone_test_db(self, suffix, verbosity, keepdb=False):
source_database_name = self.connection.settings_dict["NAME"]
target_database_name = self.get_test_db_clone_settings(suffix)["NAME"]
test_db_params = {
"dbname": self.connection.ops.quote_name(target_database_name),
"suffix": self.sql_table_creation_suffix(),
}
with self._nodb_cursor() as cursor:
try:
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception:
if keepdb:
# If the database should be kept, skip everything else.
return
try:
if verbosity >= 1:
self.log(
"Destroying old test database for alias %s..."
% (
self._get_database_display_str(
verbosity, target_database_name
),
)
)
cursor.execute("DROP DATABASE %(dbname)s" % test_db_params)
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception as e:
self.log("Got an error recreating the test database: %s" % e)
sys.exit(2)
self._clone_db(source_database_name, target_database_name)
def _clone_db(self, source_database_name, target_database_name):
cmd_args, cmd_env = DatabaseClient.settings_to_cmd_args_env(
self.connection.settings_dict, []
)
dump_cmd = [
"mysqldump",
*cmd_args[1:-1],
"--routines",
"--events",
source_database_name,
]
dump_env = load_env = {**os.environ, **cmd_env} if cmd_env else None
load_cmd = cmd_args
load_cmd[-1] = target_database_name
with (
subprocess.Popen(
dump_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=dump_env
) as dump_proc,
subprocess.Popen(
load_cmd,
stdin=dump_proc.stdout,
stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE,
env=load_env,
) as load_proc,
):
# Allow dump_proc to receive a SIGPIPE if the load process exits.
dump_proc.stdout.close()
dump_err = dump_proc.stderr.read().decode(errors="replace")
load_err = load_proc.stderr.read().decode(errors="replace")
if dump_proc.returncode != 0:
self.log(
f"Got an error on mysqldump when cloning the test database: {dump_err}"
)
sys.exit(dump_proc.returncode)
if load_proc.returncode != 0:
self.log(f"Got an error cloning the test database: {load_err}")
sys.exit(load_proc.returncode)
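# A hedged, POSIX-only sketch of the same dump-into-load pipe arrangement,
# with placeholder commands standing in for mysqldump/mysql:
#
#     import subprocess
#     with (
#         subprocess.Popen(["echo", "dump"], stdout=subprocess.PIPE) as dump_proc,
#         subprocess.Popen(
#             ["cat"], stdin=dump_proc.stdout, stdout=subprocess.DEVNULL
#         ) as load_proc,
#     ):
#         # Closing our handle lets dump_proc get SIGPIPE if the loader dies.
#         dump_proc.stdout.close()
#     assert dump_proc.returncode == 0 and load_proc.returncode == 0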
|
import unittest
from io import StringIO
from unittest import mock
from django.db import DatabaseError, connection
from django.db.backends.oracle.creation import DatabaseCreation
from django.test import TestCase
@unittest.skipUnless(connection.vendor == "oracle", "Oracle tests")
@mock.patch.object(DatabaseCreation, "_maindb_connection", return_value=connection)
@mock.patch("sys.stdout", new_callable=StringIO)
@mock.patch("sys.stderr", new_callable=StringIO)
class DatabaseCreationTests(TestCase):
def _execute_raise_user_already_exists(
self, cursor, statements, parameters, verbosity, allow_quiet_fail=False
):
# Raise "user already exists" only in test user creation
if statements and statements[0].startswith("CREATE USER"):
raise DatabaseError(
"ORA-01920: user name 'string' conflicts with another user or role name"
)
def _execute_raise_tablespace_already_exists(
self, cursor, statements, parameters, verbosity, allow_quiet_fail=False
):
raise DatabaseError("ORA-01543: tablespace 'string' already exists")
def _execute_raise_insufficient_privileges(
self, cursor, statements, parameters, verbosity, allow_quiet_fail=False
):
raise DatabaseError("ORA-01031: insufficient privileges")
def _test_database_passwd(self):
# Mocked to avoid changing the test user's password.
return connection.settings_dict["SAVED_PASSWORD"]
def patch_execute_statements(self, execute_statements):
return mock.patch.object(
DatabaseCreation, "_execute_statements", execute_statements
)
@mock.patch.object(DatabaseCreation, "_test_user_create", return_value=False)
def test_create_test_db(self, *mocked_objects):
creation = DatabaseCreation(connection)
# Simulate test database creation raising "tablespace already exists"
with self.patch_execute_statements(
self._execute_raise_tablespace_already_exists
):
with mock.patch("builtins.input", return_value="no"):
with self.assertRaises(SystemExit):
# SystemExit is raised if the user answers "no" to the
# prompt asking if it's okay to delete the test tablespace.
creation._create_test_db(verbosity=0, keepdb=False)
# "Tablespace already exists" error is ignored when keepdb is on
creation._create_test_db(verbosity=0, keepdb=True)
# Simulate test database creation raising unexpected error
with self.patch_execute_statements(self._execute_raise_insufficient_privileges):
with self.assertRaises(SystemExit):
creation._create_test_db(verbosity=0, keepdb=False)
with self.assertRaises(SystemExit):
creation._create_test_db(verbosity=0, keepdb=True)
@mock.patch.object(DatabaseCreation, "_test_database_create", return_value=False)
def test_create_test_user(self, *mocked_objects):
creation = DatabaseCreation(connection)
with mock.patch.object(
DatabaseCreation, "_test_database_passwd", self._test_database_passwd
):
# Simulate test user creation raising "user already exists"
with self.patch_execute_statements(self._execute_raise_user_already_exists):
with mock.patch("builtins.input", return_value="no"):
with self.assertRaises(SystemExit):
# SystemExit is raised if the user answers "no" to the
# prompt asking if it's okay to delete the test user.
creation._create_test_db(verbosity=0, keepdb=False)
# "User already exists" error is ignored when keepdb is on
creation._create_test_db(verbosity=0, keepdb=True)
# Simulate test user creation raising unexpected error
with self.patch_execute_statements(
self._execute_raise_insufficient_privileges
):
with self.assertRaises(SystemExit):
creation._create_test_db(verbosity=0, keepdb=False)
with self.assertRaises(SystemExit):
creation._create_test_db(verbosity=0, keepdb=True)
def test_oracle_managed_files(self, *mocked_objects):
def _execute_capture_statements(
self, cursor, statements, parameters, verbosity, allow_quiet_fail=False
):
self.tblspace_sqls = statements
creation = DatabaseCreation(connection)
# Simulate test database creation with Oracle Managed File (OMF)
# tablespaces.
with mock.patch.object(
DatabaseCreation, "_test_database_oracle_managed_files", return_value=True
):
with self.patch_execute_statements(_execute_capture_statements):
with connection.cursor() as cursor:
creation._execute_test_db_creation(
cursor, creation._get_test_db_params(), verbosity=0
)
tblspace_sql, tblspace_tmp_sql = creation.tblspace_sqls
# Datafile names shouldn't appear.
self.assertIn("DATAFILE SIZE", tblspace_sql)
self.assertIn("TEMPFILE SIZE", tblspace_tmp_sql)
# REUSE cannot be used with OMF.
self.assertNotIn("REUSE", tblspace_sql)
self.assertNotIn("REUSE", tblspace_tmp_sql)
|
./temp_repos/django/django/db/backends/mysql/creation.py
|
./temp_repos/django/tests/backends/oracle/test_creation.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DatabaseCreation'.
Context:
- Class Name: DatabaseCreation
- Dependencies to Mock: None detected
- Key Imports: client, subprocess, django.db.backends.base.creation, os, sys
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
DatabaseCreation
|
python
|
import os
import subprocess
import sys
from django.db.backends.base.creation import BaseDatabaseCreation
from .client import DatabaseClient
class DatabaseCreation(BaseDatabaseCreation):
def sql_table_creation_suffix(self):
suffix = []
test_settings = self.connection.settings_dict["TEST"]
if test_settings["CHARSET"]:
suffix.append("CHARACTER SET %s" % test_settings["CHARSET"])
if test_settings["COLLATION"]:
suffix.append("COLLATE %s" % test_settings["COLLATION"])
return " ".join(suffix)
def _execute_create_test_db(self, cursor, parameters, keepdb=False):
try:
super()._execute_create_test_db(cursor, parameters, keepdb)
except Exception as e:
if len(e.args) < 1 or e.args[0] != 1007:
# All errors except "database exists" (1007) cancel tests.
self.log("Got an error creating the test database: %s" % e)
sys.exit(2)
else:
raise
def _clone_test_db(self, suffix, verbosity, keepdb=False):
source_database_name = self.connection.settings_dict["NAME"]
target_database_name = self.get_test_db_clone_settings(suffix)["NAME"]
test_db_params = {
"dbname": self.connection.ops.quote_name(target_database_name),
"suffix": self.sql_table_creation_suffix(),
}
with self._nodb_cursor() as cursor:
try:
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception:
if keepdb:
# If the database should be kept, skip everything else.
return
try:
if verbosity >= 1:
self.log(
"Destroying old test database for alias %s..."
% (
self._get_database_display_str(
verbosity, target_database_name
),
)
)
cursor.execute("DROP DATABASE %(dbname)s" % test_db_params)
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception as e:
self.log("Got an error recreating the test database: %s" % e)
sys.exit(2)
self._clone_db(source_database_name, target_database_name)
def _clone_db(self, source_database_name, target_database_name):
cmd_args, cmd_env = DatabaseClient.settings_to_cmd_args_env(
self.connection.settings_dict, []
)
dump_cmd = [
"mysqldump",
*cmd_args[1:-1],
"--routines",
"--events",
source_database_name,
]
dump_env = load_env = {**os.environ, **cmd_env} if cmd_env else None
load_cmd = cmd_args
load_cmd[-1] = target_database_name
with (
subprocess.Popen(
dump_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=dump_env
) as dump_proc,
subprocess.Popen(
load_cmd,
stdin=dump_proc.stdout,
stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE,
env=load_env,
) as load_proc,
):
# Allow dump_proc to receive a SIGPIPE if the load process exits.
dump_proc.stdout.close()
dump_err = dump_proc.stderr.read().decode(errors="replace")
load_err = load_proc.stderr.read().decode(errors="replace")
if dump_proc.returncode != 0:
self.log(
f"Got an error on mysqldump when cloning the test database: {dump_err}"
)
sys.exit(dump_proc.returncode)
if load_proc.returncode != 0:
self.log(f"Got an error cloning the test database: {load_err}")
sys.exit(load_proc.returncode)
|
import copy
import multiprocessing
import unittest
from unittest import mock
from django.db import DEFAULT_DB_ALIAS, NotSupportedError, connection, connections
from django.test import SimpleTestCase
@unittest.skipUnless(connection.vendor == "sqlite", "SQLite tests")
class TestDbSignatureTests(SimpleTestCase):
def test_custom_test_name(self):
test_connection = copy.copy(connections[DEFAULT_DB_ALIAS])
test_connection.settings_dict = copy.deepcopy(
connections[DEFAULT_DB_ALIAS].settings_dict
)
test_connection.settings_dict["NAME"] = None
test_connection.settings_dict["TEST"]["NAME"] = "custom.sqlite.db"
signature = test_connection.creation_class(test_connection).test_db_signature()
self.assertEqual(signature, (None, "custom.sqlite.db"))
def test_get_test_db_clone_settings_name(self):
test_connection = copy.copy(connections[DEFAULT_DB_ALIAS])
test_connection.settings_dict = copy.deepcopy(
connections[DEFAULT_DB_ALIAS].settings_dict,
)
tests = [
("test.sqlite3", "test_1.sqlite3"),
("test", "test_1"),
]
for test_db_name, expected_clone_name in tests:
with self.subTest(test_db_name=test_db_name):
test_connection.settings_dict["NAME"] = test_db_name
test_connection.settings_dict["TEST"]["NAME"] = test_db_name
creation_class = test_connection.creation_class(test_connection)
clone_settings_dict = creation_class.get_test_db_clone_settings("1")
self.assertEqual(clone_settings_dict["NAME"], expected_clone_name)
@mock.patch.object(multiprocessing, "get_start_method", return_value="unsupported")
def test_get_test_db_clone_settings_not_supported(self, *mocked_objects):
msg = "Cloning with start method 'unsupported' is not supported."
with self.assertRaisesMessage(NotSupportedError, msg):
connection.creation.get_test_db_clone_settings(1)
|
./temp_repos/django/django/db/backends/mysql/creation.py
|
./temp_repos/django/tests/backends/sqlite/test_creation.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DatabaseCreation'.
Context:
- Class Name: DatabaseCreation
- Dependencies to Mock: None detected
- Key Imports: client, subprocess, django.db.backends.base.creation, os, sys
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
DatabaseCreation
|
python
|
from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.mysql.features import DatabaseFeatures as MySQLDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseSpatialFeatures, MySQLDatabaseFeatures):
empty_intersection_returns_none = False
has_spatialrefsys_table = False
supports_add_srs_entry = False
supports_distance_geodetic = False
supports_length_geodetic = False
supports_area_geodetic = False
supports_transform = False
supports_null_geometries = False
supports_num_points_poly = False
unsupported_geojson_options = {"crs"}
@cached_property
def supports_geometry_field_unique_index(self):
# Not supported in MySQL since
# https://dev.mysql.com/worklog/task/?id=11808
return self.connection.mysql_is_mariadb
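# A hedged test sketch for the flag above, assuming a Django checkout with
# the GIS libraries importable; the MagicMock connection is a stand-in, not
# the real backend connection:
#
#     import unittest
#     from unittest import mock
#     class GeometryUniqueIndexTests(unittest.TestCase):
#         def test_flag_tracks_mariadb(self):
#             for is_mariadb in (True, False):
#                 with self.subTest(is_mariadb=is_mariadb):
#                     # Arrange: fake connection reporting the backend flavor.
#                     conn = mock.MagicMock(mysql_is_mariadb=is_mariadb)
#                     features = DatabaseFeatures(conn)
#                     # Act / Assert: the cached property mirrors the flag.
#                     self.assertIs(
#                         features.supports_geometry_field_unique_index,
#                         is_mariadb,
#                     )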
|
import sqlite3
from unittest import mock, skipUnless
from django.db import OperationalError, connection
from django.test import TestCase
@skipUnless(connection.vendor == "sqlite", "SQLite tests.")
class FeaturesTests(TestCase):
def test_supports_json_field_operational_error(self):
if hasattr(connection.features, "supports_json_field"):
del connection.features.supports_json_field
msg = "unable to open database file"
with mock.patch.object(
connection,
"cursor",
side_effect=OperationalError(msg),
):
with self.assertRaisesMessage(OperationalError, msg):
connection.features.supports_json_field
def test_max_query_params_respects_variable_limit(self):
limit_name = sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER
current_limit = connection.features.max_query_params
new_limit = min(42, current_limit)
try:
connection.connection.setlimit(limit_name, new_limit)
self.assertEqual(connection.features.max_query_params, new_limit)
finally:
connection.connection.setlimit(limit_name, current_limit)
self.assertEqual(connection.features.max_query_params, current_limit)
|
./temp_repos/django/django/contrib/gis/db/backends/mysql/features.py
|
./temp_repos/django/tests/backends/sqlite/test_features.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DatabaseFeatures'.
Context:
- Class Name: DatabaseFeatures
- Dependencies to Mock: None detected
- Key Imports: django.utils.functional, django.contrib.gis.db.backends.base.features, django.db.backends.mysql.features
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
DatabaseFeatures
|
python
|
import os
from . import Error, Tags, register
E001 = Error(
"You should not set the DJANGO_ALLOW_ASYNC_UNSAFE environment variable in "
"deployment. This disables async safety protection.",
id="async.E001",
)
@register(Tags.async_support, deploy=True)
def check_async_unsafe(app_configs, **kwargs):
if os.environ.get("DJANGO_ALLOW_ASYNC_UNSAFE"):
return [E001]
return []
|
import os
from unittest import mock
from django.core.checks.async_checks import E001, check_async_unsafe
from django.test import SimpleTestCase
class AsyncCheckTests(SimpleTestCase):
@mock.patch.dict(os.environ, {"DJANGO_ALLOW_ASYNC_UNSAFE": ""})
def test_no_allowed_async_unsafe(self):
self.assertEqual(check_async_unsafe(None), [])
@mock.patch.dict(os.environ, {"DJANGO_ALLOW_ASYNC_UNSAFE": "true"})
def test_allowed_async_unsafe_set(self):
self.assertEqual(check_async_unsafe(None), [E001])
|
./temp_repos/django/django/core/checks/async_checks.py
|
./temp_repos/django/tests/check_framework/test_async_checks.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: os
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Unknown
|
python
|
from django.db import connections
from . import Tags, register
@register(Tags.database)
def check_database_backends(databases=None, **kwargs):
if databases is None:
return []
issues = []
for alias in databases:
conn = connections[alias]
issues.extend(conn.validation.check(**kwargs))
return issues
|
import unittest
from unittest import mock
from django.core.checks.database import check_database_backends
from django.db import connection, connections
from django.test import TestCase
class DatabaseCheckTests(TestCase):
databases = {"default", "other"}
@mock.patch("django.db.backends.base.validation.BaseDatabaseValidation.check")
def test_database_checks_called(self, mocked_check):
check_database_backends()
self.assertFalse(mocked_check.called)
check_database_backends(databases=self.databases)
self.assertTrue(mocked_check.called)
@unittest.skipUnless(connection.vendor == "mysql", "Test only for MySQL")
def test_mysql_strict_mode(self):
def _clean_sql_mode():
for alias in self.databases:
if hasattr(connections[alias], "sql_mode"):
del connections[alias].sql_mode
_clean_sql_mode()
good_sql_modes = [
"STRICT_TRANS_TABLES,STRICT_ALL_TABLES",
"STRICT_TRANS_TABLES",
"STRICT_ALL_TABLES",
]
for sql_mode in good_sql_modes:
with mock.patch.object(
connection,
"mysql_server_data",
{"sql_mode": sql_mode},
):
self.assertEqual(check_database_backends(databases=self.databases), [])
_clean_sql_mode()
bad_sql_modes = ["", "WHATEVER"]
for sql_mode in bad_sql_modes:
mocker_default = mock.patch.object(
connection,
"mysql_server_data",
{"sql_mode": sql_mode},
)
mocker_other = mock.patch.object(
connections["other"],
"mysql_server_data",
{"sql_mode": sql_mode},
)
with mocker_default, mocker_other:
# One warning for each database alias
result = check_database_backends(databases=self.databases)
self.assertEqual(len(result), 2)
self.assertEqual([r.id for r in result], ["mysql.W002", "mysql.W002"])
_clean_sql_mode()
|
./temp_repos/django/django/core/checks/database.py
|
./temp_repos/django/tests/check_framework/test_database.py
|
django
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: django.db
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Unknown
|
python
|
from os import path
from random import randrange
from random import sample
from urllib.parse import urlsplit
from jinja2 import Environment
from jinja2 import FileSystemLoader
from werkzeug.local import Local
from werkzeug.local import LocalManager
from werkzeug.routing import Map
from werkzeug.routing import Rule
from werkzeug.utils import cached_property
from werkzeug.wrappers import Response
TEMPLATE_PATH = path.join(path.dirname(__file__), "templates")
STATIC_PATH = path.join(path.dirname(__file__), "static")
ALLOWED_SCHEMES = frozenset(["http", "https", "ftp", "ftps"])
URL_CHARS = "abcdefghijkmpqrstuvwxyzABCDEFGHIJKLMNPQRST23456789"
local = Local()
local_manager = LocalManager([local])
application = local("application")
url_map = Map([Rule("/static/<file>", endpoint="static", build_only=True)])
jinja_env = Environment(loader=FileSystemLoader(TEMPLATE_PATH))
def expose(rule, **kw):
def decorate(f):
kw["endpoint"] = f.__name__
url_map.add(Rule(rule, **kw))
return f
return decorate
def url_for(endpoint, _external=False, **values):
return local.url_adapter.build(endpoint, values, force_external=_external)
jinja_env.globals["url_for"] = url_for
def render_template(template, **context):
return Response(
jinja_env.get_template(template).render(**context), mimetype="text/html"
)
def validate_url(url):
return urlsplit(url)[0] in ALLOWED_SCHEMES
def get_random_uid():
return "".join(sample(URL_CHARS, randrange(3, 9)))
class Pagination:
def __init__(self, results, per_page, page, endpoint):
self.results = results
self.per_page = per_page
self.page = page
self.endpoint = endpoint
@cached_property
def count(self):
return len(self.results)
@cached_property
def entries(self):
return self.results[
((self.page - 1) * self.per_page) : (
((self.page - 1) * self.per_page) + self.per_page
)
]
@property
def has_previous(self):
"""Return True if there are pages before the current one."""
return self.page > 1
@property
def has_next(self):
"""Return True if there are pages after the current one."""
return self.page < self.pages
@property
def previous(self):
"""Return the URL for the previous page."""
return url_for(self.endpoint, page=self.page - 1)
@property
def next(self):
"""Return the URL for the next page."""
return url_for(self.endpoint, page=self.page + 1)
@property
def pages(self):
"""Return the number of pages."""
return max(0, self.count - 1) // self.per_page + 1
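# A hedged usage sketch; only the count/entries/pages properties are touched
# here, since .previous/.next go through url_for and need a bound url_adapter:
#
#     results = list(range(25))
#     page = Pagination(results, per_page=10, page=2, endpoint="index")
#     assert page.count == 25
#     assert page.entries == list(range(10, 20))
#     assert page.pages == 3
#     assert page.has_previous and page.has_next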
|
from __future__ import annotations
import inspect
from datetime import datetime
import pytest
from werkzeug import Request
from werkzeug import utils
from werkzeug.datastructures import Headers
from werkzeug.http import http_date
from werkzeug.http import parse_date
from werkzeug.test import Client
from werkzeug.test import EnvironBuilder
from werkzeug.wrappers import Response
@pytest.mark.parametrize(
("url", "code", "expect"),
[
("http://example.com", None, "http://example.com"),
("/füübär", 305, "/f%C3%BC%C3%BCb%C3%A4r"),
("http://☃.example.com/", 307, "http://xn--n3h.example.com/"),
("itms-services://?url=abc", None, "itms-services://?url=abc"),
],
)
def test_redirect(url: str, code: int | None, expect: str) -> None:
environ = EnvironBuilder().get_environ()
if code is None:
resp = utils.redirect(url)
assert resp.status_code == 302
else:
resp = utils.redirect(url, code)
assert resp.status_code == code
assert resp.headers["Location"] == url
assert resp.get_wsgi_headers(environ)["Location"] == expect
assert resp.get_data(as_text=True).count(url) == 2
def test_redirect_xss():
location = 'http://example.com/?xss="><script>alert(1)</script>'
resp = utils.redirect(location)
assert b"<script>alert(1)</script>" not in resp.get_data()
location = 'http://example.com/?xss="onmouseover="alert(1)'
resp = utils.redirect(location)
assert (
b'href="http://example.com/?xss="onmouseover="alert(1)"' not in resp.get_data()
)
def test_redirect_with_custom_response_class():
class MyResponse(Response):
pass
location = "http://example.com/redirect"
resp = utils.redirect(location, Response=MyResponse)
assert isinstance(resp, MyResponse)
assert resp.headers["Location"] == location
def test_cached_property():
foo = []
class A:
def prop(self):
foo.append(42)
return 42
prop = utils.cached_property(prop)
a = A()
p = a.prop
q = a.prop
assert p == q == 42
assert foo == [42]
foo = []
class A:
def _prop(self):
foo.append(42)
return 42
prop = utils.cached_property(_prop, name="prop")
del _prop
a = A()
p = a.prop
q = a.prop
assert p == q == 42
assert foo == [42]
def test_can_set_cached_property():
class A:
@utils.cached_property
def _prop(self):
return "cached_property return value"
a = A()
a._prop = "value"
assert a._prop == "value"
def test_invalidate_cached_property():
accessed = 0
class A:
@utils.cached_property
def prop(self):
nonlocal accessed
accessed += 1
return 42
a = A()
p = a.prop
q = a.prop
assert p == q == 42
assert accessed == 1
a.prop = 16
assert a.prop == 16
assert accessed == 1
del a.prop
r = a.prop
assert r == 42
assert accessed == 2
def test_inspect_treats_cached_property_as_property():
class A:
@utils.cached_property
def _prop(self):
return "cached_property return value"
attrs = inspect.classify_class_attrs(A)
for attr in attrs:
if attr.name == "_prop":
break
assert attr.kind == "property"
def test_environ_property():
class A:
environ = {"string": "abc", "number": "42"}
string = utils.environ_property("string")
missing = utils.environ_property("missing", "spam")
read_only = utils.environ_property("number")
number = utils.environ_property("number", load_func=int)
broken_number = utils.environ_property("broken_number", load_func=int)
date = utils.environ_property(
"date", None, parse_date, http_date, read_only=False
)
foo = utils.environ_property("foo")
a = A()
assert a.string == "abc"
assert a.missing == "spam"
def test_assign():
a.read_only = "something"
pytest.raises(AttributeError, test_assign)
assert a.number == 42
assert a.broken_number is None
assert a.date is None
a.date = datetime(2008, 1, 22, 10, 0, 0, 0)
assert a.environ["date"] == "Tue, 22 Jan 2008 10:00:00 GMT"
def test_import_string():
from datetime import date
from werkzeug.debug import DebuggedApplication
assert utils.import_string("datetime.date") is date
assert utils.import_string("datetime.date") is date
assert utils.import_string("datetime:date") is date
assert utils.import_string("XXXXXXXXXXXX", True) is None
assert utils.import_string("datetime.XXXXXXXXXXXX", True) is None
assert (
utils.import_string("werkzeug.debug.DebuggedApplication") is DebuggedApplication
)
pytest.raises(ImportError, utils.import_string, "XXXXXXXXXXXXXXXX")
pytest.raises(ImportError, utils.import_string, "datetime.XXXXXXXXXX")
def test_import_string_provides_traceback(tmpdir, monkeypatch):
monkeypatch.syspath_prepend(str(tmpdir))
# Couple of packages
dir_a = tmpdir.mkdir("a")
dir_b = tmpdir.mkdir("b")
# Totally packages, I promise
dir_a.join("__init__.py").write("")
dir_b.join("__init__.py").write("")
# 'aa.a' that depends on 'bb.b', which in turn has a broken import
dir_a.join("aa.py").write("from b import bb")
dir_b.join("bb.py").write("from os import a_typo")
# Do we get all the useful information in the traceback?
with pytest.raises(ImportError) as baz_exc:
utils.import_string("a.aa")
traceback = "".join(str(line) for line in baz_exc.traceback)
assert "bb.py':1" in traceback # a bit different than typical python tb
assert "from os import a_typo" in traceback
def test_import_string_attribute_error(tmpdir, monkeypatch):
monkeypatch.syspath_prepend(str(tmpdir))
tmpdir.join("foo_test.py").write("from bar_test import value")
tmpdir.join("bar_test.py").write("raise AttributeError('bad')")
with pytest.raises(AttributeError) as info:
utils.import_string("foo_test")
assert "bad" in str(info.value)
with pytest.raises(AttributeError) as info:
utils.import_string("bar_test")
assert "bad" in str(info.value)
def test_find_modules():
assert list(utils.find_modules("werkzeug.debug")) == [
"werkzeug.debug.console",
"werkzeug.debug.repr",
"werkzeug.debug.tbtools",
]
def test_header_set_duplication_bug():
headers = Headers([("Content-Type", "text/html"), ("Foo", "bar"), ("Blub", "blah")])
headers["blub"] = "hehe"
headers["blafasel"] = "humm"
assert headers == Headers(
[
("Content-Type", "text/html"),
("Foo", "bar"),
("blub", "hehe"),
("blafasel", "humm"),
]
)
@pytest.mark.parametrize(
("path", "base_url", "absolute_location"),
[
("foo", "http://example.org/app", "http://example.org/app/foo/"),
("/foo", "http://example.org/app", "http://example.org/app/foo/"),
("/foo/bar", "http://example.org/", "http://example.org/foo/bar/"),
("/foo/bar", "http://example.org/app", "http://example.org/app/foo/bar/"),
("/foo?baz", "http://example.org/", "http://example.org/foo/?baz"),
("/foo/", "http://example.org/", "http://example.org/foo/"),
("/foo/", "http://example.org/app", "http://example.org/app/foo/"),
("/", "http://example.org/", "http://example.org/"),
("/", "http://example.org/app", "http://example.org/app/"),
],
)
@pytest.mark.parametrize("autocorrect", [False, True])
def test_append_slash_redirect(autocorrect, path, base_url, absolute_location):
@Request.application
def app(request):
rv = utils.append_slash_redirect(request.environ)
rv.autocorrect_location_header = autocorrect
return rv
client = Client(app)
response = client.get(path, base_url=base_url)
assert response.status_code == 308
if not autocorrect:
assert response.headers["Location"].count("/") == 1
else:
assert response.headers["Location"] == absolute_location
def test_cached_property_doc():
@utils.cached_property
def foo():
"""testing"""
return 42
assert foo.__doc__ == "testing"
assert foo.__name__ == "foo"
assert foo.__module__ == __name__
def test_secure_filename():
assert utils.secure_filename("My cool movie.mov") == "My_cool_movie.mov"
assert utils.secure_filename("../../../etc/passwd") == "etc_passwd"
assert (
utils.secure_filename("i contain cool \xfcml\xe4uts.txt")
== "i_contain_cool_umlauts.txt"
)
assert utils.secure_filename("__filename__") == "filename"
assert utils.secure_filename("foo$&^*)bar") == "foobar"
|
./temp_repos/werkzeug/examples/couchy/utils.py
|
./temp_repos/werkzeug/tests/test_utils.py
|
werkzeug
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Pagination'.
Context:
- Class Name: Pagination
- Dependencies to Mock: page, per_page, results, endpoint
- Key Imports: werkzeug.utils, werkzeug.routing, random, werkzeug.wrappers, urllib.parse, os, werkzeug.local, jinja2
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Pagination
|
python
|
from __future__ import annotations
import hashlib
import hmac
import os
import posixpath
import secrets
SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
DEFAULT_PBKDF2_ITERATIONS = 1_000_000
_os_alt_seps: list[str] = list(
sep for sep in [os.sep, os.path.altsep] if sep is not None and sep != "/"
)
_windows_device_files = {
"CON",
"PRN",
"AUX",
"NUL",
*(f"COM{i}" for i in range(10)),
*(f"LPT{i}" for i in range(10)),
}
def gen_salt(length: int) -> str:
"""Generate a random string of SALT_CHARS with specified ``length``."""
if length <= 0:
raise ValueError("Salt length must be at least 1.")
return "".join(secrets.choice(SALT_CHARS) for _ in range(length))
def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]:
method, *args = method.split(":")
salt_bytes = salt.encode()
password_bytes = password.encode()
if method == "scrypt":
if not args:
n = 2**15
r = 8
p = 1
else:
try:
n, r, p = map(int, args)
except ValueError:
raise ValueError("'scrypt' takes 3 arguments.") from None
maxmem = 132 * n * r * p # ideally 128, but some extra seems needed
return (
hashlib.scrypt(
password_bytes, salt=salt_bytes, n=n, r=r, p=p, maxmem=maxmem
).hex(),
f"scrypt:{n}:{r}:{p}",
)
elif method == "pbkdf2":
len_args = len(args)
if len_args == 0:
hash_name = "sha256"
iterations = DEFAULT_PBKDF2_ITERATIONS
elif len_args == 1:
hash_name = args[0]
iterations = DEFAULT_PBKDF2_ITERATIONS
elif len_args == 2:
hash_name = args[0]
iterations = int(args[1])
else:
raise ValueError("'pbkdf2' takes 2 arguments.")
return (
hashlib.pbkdf2_hmac(
hash_name, password_bytes, salt_bytes, iterations
).hex(),
f"pbkdf2:{hash_name}:{iterations}",
)
else:
raise ValueError(f"Invalid hash method '{method}'.")
def generate_password_hash(
password: str, method: str = "scrypt", salt_length: int = 16
) -> str:
"""Securely hash a password for storage. A password can be compared to a stored hash
using :func:`check_password_hash`.
The following methods are supported:
- ``scrypt``, the default. The parameters are ``n``, ``r``, and ``p``, the default
is ``scrypt:32768:8:1``. See :func:`hashlib.scrypt`.
- ``pbkdf2``, less secure. The parameters are ``hash_method`` and ``iterations``,
the default is ``pbkdf2:sha256:1000000``. See :func:`hashlib.pbkdf2_hmac`.
Default parameters may be updated to reflect current guidelines, and methods may be
deprecated and removed if they are no longer considered secure. To migrate old
hashes, you may generate a new hash when checking an old hash, or you may contact
users with a link to reset their password.
:param password: The plaintext password.
:param method: The key derivation function and parameters.
:param salt_length: The number of characters to generate for the salt.
.. versionchanged:: 3.1
The default iterations for pbkdf2 was increased to 1,000,000.
.. versionchanged:: 2.3
Scrypt support was added.
.. versionchanged:: 2.3
The default iterations for pbkdf2 was increased to 600,000.
.. versionchanged:: 2.3
All plain hashes are deprecated and will not be supported in Werkzeug 3.0.
"""
salt = gen_salt(salt_length)
h, actual_method = _hash_internal(method, salt, password)
return f"{actual_method}${salt}${h}"
def check_password_hash(pwhash: str, password: str) -> bool:
"""Securely check that the given stored password hash, previously generated using
:func:`generate_password_hash`, matches the given password.
Methods may be deprecated and removed if they are no longer considered secure. To
migrate old hashes, you may generate a new hash when checking an old hash, or you
may contact users with a link to reset their password.
:param pwhash: The hashed password.
:param password: The plaintext password.
.. versionchanged:: 2.3
All plain hashes are deprecated and will not be supported in Werkzeug 3.0.
"""
try:
method, salt, hashval = pwhash.split("$", 2)
except ValueError:
return False
return hmac.compare_digest(_hash_internal(method, salt, password)[0], hashval)
def safe_join(directory: str, *pathnames: str) -> str | None:
"""Safely join zero or more untrusted path components to a base
directory to avoid escaping the base directory.
:param directory: The trusted base directory.
:param pathnames: The untrusted path components relative to the
base directory.
:return: A safe path, otherwise ``None``.
.. versionchanged:: 3.1.4
Special device names are disallowed on Windows.
"""
if not directory:
# Ensure we end up with ./path if directory="" is given,
# otherwise the first untrusted part could become trusted.
directory = "."
parts = [directory]
for filename in pathnames:
if filename != "":
filename = posixpath.normpath(filename)
if (
any(sep in filename for sep in _os_alt_seps)
or (
os.name == "nt"
and os.path.splitext(filename)[0].upper() in _windows_device_files
)
or os.path.isabs(filename)
# ntpath.isabs doesn't catch this on Python < 3.11
or filename.startswith("/")
or filename == ".."
or filename.startswith("../")
):
return None
parts.append(filename)
return posixpath.join(*parts)
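# A hedged usage sketch of the API above (scrypt requires a CPython build
# with OpenSSL scrypt support; the literal passwords are illustrative):
if __name__ == "__main__":
    pwhash = generate_password_hash("correct horse")
    assert check_password_hash(pwhash, "correct horse")
    assert not check_password_hash(pwhash, "wrong horse")
    assert safe_join("static", "css/site.css") == "static/css/site.css"
    assert safe_join("static", "../secrets") is None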
|
import os
import sys
import pytest
from werkzeug.security import check_password_hash
from werkzeug.security import generate_password_hash
from werkzeug.security import safe_join
def test_default_password_method():
value = generate_password_hash("secret")
assert value.startswith("scrypt:")
@pytest.mark.xfail(
sys.implementation.name == "pypy", reason="scrypt unavailable on pypy"
)
def test_scrypt():
value = generate_password_hash("secret", method="scrypt")
assert check_password_hash(value, "secret")
assert value.startswith("scrypt:32768:8:1$")
def test_pbkdf2():
value = generate_password_hash("secret", method="pbkdf2")
assert check_password_hash(value, "secret")
assert value.startswith("pbkdf2:sha256:1000000$")
def test_salted_hashes():
hash1 = generate_password_hash("secret")
hash2 = generate_password_hash("secret")
assert hash1 != hash2
assert check_password_hash(hash1, "secret")
assert check_password_hash(hash2, "secret")
def test_require_salt():
with pytest.raises(ValueError):
generate_password_hash("secret", salt_length=0)
def test_invalid_method():
with pytest.raises(ValueError, match="Invalid hash method"):
generate_password_hash("secret", "sha256")
@pytest.mark.parametrize(
("path", "expect"),
[
("b/c", "a/b/c"),
("../b/c", None),
("b\\c", None if os.name == "nt" else "a/b\\c"),
("//b/c", None),
],
)
def test_safe_join(path, expect):
assert safe_join("a", path) == expect
def test_safe_join_os_sep():
import werkzeug.security as sec
prev_value = sec._os_alt_seps
sec._os_alt_seps = "*"
assert safe_join("foo", "bar/baz*") is None
sec._os_alt_seps = prev_value
def test_safe_join_empty_trusted():
assert safe_join("", "c:test.txt") == "./c:test.txt"
def test_safe_join_windows_special(monkeypatch: pytest.MonkeyPatch) -> None:
"""Windows special device name is not allowed on Windows."""
monkeypatch.setattr("os.name", "nt")
assert safe_join("a", "CON") is None
monkeypatch.setattr("os.name", "posix")
assert safe_join("a", "CON") == "a/CON"
|
./temp_repos/werkzeug/src/werkzeug/security.py
|
./temp_repos/werkzeug/tests/test_security.py
|
werkzeug
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: secrets, hmac, hashlib, os, __future__, posixpath
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Unknown
|
python
|
"""A WSGI and HTTP server for use **during development only**. This
server is convenient to use, but is not designed to be particularly
stable, secure, or efficient. Use a dedicated WSGI server and HTTP
server when deploying to production.
It provides features like interactive debugging and code reloading. Use
``run_simple`` to start the server. Put this in a ``run.py`` script:
.. code-block:: python
from myapp import create_app
from werkzeug import run_simple
if __name__ == "__main__":
    app = create_app()
    run_simple("localhost", 8000, app, use_debugger=True, use_reloader=True)
"""
from __future__ import annotations
import errno
import io
import os
import selectors
import socket
import socketserver
import sys
import typing as t
from datetime import datetime as dt
from datetime import timedelta
from datetime import timezone
from http.server import BaseHTTPRequestHandler
from http.server import HTTPServer
from urllib.parse import unquote
from urllib.parse import urlsplit
from ._internal import _log
from ._internal import _wsgi_encoding_dance
from .exceptions import InternalServerError
from .urls import uri_to_iri
try:
import ssl
connection_dropped_errors: tuple[type[Exception], ...] = (
ConnectionError,
socket.timeout,
ssl.SSLEOFError,
)
except ImportError:
class _SslDummy:
def __getattr__(self, name: str) -> t.Any:
raise RuntimeError( # noqa: B904
"SSL is unavailable because this Python runtime was not"
" compiled with SSL/TLS support."
)
ssl = _SslDummy() # type: ignore
connection_dropped_errors = (ConnectionError, socket.timeout)
_log_add_style = True
if os.name == "nt":
try:
__import__("colorama")
except ImportError:
_log_add_style = False
can_fork = hasattr(os, "fork")
if can_fork:
ForkingMixIn = socketserver.ForkingMixIn
else:
class ForkingMixIn: # type: ignore
pass
try:
af_unix = socket.AF_UNIX
except AttributeError:
af_unix = None # type: ignore
LISTEN_QUEUE = 128
_TSSLContextArg = t.Optional[
t.Union["ssl.SSLContext", tuple[str, t.Optional[str]], t.Literal["adhoc"]]
]
if t.TYPE_CHECKING:
from _typeshed.wsgi import WSGIApplication
from _typeshed.wsgi import WSGIEnvironment
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateKeyWithSerialization,
)
from cryptography.x509 import Certificate
class DechunkedInput(io.RawIOBase):
"""An input stream that handles Transfer-Encoding 'chunked'"""
def __init__(self, rfile: t.IO[bytes]) -> None:
self._rfile = rfile
self._done = False
self._len = 0
def readable(self) -> bool:
return True
def read_chunk_len(self) -> int:
try:
line = self._rfile.readline().decode("latin1")
_len = int(line.strip(), 16)
except ValueError as e:
raise OSError("Invalid chunk header") from e
if _len < 0:
raise OSError("Negative chunk length not allowed")
return _len
def readinto(self, buf: bytearray) -> int: # type: ignore
read = 0
while not self._done and read < len(buf):
if self._len == 0:
# This is the first chunk or we fully consumed the previous
# one. Read the next length of the next chunk
self._len = self.read_chunk_len()
if self._len == 0:
# Found the final chunk of size 0. The stream is now exhausted,
# but there is still a final newline that should be consumed
self._done = True
if self._len > 0:
# There is data (left) in this chunk, so append it to the
# buffer. If this operation fully consumes the chunk, this will
# reset self._len to 0.
n = min(len(buf), self._len)
# If (read + chunk size) becomes more than len(buf), buf will
# grow beyond the original size and read more data than
# required. So only read as much data as can fit in buf.
if read + n > len(buf):
buf[read:] = self._rfile.read(len(buf) - read)
self._len -= len(buf) - read
read = len(buf)
else:
buf[read : read + n] = self._rfile.read(n)
self._len -= n
read += n
if self._len == 0:
# Skip the terminating newline of a chunk that has been fully
# consumed. This also applies to the 0-sized final chunk
terminator = self._rfile.readline()
if terminator not in (b"\n", b"\r\n", b"\r"):
raise OSError("Missing chunk terminating newline")
return read
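# A hedged illustration of the chunked framing DechunkedInput consumes:
#
#     import io
#     stream = DechunkedInput(io.BytesIO(b"4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n"))
#     assert stream.read() == b"Wikipedia"
#
# Each chunk is a hex length line, the payload, then CRLF; a zero-length
# chunk terminates the stream.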
class WSGIRequestHandler(BaseHTTPRequestHandler):
"""A request handler that implements WSGI dispatching."""
server: BaseWSGIServer
@property
def server_version(self) -> str: # type: ignore
return self.server._server_version
def make_environ(self) -> WSGIEnvironment:
request_url = urlsplit(self.path)
url_scheme = "http" if self.server.ssl_context is None else "https"
if not self.client_address:
self.client_address = ("<local>", 0)
elif isinstance(self.client_address, str):
self.client_address = (self.client_address, 0)
# If there was no scheme but the path started with two slashes,
# the first segment may have been incorrectly parsed as the
# netloc, prepend it to the path again.
if not request_url.scheme and request_url.netloc:
path_info = f"/{request_url.netloc}{request_url.path}"
else:
path_info = request_url.path
path_info = unquote(path_info)
environ: WSGIEnvironment = {
"wsgi.version": (1, 0),
"wsgi.url_scheme": url_scheme,
"wsgi.input": self.rfile,
"wsgi.errors": sys.stderr,
"wsgi.multithread": self.server.multithread,
"wsgi.multiprocess": self.server.multiprocess,
"wsgi.run_once": False,
"werkzeug.socket": self.connection,
"SERVER_SOFTWARE": self.server_version,
"REQUEST_METHOD": self.command,
"SCRIPT_NAME": "",
"PATH_INFO": _wsgi_encoding_dance(path_info),
"QUERY_STRING": _wsgi_encoding_dance(request_url.query),
# Non-standard, added by mod_wsgi, uWSGI
"REQUEST_URI": _wsgi_encoding_dance(self.path),
# Non-standard, added by gunicorn
"RAW_URI": _wsgi_encoding_dance(self.path),
"REMOTE_ADDR": self.address_string(),
"REMOTE_PORT": self.port_integer(),
"SERVER_NAME": self.server.server_address[0],
"SERVER_PORT": str(self.server.server_address[1]),
"SERVER_PROTOCOL": self.request_version,
}
for key, value in self.headers.items():
if "_" in key:
continue
key = key.upper().replace("-", "_")
value = value.replace("\r\n", "")
if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"):
key = f"HTTP_{key}"
if key in environ:
value = f"{environ[key]},{value}"
environ[key] = value
if environ.get("HTTP_TRANSFER_ENCODING", "").strip().lower() == "chunked":
environ["wsgi.input_terminated"] = True
environ["wsgi.input"] = DechunkedInput(environ["wsgi.input"])
# Per RFC 2616, if the URL is absolute, use that as the host.
# We're using "has a scheme" to indicate an absolute URL.
if request_url.scheme and request_url.netloc:
environ["HTTP_HOST"] = request_url.netloc
try:
# binary_form=False gives nicer information, but wouldn't be compatible with
# what Nginx or Apache could return.
peer_cert = self.connection.getpeercert(binary_form=True)
if peer_cert is not None:
# Nginx and Apache use PEM format.
environ["SSL_CLIENT_CERT"] = ssl.DER_cert_to_PEM_cert(peer_cert)
except ValueError:
# SSL handshake hasn't finished.
self.server.log("error", "Cannot fetch SSL peer certificate info")
except AttributeError:
# Not using TLS, the socket will not have getpeercert().
pass
return environ
def run_wsgi(self) -> None:
if self.headers.get("Expect", "").lower().strip() == "100-continue":
self.wfile.write(b"HTTP/1.1 100 Continue\r\n\r\n")
self.environ = environ = self.make_environ()
status_set: str | None = None
headers_set: list[tuple[str, str]] | None = None
status_sent: str | None = None
headers_sent: list[tuple[str, str]] | None = None
chunk_response: bool = False
def write(data: bytes) -> None:
nonlocal status_sent, headers_sent, chunk_response
assert status_set is not None, "write() before start_response"
assert headers_set is not None, "write() before start_response"
if status_sent is None:
status_sent = status_set
headers_sent = headers_set
try:
code_str, msg = status_sent.split(None, 1)
except ValueError:
code_str, msg = status_sent, ""
code = int(code_str)
self.send_response(code, msg)
header_keys = set()
for key, value in headers_sent:
self.send_header(key, value)
header_keys.add(key.lower())
# Use chunked transfer encoding if there is no content
# length. Do not use for 1xx and 204 responses. 304
# responses and HEAD requests are also excluded, which
# is the more conservative behavior and matches other
# parts of the code.
# https://httpwg.org/specs/rfc7230.html#rfc.section.3.3.1
if (
not (
"content-length" in header_keys
or environ["REQUEST_METHOD"] == "HEAD"
or (100 <= code < 200)
or code in {204, 304}
)
and self.protocol_version >= "HTTP/1.1"
):
chunk_response = True
self.send_header("Transfer-Encoding", "chunked")
# Always close the connection. This disables HTTP/1.1
# keep-alive connections. They aren't handled well by
# Python's http.server because it doesn't know how to
# drain the stream before the next request line.
self.send_header("Connection", "close")
self.end_headers()
assert isinstance(data, bytes), "applications must write bytes"
if data:
if chunk_response:
self.wfile.write(hex(len(data))[2:].encode())
self.wfile.write(b"\r\n")
self.wfile.write(data)
if chunk_response:
self.wfile.write(b"\r\n")
self.wfile.flush()
def start_response(status, headers, exc_info=None): # type: ignore
nonlocal status_set, headers_set
if exc_info:
try:
if headers_sent:
raise exc_info[1].with_traceback(exc_info[2])
finally:
exc_info = None
elif headers_set:
raise AssertionError("Headers already set")
status_set = status
headers_set = headers
return write
def execute(app: WSGIApplication) -> None:
application_iter = app(environ, start_response)
try:
for data in application_iter:
write(data)
if not headers_sent:
write(b"")
if chunk_response:
self.wfile.write(b"0\r\n\r\n")
finally:
# Check for any remaining data in the read socket, and discard it. This
# will read past request.max_content_length, but lets the client see a
# 413 response instead of a connection reset failure. If we supported
# keep-alive connections, this naive approach would break by reading the
# next request line. Since we know that write (above) closes every
# connection we can read everything.
selector = selectors.DefaultSelector()
selector.register(self.connection, selectors.EVENT_READ)
total_size = 0
total_reads = 0
# A timeout of 0 tends to fail because a client needs a small amount of
# time to continue sending its data.
while selector.select(timeout=0.01):
# Only read 10MB into memory at a time.
data = self.rfile.read(10_000_000)
total_size += len(data)
total_reads += 1
# Stop reading on no data, >=10GB, or 1000 reads. If a client sends
# more than that, they'll get a connection reset failure.
if not data or total_size >= 10_000_000_000 or total_reads > 1000:
break
selector.close()
if hasattr(application_iter, "close"):
application_iter.close()
try:
execute(self.server.app)
except connection_dropped_errors as e:
self.connection_dropped(e, environ)
except Exception as e:
if self.server.passthrough_errors:
raise
if status_sent is not None and chunk_response:
self.close_connection = True
try:
# if we haven't yet sent the headers but they are set
# we roll back to be able to set them again.
if status_sent is None:
status_set = None
headers_set = None
execute(InternalServerError())
except Exception:
pass
from .debug.tbtools import DebugTraceback
msg = DebugTraceback(e).render_traceback_text()
self.server.log("error", f"Error on request:\n{msg}")
def handle(self) -> None:
"""Handles a request ignoring dropped connections."""
try:
super().handle()
except (ConnectionError, socket.timeout) as e:
self.connection_dropped(e)
except Exception as e:
if self.server.ssl_context is not None and is_ssl_error(e):
self.log_error("SSL error occurred: %s", e)
else:
raise
def connection_dropped(
self, error: BaseException, environ: WSGIEnvironment | None = None
) -> None:
"""Called if the connection was closed by the client. By default
nothing happens.
"""
def __getattr__(self, name: str) -> t.Any:
# All HTTP methods are handled by run_wsgi.
if name.startswith("do_"):
return self.run_wsgi
# All other attributes are forwarded to the base class.
return getattr(super(), name)
def address_string(self) -> str:
if getattr(self, "environ", None):
return self.environ["REMOTE_ADDR"] # type: ignore
if not self.client_address:
return "<local>"
return self.client_address[0]
def port_integer(self) -> int:
return self.client_address[1]
# Escape control characters. This is defined (but private) in Python 3.12.
_control_char_table = str.maketrans(
{c: rf"\x{c:02x}" for c in [*range(0x20), *range(0x7F, 0xA0)]}
)
_control_char_table[ord("\\")] = r"\\"
def log_request(self, code: int | str = "-", size: int | str = "-") -> None:
try:
path = uri_to_iri(self.path)
msg = f"{self.command} {path} {self.request_version}"
except AttributeError:
# path isn't set if the requestline was bad
msg = self.requestline
# Escape control characters that may be in the decoded path.
msg = msg.translate(self._control_char_table)
code = str(code)
if code[0] == "1": # 1xx - Informational
msg = _ansi_style(msg, "bold")
elif code == "200": # 2xx - Success
pass
elif code == "304": # 304 - Resource Not Modified
msg = _ansi_style(msg, "cyan")
elif code[0] == "3": # 3xx - Redirection
msg = _ansi_style(msg, "green")
elif code == "404": # 404 - Resource Not Found
msg = _ansi_style(msg, "yellow")
elif code[0] == "4": # 4xx - Client Error
msg = _ansi_style(msg, "bold", "red")
else: # 5xx, or any other response
msg = _ansi_style(msg, "bold", "magenta")
self.log("info", '"%s" %s %s', msg, code, size)
def log_error(self, format: str, *args: t.Any) -> None:
self.log("error", format, *args)
def log_message(self, format: str, *args: t.Any) -> None:
self.log("info", format, *args)
def log(self, type: str, message: str, *args: t.Any) -> None:
# an IPv6 scoped address contains "%" which breaks logging
address_string = self.address_string().replace("%", "%%")
_log(
type,
f"{address_string} - - [{self.log_date_time_string()}] {message}\n",
*args,
)
def _ansi_style(value: str, *styles: str) -> str:
if not _log_add_style:
return value
codes = {
"bold": 1,
"red": 31,
"green": 32,
"yellow": 33,
"magenta": 35,
"cyan": 36,
}
for style in styles:
value = f"\x1b[{codes[style]}m{value}"
return f"{value}\x1b[0m"
def generate_adhoc_ssl_pair(
cn: str | None = None,
) -> tuple[Certificate, RSAPrivateKeyWithSerialization]:
try:
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID
except ImportError:
raise TypeError(
"Using ad-hoc certificates requires the cryptography library."
) from None
backend = default_backend()
pkey = rsa.generate_private_key(
public_exponent=65537, key_size=2048, backend=backend
)
# pretty damn sure that this is not actually accepted by anyone
if cn is None:
cn = "*"
subject = x509.Name(
[
x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Dummy Certificate"),
x509.NameAttribute(NameOID.COMMON_NAME, cn),
]
)
backend = default_backend()
cert = (
x509.CertificateBuilder()
.subject_name(subject)
.issuer_name(subject)
.public_key(pkey.public_key())
.serial_number(x509.random_serial_number())
.not_valid_before(dt.now(timezone.utc))
.not_valid_after(dt.now(timezone.utc) + timedelta(days=365))
.add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False)
.add_extension(
x509.SubjectAlternativeName([x509.DNSName(cn), x509.DNSName(f"*.{cn}")]),
critical=False,
)
.sign(pkey, hashes.SHA256(), backend)
)
return cert, pkey
def make_ssl_devcert(
base_path: str, host: str | None = None, cn: str | None = None
) -> tuple[str, str]:
"""Creates an SSL key for development. This should be used instead of
the ``'adhoc'`` key which generates a new cert on each server start.
It accepts a path for where it should store the key and cert and
either a host or CN. If a host is given it will use the CN
``*.host/CN=host``.
For more information see :func:`run_simple`.
.. versionadded:: 0.9
:param base_path: the path to the certificate and key. The extension
``.crt`` is added for the certificate, ``.key`` is
added for the key.
:param host: the name of the host. This can be used as an alternative
for the `cn`.
:param cn: the `CN` to use.
"""
if host is not None:
cn = host
cert, pkey = generate_adhoc_ssl_pair(cn=cn)
from cryptography.hazmat.primitives import serialization
cert_file = f"{base_path}.crt"
pkey_file = f"{base_path}.key"
with open(cert_file, "wb") as f:
f.write(cert.public_bytes(serialization.Encoding.PEM))
with open(pkey_file, "wb") as f:
f.write(
pkey.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
)
)
return cert_file, pkey_file
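# A hedged usage sketch (requires the cryptography package; the path is
# illustrative):
#
#     cert_file, pkey_file = make_ssl_devcert("/tmp/devcert", host="localhost")
#     # -> ("/tmp/devcert.crt", "/tmp/devcert.key"), reusable across restarts,
#     #    unlike ssl_context="adhoc" which regenerates on every start.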
def generate_adhoc_ssl_context() -> ssl.SSLContext:
"""Generates an adhoc SSL context for the development server."""
import atexit
import tempfile
cert, pkey = generate_adhoc_ssl_pair()
from cryptography.hazmat.primitives import serialization
cert_handle, cert_file = tempfile.mkstemp()
pkey_handle, pkey_file = tempfile.mkstemp()
atexit.register(os.remove, pkey_file)
atexit.register(os.remove, cert_file)
os.write(cert_handle, cert.public_bytes(serialization.Encoding.PEM))
os.write(
pkey_handle,
pkey.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
),
)
os.close(cert_handle)
os.close(pkey_handle)
ctx = load_ssl_context(cert_file, pkey_file)
return ctx
def load_ssl_context(
cert_file: str, pkey_file: str | None = None, protocol: int | None = None
) -> ssl.SSLContext:
"""Loads SSL context from cert/private key files and optional protocol.
Many parameters are directly taken from the API of
:py:class:`ssl.SSLContext`.
:param cert_file: Path of the certificate to use.
:param pkey_file: Path of the private key to use. If not given, the key
will be obtained from the certificate file.
:param protocol: A ``PROTOCOL`` constant from the :mod:`ssl` module.
Defaults to :data:`ssl.PROTOCOL_TLS_SERVER`.
"""
if protocol is None:
protocol = ssl.PROTOCOL_TLS_SERVER
ctx = ssl.SSLContext(protocol)
ctx.load_cert_chain(cert_file, pkey_file)
return ctx
def is_ssl_error(error: Exception | None = None) -> bool:
"""Checks if the given error (or the current one) is an SSL error."""
if error is None:
error = t.cast(Exception, sys.exc_info()[1])
return isinstance(error, ssl.SSLError)
def select_address_family(host: str, port: int) -> socket.AddressFamily:
"""Return ``AF_INET4``, ``AF_INET6``, or ``AF_UNIX`` depending on
the host and port."""
if host.startswith("unix://"):
return socket.AF_UNIX
elif ":" in host and hasattr(socket, "AF_INET6"):
return socket.AF_INET6
return socket.AF_INET
def get_sockaddr(
host: str, port: int, family: socket.AddressFamily
) -> tuple[str, int] | str:
"""Return a fully qualified socket address that can be passed to
:func:`socket.bind`."""
if family == af_unix:
# Absolute path avoids IDNA encoding error when path starts with dot.
return os.path.abspath(host.partition("://")[2])
try:
res = socket.getaddrinfo(
host, port, family, socket.SOCK_STREAM, socket.IPPROTO_TCP
)
except socket.gaierror:
return host, port
return res[0][4] # type: ignore
def get_interface_ip(family: socket.AddressFamily) -> str:
"""Get the IP address of an external interface. Used when binding to
0.0.0.0 or ::1 to show a more useful URL.
:meta private:
"""
# arbitrary private address
host = "fd31:f903:5ab5:1::1" if family == socket.AF_INET6 else "10.253.155.219"
with socket.socket(family, socket.SOCK_DGRAM) as s:
try:
s.connect((host, 58162))
except OSError:
return "::1" if family == socket.AF_INET6 else "127.0.0.1"
return s.getsockname()[0] # type: ignore
class BaseWSGIServer(HTTPServer):
"""A WSGI server that that handles one request at a time.
Use :func:`make_server` to create a server instance.
"""
multithread = False
multiprocess = False
request_queue_size = LISTEN_QUEUE
allow_reuse_address = True
def __init__(
self,
host: str,
port: int,
app: WSGIApplication,
handler: type[WSGIRequestHandler] | None = None,
passthrough_errors: bool = False,
ssl_context: _TSSLContextArg | None = None,
fd: int | None = None,
) -> None:
if handler is None:
handler = WSGIRequestHandler
# If the handler doesn't directly set a protocol version and
# thread or process workers are used, then allow chunked
# responses and keep-alive connections by enabling HTTP/1.1.
if "protocol_version" not in vars(handler) and (
self.multithread or self.multiprocess
):
handler.protocol_version = "HTTP/1.1"
self.host = host
self.port = port
self.app = app
self.passthrough_errors = passthrough_errors
self.address_family = address_family = select_address_family(host, port)
server_address = get_sockaddr(host, int(port), address_family)
# Remove a leftover Unix socket file from a previous run. Don't
# remove a file that was set up by run_simple.
if address_family == af_unix and fd is None:
server_address = t.cast(str, server_address)
if os.path.exists(server_address):
os.unlink(server_address)
# Bind and activate will be handled manually, it should only
# happen if we're not using a socket that was already set up.
super().__init__(
server_address, # type: ignore[arg-type]
handler,
bind_and_activate=False,
)
if fd is None:
# No existing socket descriptor, do bind_and_activate=True.
try:
self.server_bind()
self.server_activate()
except OSError as e:
# Catch connection issues and show them without the traceback. Show
# extra instructions for address not found, and for macOS.
self.server_close()
print(e.strerror, file=sys.stderr)
if e.errno == errno.EADDRINUSE:
print(
f"Port {port} is in use by another program. Either identify and"
" stop that program, or start the server with a different"
" port.",
file=sys.stderr,
)
if sys.platform == "darwin" and port == 5000:
print(
"On macOS, try searching for and disabling"
" 'AirPlay Receiver' in System Settings.",
file=sys.stderr,
)
sys.exit(1)
except BaseException:
self.server_close()
raise
else:
# TCPServer automatically opens a socket even if bind_and_activate is False.
# Close it to silence a ResourceWarning.
self.server_close()
# Use the passed in socket directly.
self.socket = socket.fromfd(fd, address_family, socket.SOCK_STREAM)
self.server_address = self.socket.getsockname()
if address_family != af_unix:
# If port was 0, this will record the bound port.
self.port = self.server_address[1]
if ssl_context is not None:
if isinstance(ssl_context, tuple):
ssl_context = load_ssl_context(*ssl_context)
elif ssl_context == "adhoc":
ssl_context = generate_adhoc_ssl_context()
self.socket = ssl_context.wrap_socket(self.socket, server_side=True)
self.ssl_context: ssl.SSLContext | None = ssl_context
else:
self.ssl_context = None
import importlib.metadata
self._server_version = f"Werkzeug/{importlib.metadata.version('werkzeug')}"
def log(self, type: str, message: str, *args: t.Any) -> None:
_log(type, message, *args)
def serve_forever(self, poll_interval: float = 0.5) -> None:
try:
super().serve_forever(poll_interval=poll_interval)
except KeyboardInterrupt:
pass
finally:
self.server_close()
def handle_error(
self, request: t.Any, client_address: tuple[str, int] | str
) -> None:
if self.passthrough_errors:
raise
return super().handle_error(request, client_address)
def log_startup(self) -> None:
"""Show information about the address when starting the server."""
dev_warning = (
"WARNING: This is a development server. Do not use it in a production"
" deployment. Use a production WSGI server instead."
)
dev_warning = _ansi_style(dev_warning, "bold", "red")
messages = [dev_warning]
if self.address_family == af_unix:
messages.append(f" * Running on {self.host}")
else:
scheme = "http" if self.ssl_context is None else "https"
display_hostname = self.host
if self.host in {"0.0.0.0", "::"}:
messages.append(f" * Running on all addresses ({self.host})")
if self.host == "0.0.0.0":
localhost = "127.0.0.1"
display_hostname = get_interface_ip(socket.AF_INET)
else:
localhost = "[::1]"
display_hostname = get_interface_ip(socket.AF_INET6)
messages.append(f" * Running on {scheme}://{localhost}:{self.port}")
if ":" in display_hostname:
display_hostname = f"[{display_hostname}]"
messages.append(f" * Running on {scheme}://{display_hostname}:{self.port}")
_log("info", "\n".join(messages))
class ThreadedWSGIServer(socketserver.ThreadingMixIn, BaseWSGIServer):
"""A WSGI server that handles concurrent requests in separate
threads.
Use :func:`make_server` to create a server instance.
"""
multithread = True
daemon_threads = True
class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer):
"""A WSGI server that handles concurrent requests in separate forked
processes.
Use :func:`make_server` to create a server instance.
"""
multiprocess = True
def __init__(
self,
host: str,
port: int,
app: WSGIApplication,
processes: int = 40,
handler: type[WSGIRequestHandler] | None = None,
passthrough_errors: bool = False,
ssl_context: _TSSLContextArg | None = None,
fd: int | None = None,
) -> None:
if not can_fork:
raise ValueError("Your platform does not support forking.")
super().__init__(host, port, app, handler, passthrough_errors, ssl_context, fd)
self.max_children = processes
def make_server(
host: str,
port: int,
app: WSGIApplication,
threaded: bool = False,
processes: int = 1,
request_handler: type[WSGIRequestHandler] | None = None,
passthrough_errors: bool = False,
ssl_context: _TSSLContextArg | None = None,
fd: int | None = None,
) -> BaseWSGIServer:
"""Create an appropriate WSGI server instance based on the value of
``threaded`` and ``processes``.
This is called from :func:`run_simple`, but can be used separately
to have access to the server object, such as to run it in a separate
thread.
See :func:`run_simple` for parameter docs.
"""
if threaded and processes > 1:
raise ValueError("Cannot have a multi-thread and multi-process server.")
if threaded:
return ThreadedWSGIServer(
host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd
)
if processes > 1:
return ForkingWSGIServer(
host,
port,
app,
processes,
request_handler,
passthrough_errors,
ssl_context,
fd=fd,
)
return BaseWSGIServer(
host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd
)
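# Example (a sketch): create a threaded server and run it in a background
# thread so the caller keeps control of the main thread.
#
#     import threading
#
#     srv = make_server("127.0.0.1", 0, app, threaded=True)
#     threading.Thread(target=srv.serve_forever, daemon=True).start()
#     ...
#     srv.shutdown()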
def is_running_from_reloader() -> bool:
"""Check if the server is running as a subprocess within the
Werkzeug reloader.
.. versionadded:: 0.10
"""
return os.environ.get("WERKZEUG_RUN_MAIN") == "true"
def run_simple(
hostname: str,
port: int,
application: WSGIApplication,
use_reloader: bool = False,
use_debugger: bool = False,
use_evalex: bool = True,
extra_files: t.Iterable[str] | None = None,
exclude_patterns: t.Iterable[str] | None = None,
reloader_interval: int = 1,
reloader_type: str = "auto",
threaded: bool = False,
processes: int = 1,
request_handler: type[WSGIRequestHandler] | None = None,
static_files: dict[str, str | tuple[str, str]] | None = None,
passthrough_errors: bool = False,
ssl_context: _TSSLContextArg | None = None,
) -> None:
"""Start a development server for a WSGI application. Various
optional features can be enabled.
.. warning::
Do not use the development server when deploying to production.
It is intended for use only during local development. It is not
designed to be particularly efficient, stable, or secure.
:param hostname: The host to bind to, for example ``'localhost'``.
Can be a domain, IPv4 or IPv6 address, or file path starting
with ``unix://`` for a Unix socket.
:param port: The port to bind to, for example ``8080``. Using ``0``
tells the OS to pick a random free port.
:param application: The WSGI application to run.
:param use_reloader: Use a reloader process to restart the server
process when files are changed.
:param use_debugger: Use Werkzeug's debugger, which will show
formatted tracebacks on unhandled exceptions.
:param use_evalex: Make the debugger interactive. A Python terminal
can be opened for any frame in the traceback. Some protection is
provided by requiring a PIN, but this should never be enabled
on a publicly visible server.
:param extra_files: The reloader will watch these files for changes
in addition to Python modules. For example, watch a
configuration file.
:param exclude_patterns: The reloader will ignore changes to any
files matching these :mod:`fnmatch` patterns. For example,
ignore cache files.
:param reloader_interval: How often the reloader tries to check for
changes.
:param reloader_type: The reloader to use. The ``'stat'`` reloader
is built in, but may require significant CPU to watch files. The
``'watchdog'`` reloader is much more efficient but requires
installing the ``watchdog`` package first.
:param threaded: Handle concurrent requests using threads. Cannot be
used with ``processes``.
:param processes: Handle concurrent requests using up to this number
of processes. Cannot be used with ``threaded``.
:param request_handler: Use a different
:class:`~BaseHTTPServer.BaseHTTPRequestHandler` subclass to
handle requests.
:param static_files: A dict mapping URL prefixes to directories to
serve static files from using
:class:`~werkzeug.middleware.SharedDataMiddleware`.
:param passthrough_errors: Don't catch unhandled exceptions at the
server level, let the server crash instead. If ``use_debugger``
is enabled, the debugger will still catch such errors.
:param ssl_context: Configure TLS to serve over HTTPS. Can be an
:class:`ssl.SSLContext` object, a ``(cert_file, key_file)``
tuple to create a typical context, or the string ``'adhoc'`` to
generate a temporary self-signed certificate.
.. versionchanged:: 2.1
Instructions are shown for dealing with an "address already in
use" error.
.. versionchanged:: 2.1
Running on ``0.0.0.0`` or ``::`` shows the loopback IP in
addition to a real IP.
.. versionchanged:: 2.1
The command-line interface was removed.
.. versionchanged:: 2.0
Running on ``0.0.0.0`` or ``::`` shows a real IP address that
was bound as well as a warning not to run the development server
in production.
.. versionchanged:: 2.0
The ``exclude_patterns`` parameter was added.
.. versionchanged:: 0.15
Bind to a Unix socket by passing a ``hostname`` that starts with
``unix://``.
.. versionchanged:: 0.10
Improved the reloader and added support for changing the backend
through the ``reloader_type`` parameter.
.. versionchanged:: 0.9
A command-line interface was added.
.. versionchanged:: 0.8
``ssl_context`` can be a tuple of paths to the certificate and
private key files.
.. versionchanged:: 0.6
The ``ssl_context`` parameter was added.
.. versionchanged:: 0.5
The ``static_files`` and ``passthrough_errors`` parameters were
added.
"""
if not isinstance(port, int):
raise TypeError("port must be an integer")
if static_files:
from .middleware.shared_data import SharedDataMiddleware
application = SharedDataMiddleware(application, static_files)
if use_debugger:
from .debug import DebuggedApplication
application = DebuggedApplication(application, evalex=use_evalex)
# Allow the specified hostname to use the debugger, in addition to
# localhost domains.
application.trusted_hosts.append(hostname)
if not is_running_from_reloader():
fd = None
else:
fd = int(os.environ["WERKZEUG_SERVER_FD"])
srv = make_server(
hostname,
port,
application,
threaded,
processes,
request_handler,
passthrough_errors,
ssl_context,
fd=fd,
)
srv.socket.set_inheritable(True)
os.environ["WERKZEUG_SERVER_FD"] = str(srv.fileno())
if not is_running_from_reloader():
srv.log_startup()
_log("info", _ansi_style("Press CTRL+C to quit", "yellow"))
if use_reloader:
from ._reloader import run_with_reloader
try:
run_with_reloader(
srv.serve_forever,
extra_files=extra_files,
exclude_patterns=exclude_patterns,
interval=reloader_interval,
reloader_type=reloader_type,
)
finally:
srv.server_close()
else:
srv.serve_forever()
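# Example usage (a sketch; ``app`` is a hypothetical WSGI callable):
#
#     def app(environ, start_response):
#         start_response("200 OK", [("Content-Type", "text/plain")])
#         return [b"Hello, World!"]
#
#     if __name__ == "__main__":
#         run_simple("localhost", 8080, app, use_reloader=True, use_debugger=True)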
|
from __future__ import annotations
import collections.abc as cabc
import http.client
import importlib.metadata
import json
import os
import shutil
import socket
import ssl
import sys
import typing as t
from importlib.metadata import PackageNotFoundError
from io import BytesIO
from pathlib import Path
from unittest.mock import Mock
from unittest.mock import patch
import pytest
from werkzeug import run_simple
from werkzeug._reloader import _find_stat_paths
from werkzeug._reloader import _find_watchdog_paths
from werkzeug._reloader import _get_args_for_reloading
from werkzeug._reloader import WatchdogReloaderLoop
from werkzeug.datastructures import FileStorage
from werkzeug.serving import make_ssl_devcert
from werkzeug.test import stream_encode_multipart
if t.TYPE_CHECKING:
from conftest import DevServerClient
from conftest import StartDevServer
try:
watchdog_version: str = importlib.metadata.version("watchdog")
except PackageNotFoundError:
watchdog_version = ""
@pytest.mark.parametrize(
"kwargs",
[
pytest.param({}, id="http"),
pytest.param({"ssl_context": "adhoc"}, id="https"),
pytest.param({"use_reloader": True}, id="reloader"),
pytest.param(
{"hostname": "unix"},
id="unix socket",
marks=pytest.mark.skipif(
not hasattr(socket, "AF_UNIX"), reason="requires unix socket support"
),
),
],
)
@pytest.mark.dev_server
def test_server(
tmp_path: Path, dev_server: StartDevServer, kwargs: dict[str, t.Any]
) -> None:
if kwargs.get("hostname") == "unix":
kwargs["hostname"] = f"unix://{tmp_path / 'test.sock'}"
client = dev_server(**kwargs)
r = client.request()
assert r.status == 200
assert r.json["PATH_INFO"] == "/"
@pytest.mark.dev_server
def test_untrusted_host(standard_app: DevServerClient) -> None:
r = standard_app.request(
"http://missing.test:1337/index.html#ignore",
headers={"x-base-url": standard_app.url},
)
assert r.json["HTTP_HOST"] == "missing.test:1337"
assert r.json["PATH_INFO"] == "/index.html"
host, _, port = r.json["HTTP_X_BASE_URL"].rpartition(":")
assert r.json["SERVER_NAME"] == host.partition("http://")[2]
assert r.json["SERVER_PORT"] == port
@pytest.mark.dev_server
def test_double_slash_path(standard_app: DevServerClient) -> None:
r = standard_app.request("//double-slash")
assert "double-slash" not in r.json["HTTP_HOST"]
assert r.json["PATH_INFO"] == "/double-slash"
@pytest.mark.dev_server
def test_500_error(standard_app: DevServerClient) -> None:
r = standard_app.request("/crash")
assert r.status == 500
assert b"Internal Server Error" in r.data
@pytest.mark.dev_server
def test_ssl_dev_cert(tmp_path: Path, dev_server: StartDevServer) -> None:
client = dev_server(ssl_context=make_ssl_devcert(os.fspath(tmp_path)))
r = client.request()
assert r.json["wsgi.url_scheme"] == "https"
@pytest.mark.dev_server
def test_ssl_object(dev_server: StartDevServer) -> None:
client = dev_server(ssl_context="custom")
r = client.request()
assert r.json["wsgi.url_scheme"] == "https"
require_watchdog = pytest.mark.skipif(
not watchdog_version, reason="watchdog not installed"
)
@pytest.mark.parametrize(
"reloader_type", ["stat", pytest.param("watchdog", marks=[require_watchdog])]
)
@pytest.mark.skipif(
os.name == "nt" and "CI" in os.environ, reason="unreliable on Windows during CI"
)
@pytest.mark.dev_server
def test_reloader_sys_path(
tmp_path: Path, dev_server: StartDevServer, reloader_type: str
) -> None:
"""This tests the general behavior of the reloader. It also tests
that fixing an import error triggers a reload, not just Python
retrying the failed import.
"""
real_path = tmp_path / "real_app.py"
real_path.write_text("syntax error causes import error")
client = dev_server("reloader", reloader_type=reloader_type)
assert client.request().status == 500
shutil.copyfile(Path(__file__).parent / "live_apps" / "standard_app.py", real_path)
client.wait_for_log(f"Detected change in {str(real_path)!r}")
client.wait_for_reload()
assert client.request().status == 200
@require_watchdog
@patch.object(WatchdogReloaderLoop, "trigger_reload")
def test_watchdog_reloader_ignores_opened(mock_trigger_reload: Mock) -> None:
from watchdog.events import EVENT_TYPE_MODIFIED
from watchdog.events import EVENT_TYPE_OPENED
from watchdog.events import FileModifiedEvent
reloader = WatchdogReloaderLoop()
modified_event = FileModifiedEvent("")
modified_event.event_type = EVENT_TYPE_MODIFIED
reloader.event_handler.on_any_event(modified_event)
mock_trigger_reload.assert_called_once()
mock_trigger_reload.reset_mock()
opened_event = FileModifiedEvent("")
opened_event.event_type = EVENT_TYPE_OPENED
reloader.event_handler.on_any_event(opened_event)
mock_trigger_reload.assert_not_called()
@pytest.mark.skipif(
watchdog_version < "5",
reason="'closed no write' event introduced in watchdog 5.0",
)
@patch.object(WatchdogReloaderLoop, "trigger_reload")
def test_watchdog_reloader_ignores_closed_no_write(mock_trigger_reload: Mock) -> None:
from watchdog.events import EVENT_TYPE_CLOSED_NO_WRITE
from watchdog.events import EVENT_TYPE_MODIFIED
from watchdog.events import FileModifiedEvent
reloader = WatchdogReloaderLoop()
modified_event = FileModifiedEvent("")
modified_event.event_type = EVENT_TYPE_MODIFIED
reloader.event_handler.on_any_event(modified_event)
mock_trigger_reload.assert_called_once()
mock_trigger_reload.reset_mock()
    closed_no_write_event = FileModifiedEvent("")
    closed_no_write_event.event_type = EVENT_TYPE_CLOSED_NO_WRITE
    reloader.event_handler.on_any_event(closed_no_write_event)
mock_trigger_reload.assert_not_called()
@pytest.mark.skipif(sys.version_info >= (3, 10), reason="not needed on >= 3.10")
def test_windows_get_args_for_reloading(
monkeypatch: pytest.MonkeyPatch, tmp_path: Path
) -> None:
argv = [str(tmp_path / "test.exe"), "run"]
monkeypatch.setattr("sys.executable", str(tmp_path / "python.exe"))
monkeypatch.setattr("sys.argv", argv)
monkeypatch.setattr("__main__.__package__", None)
monkeypatch.setattr("os.name", "nt")
rv = _get_args_for_reloading()
assert rv == argv
@pytest.mark.parametrize("find", [_find_stat_paths, _find_watchdog_paths])
def test_exclude_patterns(
find: t.Callable[[set[str], set[str]], cabc.Iterable[str]],
) -> None:
# Select a path to exclude from the unfiltered list, assert that it is present and
# then gets excluded.
paths = find(set(), set())
path_to_exclude = next(iter(paths))
assert any(p.startswith(path_to_exclude) for p in paths)
# Those paths should be excluded due to the pattern.
paths = find(set(), {f"{path_to_exclude}*"})
assert not any(p.startswith(path_to_exclude) for p in paths)
@pytest.mark.dev_server
def test_wrong_protocol(standard_app: DevServerClient) -> None:
"""An HTTPS request to an HTTP server doesn't show a traceback.
https://github.com/pallets/werkzeug/pull/838
"""
conn = http.client.HTTPSConnection(standard_app.addr)
with pytest.raises(ssl.SSLError):
conn.request("GET", f"https://{standard_app.addr}")
assert "Traceback" not in standard_app.read_log()
@pytest.mark.dev_server
def test_content_type_and_length(standard_app: DevServerClient) -> None:
r = standard_app.request()
assert "CONTENT_TYPE" not in r.json
assert "CONTENT_LENGTH" not in r.json
r = standard_app.request(body=b"{}", headers={"content-type": "application/json"})
assert r.json["CONTENT_TYPE"] == "application/json"
assert r.json["CONTENT_LENGTH"] == "2"
def test_port_is_int() -> None:
with pytest.raises(TypeError, match="port must be an integer"):
run_simple("127.0.0.1", "5000", None) # type: ignore[arg-type]
@pytest.mark.parametrize("send_length", [False, True])
@pytest.mark.dev_server
def test_chunked_request(
monkeypatch: pytest.MonkeyPatch, dev_server: StartDevServer, send_length: bool
) -> None:
stream, length, boundary = stream_encode_multipart(
{
"value": "this is text",
"file": FileStorage(
BytesIO(b"this is a file"),
filename="test.txt",
content_type="text/plain",
),
}
)
client = dev_server("data")
# Small block size to produce multiple chunks.
conn = client.connect(blocksize=128)
conn.putrequest("POST", "/")
conn.putheader("Transfer-Encoding", "chunked")
conn.putheader("Content-Type", f"multipart/form-data; boundary={boundary}")
# Sending the content-length header with chunked is invalid, but if
# a client does send it the server should ignore it. Previously the
# multipart parser would crash. Python's higher-level functions
    # won't send the header, which is why we use conn.putheader directly
    # in this test.
if send_length:
conn.putheader("Content-Length", "invalid")
expect_content_len = "invalid"
else:
expect_content_len = None
conn.endheaders(stream, encode_chunked=True)
r = conn.getresponse()
data = json.load(r)
r.close()
assert data["form"]["value"] == "this is text"
assert data["files"]["file"] == "this is a file"
environ = data["environ"]
assert environ["HTTP_TRANSFER_ENCODING"] == "chunked"
assert environ.get("CONTENT_LENGTH") == expect_content_len
assert environ["wsgi.input_terminated"]
@pytest.mark.dev_server
def test_multiple_headers_concatenated(standard_app: DevServerClient) -> None:
"""A header key can be sent multiple times. The server will join all
the values with commas.
https://tools.ietf.org/html/rfc3875#section-4.1.18
"""
# conn.request doesn't support multiple values.
conn = standard_app.connect()
conn.putrequest("GET", "/")
conn.putheader("XYZ", "a ") # trailing space is preserved
conn.putheader("X-Ignore-1", "ignore value")
conn.putheader("XYZ", " b") # leading space is collapsed
conn.putheader("X-Ignore-2", "ignore value")
conn.putheader("XYZ", "c ")
conn.putheader("X-Ignore-3", "ignore value")
conn.putheader("XYZ", "d")
conn.endheaders()
r = conn.getresponse()
data = json.load(r)
r.close()
assert data["HTTP_XYZ"] == "a ,b,c ,d"
@pytest.mark.dev_server
def test_multiline_header_folding(standard_app: DevServerClient) -> None:
"""A header value can be split over multiple lines with a leading
tab. The server will remove the newlines and preserve the tabs.
https://tools.ietf.org/html/rfc2616#section-2.2
"""
# conn.request doesn't support multiline values.
conn = standard_app.connect()
conn.putrequest("GET", "/")
conn.putheader("XYZ", "first", "second", "third")
conn.endheaders()
r = conn.getresponse()
data = json.load(r)
r.close()
assert data["HTTP_XYZ"] == "first\tsecond\tthird"
@pytest.mark.parametrize("endpoint", ["", "crash"])
@pytest.mark.dev_server
def test_streaming_close_response(dev_server: StartDevServer, endpoint: str) -> None:
"""When using HTTP/1.0, chunked encoding is not supported. Fall
back to Connection: close, but this allows no reliable way to
distinguish between complete and truncated responses.
"""
r = dev_server("streaming").request("/" + endpoint)
assert r.getheader("connection") == "close"
assert r.data == "".join(str(x) + "\n" for x in range(5)).encode()
@pytest.mark.dev_server
def test_streaming_chunked_response(dev_server: StartDevServer) -> None:
"""When using HTTP/1.1, use Transfer-Encoding: chunked for streamed
responses, since it can distinguish the end of the response without
closing the connection.
https://tools.ietf.org/html/rfc2616#section-3.6.1
"""
r = dev_server("streaming", threaded=True).request("/")
assert r.getheader("transfer-encoding") == "chunked"
assert r.data == "".join(str(x) + "\n" for x in range(5)).encode()
@pytest.mark.dev_server
def test_streaming_chunked_truncation(dev_server: StartDevServer) -> None:
"""When using HTTP/1.1, chunked encoding allows the client to detect
content truncated by a prematurely closed connection.
"""
with pytest.raises(http.client.IncompleteRead):
dev_server("streaming", threaded=True).request("/crash")
@pytest.mark.dev_server
def test_host_with_ipv6_scope(dev_server: StartDevServer) -> None:
client = dev_server(override_client_addr="fe80::1ff:fe23:4567:890a%eth2")
r = client.request("/crash")
assert r.status == 500
assert b"Internal Server Error" in r.data
assert "Logging error" not in client.read_log()
|
./temp_repos/werkzeug/src/werkzeug/serving.py
|
./temp_repos/werkzeug/tests/test_serving.py
|
werkzeug
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DechunkedInput'.
Context:
- Class Name: DechunkedInput
- Dependencies to Mock: rfile, app, host, handler, fd, ssl_context, processes, passthrough_errors, port
- Key Imports: socketserver, debug.tbtools, exceptions, _reloader, _typeshed.wsgi, debug, cryptography.hazmat.primitives.asymmetric, os, __future__, selectors
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
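A minimal sketch satisfying these requirements is shown below. It assumes,
as in werkzeug.serving, that DechunkedInput takes only a file-like ``rfile``
and raises ``OSError`` on a malformed chunk header; the other listed
dependencies are not constructor arguments of this class, so only ``rfile``
is mocked here.

import io
import unittest
from unittest.mock import MagicMock

from werkzeug.serving import DechunkedInput


class DechunkedInputTest(unittest.TestCase):
    def test_read_joins_chunks(self):
        # Arrange: two chunks followed by the terminating zero-size chunk.
        rfile = io.BytesIO(b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n")
        stream = DechunkedInput(rfile)
        # Act
        data = stream.read()
        # Assert
        self.assertEqual(data, b"hello world")

    def test_invalid_chunk_header_raises(self):
        # Arrange: readline() returns a length that is not valid hex.
        rfile = MagicMock()
        rfile.readline.return_value = b"not-hex\r\n"
        stream = DechunkedInput(rfile)
        # Act / Assert
        with self.assertRaises(OSError):
            stream.read()


if __name__ == "__main__":
    unittest.main()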
|
DechunkedInput
|
python
|
from __future__ import annotations
import typing as t
from io import BytesIO
from urllib.parse import parse_qsl
from ._internal import _plain_int
from .datastructures import FileStorage
from .datastructures import Headers
from .datastructures import MultiDict
from .exceptions import RequestEntityTooLarge
from .http import parse_options_header
from .sansio.multipart import Data
from .sansio.multipart import Epilogue
from .sansio.multipart import Field
from .sansio.multipart import File
from .sansio.multipart import MultipartDecoder
from .sansio.multipart import NeedData
from .wsgi import get_content_length
from .wsgi import get_input_stream
# there are some platforms where SpooledTemporaryFile is not available.
# In that case we need to provide a fallback.
try:
from tempfile import SpooledTemporaryFile
except ImportError:
from tempfile import TemporaryFile
SpooledTemporaryFile = None # type: ignore
if t.TYPE_CHECKING:
import typing as te
from _typeshed.wsgi import WSGIEnvironment
t_parse_result = tuple[
t.IO[bytes], MultiDict[str, str], MultiDict[str, FileStorage]
]
class TStreamFactory(te.Protocol):
def __call__(
self,
total_content_length: int | None,
content_type: str | None,
filename: str | None,
content_length: int | None = None,
) -> t.IO[bytes]: ...
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
def default_stream_factory(
total_content_length: int | None,
content_type: str | None,
filename: str | None,
content_length: int | None = None,
) -> t.IO[bytes]:
max_size = 1024 * 500
if SpooledTemporaryFile is not None:
return t.cast(t.IO[bytes], SpooledTemporaryFile(max_size=max_size, mode="rb+"))
elif total_content_length is None or total_content_length > max_size:
return t.cast(t.IO[bytes], TemporaryFile("rb+"))
return BytesIO()
def parse_form_data(
environ: WSGIEnvironment,
stream_factory: TStreamFactory | None = None,
max_form_memory_size: int | None = None,
max_content_length: int | None = None,
cls: type[MultiDict[str, t.Any]] | None = None,
silent: bool = True,
*,
max_form_parts: int | None = None,
) -> t_parse_result:
"""Parse the form data in the environ and return it as tuple in the form
``(stream, form, files)``. You should only call this method if the
transport method is `POST`, `PUT`, or `PATCH`.
If the mimetype of the data transmitted is `multipart/form-data` the
files multidict will be filled with `FileStorage` objects. If the
mimetype is unknown the input stream is wrapped and returned as first
argument, else the stream is empty.
This is a shortcut for the common usage of :class:`FormDataParser`.
:param environ: the WSGI environment to be used for parsing.
:param stream_factory: An optional callable that returns a new read and
writeable file descriptor. This callable works
the same as :meth:`Response._get_file_stream`.
:param max_form_memory_size: the maximum number of bytes to be accepted for
in-memory stored form data. If the data
exceeds the value specified an
:exc:`~exceptions.RequestEntityTooLarge`
exception is raised.
:param max_content_length: If this is provided and the transmitted data
is longer than this value an
:exc:`~exceptions.RequestEntityTooLarge`
exception is raised.
:param cls: an optional dict class to use. If this is not specified
or `None` the default :class:`MultiDict` is used.
:param silent: If set to False parsing errors will not be caught.
:param max_form_parts: The maximum number of multipart parts to be parsed. If this
is exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised.
:return: A tuple in the form ``(stream, form, files)``.
.. versionchanged:: 3.0
The ``charset`` and ``errors`` parameters were removed.
.. versionchanged:: 2.3
Added the ``max_form_parts`` parameter.
.. versionadded:: 0.5.1
Added the ``silent`` parameter.
.. versionadded:: 0.5
Added the ``max_form_memory_size``, ``max_content_length``, and ``cls``
parameters.
"""
return FormDataParser(
stream_factory=stream_factory,
max_form_memory_size=max_form_memory_size,
max_content_length=max_content_length,
max_form_parts=max_form_parts,
silent=silent,
cls=cls,
).parse_from_environ(environ)
class FormDataParser:
"""This class implements parsing of form data for Werkzeug. By itself
it can parse multipart and url encoded form data. It can be subclassed
and extended but for most mimetypes it is a better idea to use the
untouched stream and expose it as separate attributes on a request
object.
:param stream_factory: An optional callable that returns a new read and
writeable file descriptor. This callable works
the same as :meth:`Response._get_file_stream`.
:param max_form_memory_size: the maximum number of bytes to be accepted for
in-memory stored form data. If the data
exceeds the value specified an
:exc:`~exceptions.RequestEntityTooLarge`
exception is raised.
:param max_content_length: If this is provided and the transmitted data
is longer than this value an
:exc:`~exceptions.RequestEntityTooLarge`
exception is raised.
:param cls: an optional dict class to use. If this is not specified
or `None` the default :class:`MultiDict` is used.
:param silent: If set to False parsing errors will not be caught.
:param max_form_parts: The maximum number of multipart parts to be parsed. If this
is exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised.
.. versionchanged:: 3.0
The ``charset`` and ``errors`` parameters were removed.
.. versionchanged:: 3.0
The ``parse_functions`` attribute and ``get_parse_func`` methods were removed.
.. versionchanged:: 2.2.3
Added the ``max_form_parts`` parameter.
.. versionadded:: 0.8
"""
def __init__(
self,
stream_factory: TStreamFactory | None = None,
max_form_memory_size: int | None = None,
max_content_length: int | None = None,
cls: type[MultiDict[str, t.Any]] | None = None,
silent: bool = True,
*,
max_form_parts: int | None = None,
) -> None:
if stream_factory is None:
stream_factory = default_stream_factory
self.stream_factory = stream_factory
self.max_form_memory_size = max_form_memory_size
self.max_content_length = max_content_length
self.max_form_parts = max_form_parts
if cls is None:
cls = t.cast("type[MultiDict[str, t.Any]]", MultiDict)
self.cls = cls
self.silent = silent
def parse_from_environ(self, environ: WSGIEnvironment) -> t_parse_result:
"""Parses the information from the environment as form data.
:param environ: the WSGI environment to be used for parsing.
:return: A tuple in the form ``(stream, form, files)``.
"""
stream = get_input_stream(environ, max_content_length=self.max_content_length)
content_length = get_content_length(environ)
mimetype, options = parse_options_header(environ.get("CONTENT_TYPE"))
return self.parse(
stream,
content_length=content_length,
mimetype=mimetype,
options=options,
)
def parse(
self,
stream: t.IO[bytes],
mimetype: str,
content_length: int | None,
options: dict[str, str] | None = None,
) -> t_parse_result:
"""Parses the information from the given stream, mimetype,
content length and mimetype parameters.
:param stream: an input stream
:param mimetype: the mimetype of the data
:param content_length: the content length of the incoming data
:param options: optional mimetype parameters (used for
the multipart boundary for instance)
:return: A tuple in the form ``(stream, form, files)``.
.. versionchanged:: 3.0
The invalid ``application/x-url-encoded`` content type is not
treated as ``application/x-www-form-urlencoded``.
"""
if mimetype == "multipart/form-data":
parse_func = self._parse_multipart
elif mimetype == "application/x-www-form-urlencoded":
parse_func = self._parse_urlencoded
else:
return stream, self.cls(), self.cls()
if options is None:
options = {}
try:
return parse_func(stream, mimetype, content_length, options)
except ValueError:
if not self.silent:
raise
return stream, self.cls(), self.cls()
def _parse_multipart(
self,
stream: t.IO[bytes],
mimetype: str,
content_length: int | None,
options: dict[str, str],
) -> t_parse_result:
parser = MultiPartParser(
stream_factory=self.stream_factory,
max_form_memory_size=self.max_form_memory_size,
max_form_parts=self.max_form_parts,
cls=self.cls,
)
boundary = options.get("boundary", "").encode("ascii")
if not boundary:
raise ValueError("Missing boundary")
form, files = parser.parse(stream, boundary, content_length)
return stream, form, files
def _parse_urlencoded(
self,
stream: t.IO[bytes],
mimetype: str,
content_length: int | None,
options: dict[str, str],
) -> t_parse_result:
if (
self.max_form_memory_size is not None
and content_length is not None
and content_length > self.max_form_memory_size
):
raise RequestEntityTooLarge()
items = parse_qsl(
stream.read().decode(),
keep_blank_values=True,
errors="werkzeug.url_quote",
)
return stream, self.cls(items), self.cls()
class MultiPartParser:
def __init__(
self,
stream_factory: TStreamFactory | None = None,
max_form_memory_size: int | None = None,
cls: type[MultiDict[str, t.Any]] | None = None,
buffer_size: int = 64 * 1024,
max_form_parts: int | None = None,
) -> None:
self.max_form_memory_size = max_form_memory_size
self.max_form_parts = max_form_parts
if stream_factory is None:
stream_factory = default_stream_factory
self.stream_factory = stream_factory
if cls is None:
cls = t.cast("type[MultiDict[str, t.Any]]", MultiDict)
self.cls = cls
self.buffer_size = buffer_size
def fail(self, message: str) -> te.NoReturn:
raise ValueError(message)
def get_part_charset(self, headers: Headers) -> str:
# Figure out input charset for current part
content_type = headers.get("content-type")
if content_type:
parameters = parse_options_header(content_type)[1]
ct_charset = parameters.get("charset", "").lower()
# A safe list of encodings. Modern clients should only send ASCII or UTF-8.
# This list will not be extended further.
if ct_charset in {"ascii", "us-ascii", "utf-8", "iso-8859-1"}:
return ct_charset
return "utf-8"
def start_file_streaming(
self, event: File, total_content_length: int | None
) -> t.IO[bytes]:
content_type = event.headers.get("content-type")
try:
content_length = _plain_int(event.headers["content-length"])
except (KeyError, ValueError):
content_length = 0
container = self.stream_factory(
total_content_length=total_content_length,
filename=event.filename,
content_type=content_type,
content_length=content_length,
)
return container
def parse(
self, stream: t.IO[bytes], boundary: bytes, content_length: int | None
) -> tuple[MultiDict[str, str], MultiDict[str, FileStorage]]:
current_part: Field | File
field_size: int | None = None
container: t.IO[bytes] | list[bytes]
_write: t.Callable[[bytes], t.Any]
parser = MultipartDecoder(
boundary,
max_form_memory_size=self.max_form_memory_size,
max_parts=self.max_form_parts,
)
fields = []
files = []
for data in _chunk_iter(stream.read, self.buffer_size):
parser.receive_data(data)
event = parser.next_event()
while not isinstance(event, (Epilogue, NeedData)):
if isinstance(event, Field):
current_part = event
field_size = 0
container = []
_write = container.append
elif isinstance(event, File):
current_part = event
field_size = None
container = self.start_file_streaming(event, content_length)
_write = container.write
elif isinstance(event, Data):
if self.max_form_memory_size is not None and field_size is not None:
# Ensure that accumulated data events do not exceed limit.
# Also checked within single event in MultipartDecoder.
field_size += len(event.data)
if field_size > self.max_form_memory_size:
raise RequestEntityTooLarge()
_write(event.data)
if not event.more_data:
if isinstance(current_part, Field):
value = b"".join(container).decode(
self.get_part_charset(current_part.headers), "replace"
)
fields.append((current_part.name, value))
else:
container = t.cast(t.IO[bytes], container)
container.seek(0)
files.append(
(
current_part.name,
FileStorage(
container,
current_part.filename,
current_part.name,
headers=current_part.headers,
),
)
)
event = parser.next_event()
return self.cls(fields), self.cls(files)
def _chunk_iter(read: t.Callable[[int], bytes], size: int) -> t.Iterator[bytes | None]:
"""Read data in chunks for multipart/form-data parsing. Stop if no data is read.
Yield ``None`` at the end to signal end of parsing.
"""
while True:
data = read(size)
if not data:
break
yield data
yield None
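# Example usage (a sketch): parse a urlencoded POST body from a WSGI environ.
#
#     from werkzeug.test import create_environ
#
#     environ = create_environ("/submit", method="POST", data={"name": "value"})
#     stream, form, files = parse_form_data(environ)
#     assert form["name"] == "value"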
|
import csv
import io
from os.path import dirname
from os.path import join
import pytest
from werkzeug import formparser
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import RequestEntityTooLarge
from werkzeug.formparser import FormDataParser
from werkzeug.formparser import parse_form_data
from werkzeug.test import Client
from werkzeug.test import create_environ
from werkzeug.wrappers import Request
from werkzeug.wrappers import Response
@Request.application
def form_data_consumer(request):
result_object = request.args["object"]
if result_object == "text":
return Response(repr(request.form["text"]))
f = request.files[result_object]
return Response(
b"\n".join(
(
repr(f.filename).encode("ascii"),
repr(f.name).encode("ascii"),
repr(f.content_type).encode("ascii"),
f.stream.read(),
)
)
)
def get_contents(filename):
with open(filename, "rb") as f:
return f.read()
class TestFormParser:
def test_limiting(self):
data = b"foo=Hello+World&bar=baz"
req = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="application/x-www-form-urlencoded",
method="POST",
)
req.max_content_length = 400
assert req.form["foo"] == "Hello World"
req = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="application/x-www-form-urlencoded",
method="POST",
)
req.max_form_memory_size = 7
pytest.raises(RequestEntityTooLarge, lambda: req.form["foo"])
req = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="application/x-www-form-urlencoded",
method="POST",
)
req.max_form_memory_size = 400
assert req.form["foo"] == "Hello World"
input_stream = io.BytesIO(b"foo=123456")
req = Request.from_values(
input_stream=input_stream,
content_type="application/x-www-form-urlencoded",
method="POST",
)
req.max_content_length = 4
pytest.raises(RequestEntityTooLarge, lambda: req.form["foo"])
# content-length was set, so request could exit early without reading anything
assert input_stream.read() == b"foo=123456"
data = (
b"--foo\r\nContent-Disposition: form-field; name=foo\r\n\r\n"
b"Hello World\r\n"
b"--foo\r\nContent-Disposition: form-field; name=bar\r\n\r\n"
b"bar=baz\r\n--foo--"
)
req = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=foo",
method="POST",
)
req.max_content_length = 400
assert req.form["foo"] == "Hello World"
req = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=foo",
method="POST",
)
req.max_form_memory_size = 7
pytest.raises(RequestEntityTooLarge, lambda: req.form["foo"])
req = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=foo",
method="POST",
)
req.max_form_memory_size = 400
assert req.form["foo"] == "Hello World"
req = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=foo",
method="POST",
)
req.max_form_parts = 1
pytest.raises(RequestEntityTooLarge, lambda: req.form["foo"])
def test_urlencoded_no_max(self) -> None:
r = Request.from_values(method="POST", data={"a": 1, "b": 2})
r.max_form_parts = 1
assert r.form["a"] == "1"
assert r.form["b"] == "2"
def test_urlencoded_silent_decode(self) -> None:
r = Request.from_values(
data=b"\x80",
content_type="application/x-www-form-urlencoded",
method="POST",
)
assert not r.form
def test_missing_multipart_boundary(self):
data = (
b"--foo\r\nContent-Disposition: form-field; name=foo\r\n\r\n"
b"Hello World\r\n"
b"--foo\r\nContent-Disposition: form-field; name=bar\r\n\r\n"
b"bar=baz\r\n--foo--"
)
req = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data",
method="POST",
)
assert req.form == {}
def test_chunk_split_on_line_break_before_epilogue(self):
data = b"".join(
(
# exactly 64 bytes of header
b"--thirteenbytes\r\n",
b"Content-Disposition: form-data; name=tx3065\r\n\r\n",
# payload that fills 65535 bytes together with the header
b"\n".join([b"\r" * 31] * 2045 + [b"y" * 31]),
# This newline is split by the first chunk
b"\r\n",
# extra payload that also has the final newline split exactly
# at the chunk size.
b"\n".join([b"\r" * 31] * 2047 + [b"x" * 30]),
b"\r\n--thirteenbytes--",
)
)
req = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=thirteenbytes",
method="POST",
)
assert len(req.form["tx3065"]) == (131072 - 64 - 1)
assert req.form["tx3065"][-1] == "x"
assert req.form["tx3065"][65470:65473] == "y\r\n"
def test_parse_form_data_put_without_content(self):
# A PUT without a Content-Type header returns empty data
# Both rfc1945 and rfc2616 (1.0 and 1.1) say "Any HTTP/[1.0/1.1] message
# containing an entity-body SHOULD include a Content-Type header field
# defining the media type of that body." In the case where either
# headers are omitted, parse_form_data should still work.
env = create_environ("/foo", "http://example.org/", method="PUT")
stream, form, files = formparser.parse_form_data(env)
assert stream.read() == b""
assert len(form) == 0
assert len(files) == 0
def test_parse_form_data_get_without_content(self):
env = create_environ("/foo", "http://example.org/", method="GET")
stream, form, files = formparser.parse_form_data(env)
assert stream.read() == b""
assert len(form) == 0
assert len(files) == 0
@pytest.mark.parametrize(
("no_spooled", "size"), ((False, 100), (False, 3000), (True, 100), (True, 3000))
)
def test_default_stream_factory(self, no_spooled, size, monkeypatch):
if no_spooled:
monkeypatch.setattr("werkzeug.formparser.SpooledTemporaryFile", None)
data = b"a,b,c\n" * size
with Request.from_values(
data={"foo": (io.BytesIO(data), "test.txt")}, method="POST"
) as req:
reader = csv.reader(io.TextIOWrapper(req.files["foo"]))
# This fails if file_storage doesn't implement IOBase.
# https://github.com/pallets/werkzeug/issues/1344
# https://github.com/python/cpython/pull/3249
assert sum(1 for _ in reader) == size
def test_parse_bad_content_type(self):
parser = FormDataParser()
assert parser.parse("", "bad-mime-type", 0) == (
"",
MultiDict([]),
MultiDict([]),
)
def test_parse_from_environ(self):
parser = FormDataParser()
stream, _, _ = parser.parse_from_environ({"wsgi.input": ""})
assert stream is not None
# TODO Fix the ResourceErrors, somewhere in here a SpooledTemporaryFile is not
# getting closed. This causes PytestUnraisableExceptionWarning.
class TestMultiPart:
def test_basic(self):
resources = join(dirname(__file__), "multipart")
client = Client(form_data_consumer)
repository = [
(
"firefox3-2png1txt",
"---------------------------186454651713519341951581030105",
[
("anchor.png", "file1", "image/png", "file1.png"),
("application_edit.png", "file2", "image/png", "file2.png"),
],
"example text",
),
(
"firefox3-2pnglongtext",
"---------------------------14904044739787191031754711748",
[
("accept.png", "file1", "image/png", "file1.png"),
("add.png", "file2", "image/png", "file2.png"),
],
"--long text\r\n--with boundary\r\n--lookalikes--",
),
(
"opera8-2png1txt",
"----------zEO9jQKmLc2Cq88c23Dx19",
[
("arrow_branch.png", "file1", "image/png", "file1.png"),
("award_star_bronze_1.png", "file2", "image/png", "file2.png"),
],
"blafasel öäü",
),
(
"webkit3-2png1txt",
"----WebKitFormBoundaryjdSFhcARk8fyGNy6",
[
("gtk-apply.png", "file1", "image/png", "file1.png"),
("gtk-no.png", "file2", "image/png", "file2.png"),
],
"this is another text with ümläüts",
),
(
"ie6-2png1txt",
"---------------------------7d91b03a20128",
[
("file1.png", "file1", "image/x-png", "file1.png"),
("file2.png", "file2", "image/x-png", "file2.png"),
],
"ie6 sucks :-/",
),
]
for name, boundary, files, text in repository:
folder = join(resources, name)
data = get_contents(join(folder, "request.http"))
for filename, field, content_type, fsname in files:
with client.post(
f"/?object={field}",
data=data,
content_type=f'multipart/form-data; boundary="{boundary}"',
content_length=len(data),
) as response:
lines = response.get_data().split(b"\n", 3)
assert lines[0] == repr(filename).encode("ascii")
assert lines[1] == repr(field).encode("ascii")
assert lines[2] == repr(content_type).encode("ascii")
assert lines[3] == get_contents(join(folder, fsname))
with client.post(
"/?object=text",
data=data,
content_type=f'multipart/form-data; boundary="{boundary}"',
content_length=len(data),
) as response:
assert response.get_data() == repr(text).encode()
@pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
def test_ie7_unc_path(self):
client = Client(form_data_consumer)
data_file = join(dirname(__file__), "multipart", "ie7_full_path_request.http")
data = get_contents(data_file)
boundary = "---------------------------7da36d1b4a0164"
with client.post(
"/?object=cb_file_upload_multiple",
data=data,
content_type=f'multipart/form-data; boundary="{boundary}"',
content_length=len(data),
) as response:
lines = response.get_data().split(b"\n", 3)
assert lines[0] == b"'Sellersburg Town Council Meeting 02-22-2010doc.doc'"
def test_end_of_file(self):
# This test looks innocent but it was actually timing out in
# the Werkzeug 0.5 release version (#394)
data = (
b"--foo\r\n"
b'Content-Disposition: form-data; name="test"; filename="test.txt"\r\n'
b"Content-Type: text/plain\r\n\r\n"
b"file contents and no end"
)
with Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=foo",
method="POST",
) as data:
assert not data.files
assert not data.form
def test_file_no_content_type(self):
data = (
b"--foo\r\n"
b'Content-Disposition: form-data; name="test"; filename="test.txt"\r\n\r\n'
b"file contents\r\n--foo--"
)
with Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=foo",
method="POST",
) as data:
assert data.files["test"].filename == "test.txt"
assert data.files["test"].read() == b"file contents"
def test_extra_newline(self):
# this test looks innocent but it was actually timing out in
# the Werkzeug 0.5 release version (#394)
data = (
b"\r\n\r\n--foo\r\n"
b'Content-Disposition: form-data; name="foo"\r\n\r\n'
b"a string\r\n"
b"--foo--"
)
data = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=foo",
method="POST",
)
assert not data.files
assert data.form["foo"] == "a string"
def test_headers(self):
data = (
b"--foo\r\n"
b'Content-Disposition: form-data; name="foo"; filename="foo.txt"\r\n'
b"X-Custom-Header: blah\r\n"
b"Content-Type: text/plain; charset=utf-8\r\n\r\n"
b"file contents, just the contents\r\n"
b"--foo--"
)
with Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=foo",
method="POST",
) as req:
foo = req.files["foo"]
assert foo.mimetype == "text/plain"
assert foo.mimetype_params == {"charset": "utf-8"}
assert foo.headers["content-type"] == foo.content_type
assert foo.content_type == "text/plain; charset=utf-8"
assert foo.headers["x-custom-header"] == "blah"
@pytest.mark.parametrize("ending", [b"\n", b"\r", b"\r\n"])
def test_nonstandard_line_endings(self, ending: bytes):
data = ending.join(
(
b"--foo",
b"Content-Disposition: form-data; name=foo",
b"",
b"this is just bar",
b"--foo",
b"Content-Disposition: form-data; name=bar",
b"",
b"blafasel",
b"--foo--",
)
)
req = Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=foo",
method="POST",
)
assert req.form["foo"] == "this is just bar"
assert req.form["bar"] == "blafasel"
def test_failures(self):
def parse_multipart(stream, boundary, content_length):
parser = formparser.MultiPartParser(content_length)
return parser.parse(stream, boundary, content_length)
data = b"--foo\r\n\r\nHello World\r\n--foo--"
pytest.raises(ValueError, parse_multipart, io.BytesIO(data), b"foo", len(data))
data = (
b"--foo\r\nContent-Disposition: form-field; name=foo\r\n\r\nHello World\r\n"
)
pytest.raises(ValueError, parse_multipart, io.BytesIO(data), b"foo", len(data))
def test_empty_multipart(self):
environ = {}
data = b"--boundary--"
environ["REQUEST_METHOD"] = "POST"
environ["CONTENT_TYPE"] = "multipart/form-data; boundary=boundary"
environ["CONTENT_LENGTH"] = str(len(data))
environ["wsgi.input"] = io.BytesIO(data)
stream, form, files = parse_form_data(environ, silent=False)
rv = stream.read()
assert rv == b""
assert form == MultiDict()
assert files == MultiDict()
class TestMultiPartParser:
def test_constructor_not_pass_stream_factory_and_cls(self):
parser = formparser.MultiPartParser()
assert parser.stream_factory is formparser.default_stream_factory
assert parser.cls is MultiDict
def test_constructor_pass_stream_factory_and_cls(self):
def stream_factory():
pass
parser = formparser.MultiPartParser(stream_factory=stream_factory, cls=dict)
assert parser.stream_factory is stream_factory
assert parser.cls is dict
def test_file_rfc2231_filename_continuations(self):
data = (
b"--foo\r\n"
b"Content-Type: text/plain; charset=utf-8\r\n"
b"Content-Disposition: form-data; name=rfc2231;\r\n"
b" filename*0*=ascii''a%20b%20;\r\n"
b" filename*1*=c%20d%20;\r\n"
b' filename*2="e f.txt"\r\n\r\n'
b"file contents\r\n--foo--"
)
with Request.from_values(
input_stream=io.BytesIO(data),
content_length=len(data),
content_type="multipart/form-data; boundary=foo",
method="POST",
) as request:
assert request.files["rfc2231"].filename == "a b c d e f.txt"
assert request.files["rfc2231"].read() == b"file contents"
def test_multipart_max_form_memory_size() -> None:
"""max_form_memory_size is tracked across multiple data events."""
data = b"--bound\r\nContent-Disposition: form-field; name=a\r\n\r\n"
data += b"a" * 15 + b"\r\n--bound--"
# The buffer size is less than the max size, so multiple data events will be
# returned. The field size is greater than the max.
parser = formparser.MultiPartParser(max_form_memory_size=10, buffer_size=5)
with pytest.raises(RequestEntityTooLarge):
parser.parse(io.BytesIO(data), b"bound", None)
|
./temp_repos/werkzeug/src/werkzeug/formparser.py
|
./temp_repos/werkzeug/tests/test_formparser.py
|
werkzeug
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'FormDataParser'.
Context:
- Class Name: FormDataParser
- Dependencies to Mock: max_content_length, silent, cls, stream_factory, max_form_parts, max_form_memory_size, buffer_size
- Key Imports: sansio.multipart, io, exceptions, datastructures, typing, _internal, http, tempfile, _typeshed.wsgi, urllib.parse
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
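A minimal sketch satisfying these requirements is shown below. The listed
dependencies are constructor options holding plain values, so they are set
explicitly rather than patched; MagicMock stands in for the ``stream_factory``
hook, which is never invoked in the failure case shown.

import io
import unittest
from unittest.mock import MagicMock

from werkzeug.datastructures import MultiDict
from werkzeug.formparser import FormDataParser


class FormDataParserTest(unittest.TestCase):
    def test_parse_urlencoded_success(self):
        # Arrange
        body = b"a=1&b=2"
        parser = FormDataParser()
        # Act
        stream, form, files = parser.parse(
            io.BytesIO(body), "application/x-www-form-urlencoded", len(body)
        )
        # Assert
        self.assertEqual(form["a"], "1")
        self.assertEqual(form["b"], "2")
        self.assertEqual(files, MultiDict())

    def test_parse_multipart_without_boundary_fails_loudly(self):
        # Arrange: silent=False lets the "Missing boundary" error escape.
        parser = FormDataParser(silent=False, stream_factory=MagicMock())
        # Act / Assert
        with self.assertRaises(ValueError):
            parser.parse(io.BytesIO(b""), "multipart/form-data", 0)


if __name__ == "__main__":
    unittest.main()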
|
FormDataParser
|
python
|
"""
X-Forwarded-For Proxy Fix
=========================
This module provides a middleware that adjusts the WSGI environ based on
``X-Forwarded-`` headers that proxies in front of an application may
set.
When an application is running behind a proxy server, WSGI may see the
request as coming from that server rather than the real client. Proxies
set various headers to track where the request actually came from.
This middleware should only be used if the application is actually
behind such a proxy, and should be configured with the number of proxies
that are chained in front of it. Not all proxies set all the headers.
Since incoming headers can be faked, you must set how many proxies are
setting each header so the middleware knows what to trust.
.. autoclass:: ProxyFix
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
from __future__ import annotations
import typing as t
from ..http import parse_list_header
if t.TYPE_CHECKING:
from _typeshed.wsgi import StartResponse
from _typeshed.wsgi import WSGIApplication
from _typeshed.wsgi import WSGIEnvironment
class ProxyFix:
"""Adjust the WSGI environ based on ``X-Forwarded-`` that proxies in
front of the application may set.
- ``X-Forwarded-For`` sets ``REMOTE_ADDR``.
- ``X-Forwarded-Proto`` sets ``wsgi.url_scheme``.
- ``X-Forwarded-Host`` sets ``HTTP_HOST``, ``SERVER_NAME``, and
``SERVER_PORT``.
- ``X-Forwarded-Port`` sets ``HTTP_HOST`` and ``SERVER_PORT``.
- ``X-Forwarded-Prefix`` sets ``SCRIPT_NAME``.
You must tell the middleware how many proxies set each header so it
knows what values to trust. It is a security issue to trust values
that came from the client rather than a proxy.
The original values of the headers are stored in the WSGI
environ as ``werkzeug.proxy_fix.orig``, a dict.
:param app: The WSGI application to wrap.
:param x_for: Number of values to trust for ``X-Forwarded-For``.
:param x_proto: Number of values to trust for ``X-Forwarded-Proto``.
:param x_host: Number of values to trust for ``X-Forwarded-Host``.
:param x_port: Number of values to trust for ``X-Forwarded-Port``.
:param x_prefix: Number of values to trust for
``X-Forwarded-Prefix``.
.. code-block:: python
from werkzeug.middleware.proxy_fix import ProxyFix
# App is behind one proxy that sets the -For and -Host headers.
app = ProxyFix(app, x_for=1, x_host=1)
.. versionchanged:: 1.0
The ``num_proxies`` argument and attribute; the ``get_remote_addr`` method; and
the environ keys ``orig_remote_addr``, ``orig_wsgi_url_scheme``, and
``orig_http_host`` were removed.
.. versionchanged:: 0.15
All headers support multiple values. Each header is configured with a separate
number of trusted proxies.
.. versionchanged:: 0.15
Original WSGI environ values are stored in the ``werkzeug.proxy_fix.orig`` dict.
.. versionchanged:: 0.15
Support ``X-Forwarded-Port`` and ``X-Forwarded-Prefix``.
.. versionchanged:: 0.15
``X-Forwarded-Host`` and ``X-Forwarded-Port`` modify
``SERVER_NAME`` and ``SERVER_PORT``.
"""
def __init__(
self,
app: WSGIApplication,
x_for: int = 1,
x_proto: int = 1,
x_host: int = 0,
x_port: int = 0,
x_prefix: int = 0,
) -> None:
self.app = app
self.x_for = x_for
self.x_proto = x_proto
self.x_host = x_host
self.x_port = x_port
self.x_prefix = x_prefix
def _get_real_value(self, trusted: int, value: str | None) -> str | None:
"""Get the real value from a list header based on the configured
number of trusted proxies.
:param trusted: Number of values to trust in the header.
:param value: Comma separated list header value to parse.
:return: The real value, or ``None`` if there are fewer values
than the number of trusted proxies.
.. versionchanged:: 1.0
Renamed from ``_get_trusted_comma``.
.. versionadded:: 0.15
"""
if not (trusted and value):
return None
values = parse_list_header(value)
if len(values) >= trusted:
return values[-trusted]
return None
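    # Worked example (illustrative): each trusted proxy appends one value,
    # so the real client as seen by the chain is ``values[-trusted]``. With
    # ``X-Forwarded-For: 192.168.0.1, 192.168.0.2`` and ``trusted=2`` this
    # returns ``192.168.0.1``; with ``trusted=1`` only the last value, the
    # one appended by the single trusted proxy, is used.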
def __call__(
self, environ: WSGIEnvironment, start_response: StartResponse
) -> t.Iterable[bytes]:
"""Modify the WSGI environ based on the various ``Forwarded``
headers before calling the wrapped application. Store the
original environ values in ``werkzeug.proxy_fix.orig_{key}``.
"""
environ_get = environ.get
orig_remote_addr = environ_get("REMOTE_ADDR")
orig_wsgi_url_scheme = environ_get("wsgi.url_scheme")
orig_http_host = environ_get("HTTP_HOST")
environ.update(
{
"werkzeug.proxy_fix.orig": {
"REMOTE_ADDR": orig_remote_addr,
"wsgi.url_scheme": orig_wsgi_url_scheme,
"HTTP_HOST": orig_http_host,
"SERVER_NAME": environ_get("SERVER_NAME"),
"SERVER_PORT": environ_get("SERVER_PORT"),
"SCRIPT_NAME": environ_get("SCRIPT_NAME"),
}
}
)
x_for = self._get_real_value(self.x_for, environ_get("HTTP_X_FORWARDED_FOR"))
if x_for:
environ["REMOTE_ADDR"] = x_for
x_proto = self._get_real_value(
self.x_proto, environ_get("HTTP_X_FORWARDED_PROTO")
)
if x_proto:
environ["wsgi.url_scheme"] = x_proto
x_host = self._get_real_value(self.x_host, environ_get("HTTP_X_FORWARDED_HOST"))
if x_host:
environ["HTTP_HOST"] = environ["SERVER_NAME"] = x_host
# "]" to check for IPv6 address without port
if ":" in x_host and not x_host.endswith("]"):
environ["SERVER_NAME"], environ["SERVER_PORT"] = x_host.rsplit(":", 1)
x_port = self._get_real_value(self.x_port, environ_get("HTTP_X_FORWARDED_PORT"))
if x_port:
host = environ.get("HTTP_HOST")
if host:
# "]" to check for IPv6 address without port
if ":" in host and not host.endswith("]"):
host = host.rsplit(":", 1)[0]
environ["HTTP_HOST"] = f"{host}:{x_port}"
environ["SERVER_PORT"] = x_port
x_prefix = self._get_real_value(
self.x_prefix, environ_get("HTTP_X_FORWARDED_PREFIX")
)
if x_prefix:
environ["SCRIPT_NAME"] = x_prefix
return self.app(environ, start_response)
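# --- Editor's illustrative sketch (not part of the original module): a
# minimal end-to-end run of ProxyFix behind two chained proxies. Both
# proxies append to X-Forwarded-For, so x_for=2 trusts the second value
# from the right; only the outer proxy sets X-Forwarded-Proto. The
# `demo_app` callable is a hypothetical stand-in for a real WSGI app.
if __name__ == "__main__":
    def demo_app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [environ["REMOTE_ADDR"].encode()]
    wrapped = ProxyFix(demo_app, x_for=2, x_proto=1)
    environ = {
        "REMOTE_ADDR": "10.0.0.2",  # address of the inner proxy
        "wsgi.url_scheme": "http",
        "HTTP_X_FORWARDED_FOR": "203.0.113.9, 10.0.0.1",  # client, outer proxy
        "HTTP_X_FORWARDED_PROTO": "https",
    }
    body = wrapped(environ, lambda status, headers: None)
    assert b"".join(body) == b"203.0.113.9"  # the client address was trusted
    assert environ["wsgi.url_scheme"] == "https"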
|
import pytest
from werkzeug.middleware.proxy_fix import ProxyFix
from werkzeug.routing import Map
from werkzeug.routing import Rule
from werkzeug.test import Client
from werkzeug.test import create_environ
from werkzeug.utils import redirect
from werkzeug.wrappers import Request
from werkzeug.wrappers import Response
@pytest.mark.parametrize(
("kwargs", "base", "url_root"),
(
pytest.param(
{},
{
"REMOTE_ADDR": "192.168.0.2",
"HTTP_HOST": "spam",
"HTTP_X_FORWARDED_FOR": "192.168.0.1",
"HTTP_X_FORWARDED_PROTO": "https",
},
"https://spam/",
id="for",
),
pytest.param(
{"x_proto": 1},
{"HTTP_HOST": "spam", "HTTP_X_FORWARDED_PROTO": "https"},
"https://spam/",
id="proto",
),
pytest.param(
{"x_host": 1},
{"HTTP_HOST": "spam", "HTTP_X_FORWARDED_HOST": "eggs"},
"http://eggs/",
id="host",
),
pytest.param(
{"x_port": 1},
{"HTTP_HOST": "spam", "HTTP_X_FORWARDED_PORT": "8080"},
"http://spam:8080/",
id="port, host without port",
),
pytest.param(
{"x_port": 1},
{"HTTP_HOST": "spam:9000", "HTTP_X_FORWARDED_PORT": "8080"},
"http://spam:8080/",
id="port, host with port",
),
pytest.param(
{"x_port": 1},
{
"SERVER_NAME": "spam",
"SERVER_PORT": "9000",
"HTTP_X_FORWARDED_PORT": "8080",
},
"http://spam:8080/",
id="port, name",
),
pytest.param(
{"x_prefix": 1},
{"HTTP_HOST": "spam", "HTTP_X_FORWARDED_PREFIX": "/eggs"},
"http://spam/eggs/",
id="prefix",
),
pytest.param(
{"x_for": 1, "x_proto": 1, "x_host": 1, "x_port": 1, "x_prefix": 1},
{
"REMOTE_ADDR": "192.168.0.2",
"HTTP_HOST": "spam:9000",
"HTTP_X_FORWARDED_FOR": "192.168.0.1",
"HTTP_X_FORWARDED_PROTO": "https",
"HTTP_X_FORWARDED_HOST": "eggs",
"HTTP_X_FORWARDED_PORT": "443",
"HTTP_X_FORWARDED_PREFIX": "/ham",
},
"https://eggs/ham/",
id="all",
),
pytest.param(
{"x_for": 2},
{
"REMOTE_ADDR": "192.168.0.3",
"HTTP_HOST": "spam",
"HTTP_X_FORWARDED_FOR": "192.168.0.1, 192.168.0.2",
},
"http://spam/",
id="multiple for",
),
pytest.param(
{"x_for": 0},
{
"REMOTE_ADDR": "192.168.0.1",
"HTTP_HOST": "spam",
"HTTP_X_FORWARDED_FOR": "192.168.0.2",
},
"http://spam/",
id="ignore 0",
),
pytest.param(
{"x_for": 3},
{
"REMOTE_ADDR": "192.168.0.1",
"HTTP_HOST": "spam",
"HTTP_X_FORWARDED_FOR": "192.168.0.3, 192.168.0.2",
},
"http://spam/",
id="ignore len < trusted",
),
pytest.param(
{},
{
"REMOTE_ADDR": "192.168.0.2",
"HTTP_HOST": "spam",
"HTTP_X_FORWARDED_FOR": "192.168.0.3, 192.168.0.1",
},
"http://spam/",
id="ignore untrusted",
),
pytest.param(
{"x_for": 2},
{
"REMOTE_ADDR": "192.168.0.1",
"HTTP_HOST": "spam",
"HTTP_X_FORWARDED_FOR": ", 192.168.0.3",
},
"http://spam/",
id="ignore empty",
),
pytest.param(
{"x_for": 2, "x_prefix": 1},
{
"REMOTE_ADDR": "192.168.0.2",
"HTTP_HOST": "spam",
"HTTP_X_FORWARDED_FOR": "192.168.0.1, 192.168.0.3",
"HTTP_X_FORWARDED_PREFIX": "/ham, /eggs",
},
"http://spam/eggs/",
id="prefix < for",
),
pytest.param(
{"x_host": 1},
{"HTTP_HOST": "spam", "HTTP_X_FORWARDED_HOST": "[2001:db8::a]"},
"http://[2001:db8::a]/",
id="ipv6 host",
),
pytest.param(
{"x_port": 1},
{"HTTP_HOST": "[2001:db8::a]", "HTTP_X_FORWARDED_PORT": "8080"},
"http://[2001:db8::a]:8080/",
id="ipv6 port, host without port",
),
pytest.param(
{"x_port": 1},
{"HTTP_HOST": "[2001:db8::a]:9000", "HTTP_X_FORWARDED_PORT": "8080"},
"http://[2001:db8::a]:8080/",
id="ipv6 - port, host with port",
),
),
)
def test_proxy_fix(monkeypatch, kwargs, base, url_root):
monkeypatch.setattr(Response, "autocorrect_location_header", True)
@Request.application
def app(request):
# for header
assert request.remote_addr == "192.168.0.1"
# proto, host, port, prefix headers
assert request.url_root == url_root
urls = url_map.bind_to_environ(request.environ)
parrot_url = urls.build("parrot")
# build includes prefix
assert urls.build("parrot") == "/".join((request.script_root, "parrot"))
# match doesn't include prefix
assert urls.match("/parrot")[0] == "parrot"
# With autocorrect_location_header enabled, location header will
# start with url_root
return redirect(parrot_url)
url_map = Map([Rule("/parrot", endpoint="parrot")])
app = ProxyFix(app, **kwargs)
base.setdefault("REMOTE_ADDR", "192.168.0.1")
environ = create_environ(environ_overrides=base)
# host is always added, remove it if the test doesn't set it
if "HTTP_HOST" not in base:
del environ["HTTP_HOST"]
response = Client(app).open(Request(environ))
assert response.location == f"{url_root}parrot"
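# --- Editor's sketch (addition, not from the werkzeug test suite): the
# mock-based unit tests requested by the instruction above. The wrapped app
# and start_response are MagicMocks, so no real WSGI stack is involved;
# Arrange / Act / Assert structure throughout.
from unittest.mock import MagicMock
def test_proxy_fix_with_mocked_app():
    # Arrange: a mocked WSGI app and an environ as sent by one trusted proxy.
    mock_app = MagicMock(return_value=[b"ok"])
    middleware = ProxyFix(mock_app, x_for=1, x_proto=1)
    environ = {
        "REMOTE_ADDR": "10.0.0.1",
        "wsgi.url_scheme": "http",
        "HTTP_X_FORWARDED_FOR": "203.0.113.9",
        "HTTP_X_FORWARDED_PROTO": "https",
    }
    start_response = MagicMock()
    # Act
    result = middleware(environ, start_response)
    # Assert: the environ was rewritten and passed to the wrapped app.
    assert result == [b"ok"]
    mock_app.assert_called_once_with(environ, start_response)
    assert environ["REMOTE_ADDR"] == "203.0.113.9"
    assert environ["wsgi.url_scheme"] == "https"
    assert environ["werkzeug.proxy_fix.orig"]["REMOTE_ADDR"] == "10.0.0.1"
def test_proxy_fix_mocked_app_untrusted_header():
    # Arrange: x_for=0 disables trust in X-Forwarded-For entirely.
    mock_app = MagicMock(return_value=[b"ok"])
    middleware = ProxyFix(mock_app, x_for=0)
    environ = {"REMOTE_ADDR": "10.0.0.1", "HTTP_X_FORWARDED_FOR": "203.0.113.9"}
    # Act
    middleware(environ, MagicMock())
    # Assert: the header was ignored.
    assert environ["REMOTE_ADDR"] == "10.0.0.1"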
|
./temp_repos/werkzeug/src/werkzeug/middleware/proxy_fix.py
|
./temp_repos/werkzeug/tests/middleware/test_proxy_fix.py
|
werkzeug
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'ProxyFix'.
Context:
- Class Name: ProxyFix
- Dependencies to Mock: app, x_proto, x_prefix, x_port, x_host, x_for
- Key Imports: typing, http, __future__, _typeshed.wsgi
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
ProxyFix
|
python
|
"""
Application Profiler
====================
This module provides a middleware that profiles each request with the
:mod:`cProfile` module. This can help identify bottlenecks in your code
that may be slowing down your application.
.. autoclass:: ProfilerMiddleware
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
from __future__ import annotations
import os.path
import sys
import time
import typing as t
from pstats import Stats
try:
from cProfile import Profile
except ImportError:
from profile import Profile # type: ignore
if t.TYPE_CHECKING:
from _typeshed.wsgi import StartResponse
from _typeshed.wsgi import WSGIApplication
from _typeshed.wsgi import WSGIEnvironment
class ProfilerMiddleware:
"""Wrap a WSGI application and profile the execution of each
request. Responses are buffered so that timings are more exact.
If ``stream`` is given, :class:`pstats.Stats` are written to it
after each request. If ``profile_dir`` is given, :mod:`cProfile`
data files are saved to that directory, one file per request.
The filename can be customized by passing ``filename_format``. If
it is a string, it will be formatted using :meth:`str.format` with
the following fields available:
- ``{method}`` - The request method; GET, POST, etc.
- ``{path}`` - The request path, or 'root' if there is none.
- ``{elapsed}`` - The elapsed time of the request in milliseconds.
- ``{time}`` - The time of the request.
If it is a callable, it will be called with the WSGI ``environ`` and
be expected to return a filename string. The ``environ`` dictionary
will also have the ``"werkzeug.profiler"`` key populated with a
dictionary containing the following fields (more may be added in the
future):
- ``{elapsed}`` - The elapsed time of the request in milliseconds.
- ``{time}`` - The time of the request.
:param app: The WSGI application to wrap.
:param stream: Write stats to this stream. Disable with ``None``.
:param sort_by: A tuple of columns to sort stats by. See
:meth:`pstats.Stats.sort_stats`.
:param restrictions: A tuple of restrictions to filter stats by. See
:meth:`pstats.Stats.print_stats`.
:param profile_dir: Save profile data files to this directory.
:param filename_format: Format string for profile data file names,
or a callable returning a name. See explanation above.
.. code-block:: python
from werkzeug.middleware.profiler import ProfilerMiddleware
app = ProfilerMiddleware(app)
.. versionchanged:: 3.0
Added the ``"werkzeug.profiler"`` key to the ``filename_format(environ)``
parameter with the ``elapsed`` and ``time`` fields.
.. versionchanged:: 0.15
Stats are written even if ``profile_dir`` is given, and can be
disabled by passing ``stream=None``.
.. versionadded:: 0.15
Added ``filename_format``.
.. versionadded:: 0.9
Added ``restrictions`` and ``profile_dir``.
"""
def __init__(
self,
app: WSGIApplication,
stream: t.IO[str] | None = sys.stdout,
sort_by: t.Iterable[str] = ("time", "calls"),
restrictions: t.Iterable[str | int | float] = (),
profile_dir: str | None = None,
filename_format: str = "{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof",
) -> None:
self._app = app
self._stream = stream
self._sort_by = sort_by
self._restrictions = restrictions
self._profile_dir = profile_dir
self._filename_format = filename_format
def __call__(
self, environ: WSGIEnvironment, start_response: StartResponse
) -> t.Iterable[bytes]:
response_body: list[bytes] = []
def catching_start_response(status, headers, exc_info=None): # type: ignore
start_response(status, headers, exc_info)
return response_body.append
def runapp() -> None:
app_iter = self._app(
environ, t.cast("StartResponse", catching_start_response)
)
response_body.extend(app_iter)
if hasattr(app_iter, "close"):
app_iter.close()
profile = Profile()
start = time.time()
profile.runcall(runapp)
body = b"".join(response_body)
elapsed = time.time() - start
if self._profile_dir is not None:
if callable(self._filename_format):
environ["werkzeug.profiler"] = {
"elapsed": elapsed * 1000.0,
"time": time.time(),
}
filename = self._filename_format(environ)
else:
filename = self._filename_format.format(
method=environ["REQUEST_METHOD"],
path=environ["PATH_INFO"].strip("/").replace("/", ".") or "root",
elapsed=elapsed * 1000.0,
time=time.time(),
)
filename = os.path.join(self._profile_dir, filename)
profile.dump_stats(filename)
if self._stream is not None:
stats = Stats(profile, stream=self._stream)
stats.sort_stats(*self._sort_by)
print("-" * 80, file=self._stream)
path_info = environ.get("PATH_INFO", "")
print(f"PATH: {path_info!r}", file=self._stream)
stats.print_stats(*self._restrictions)
print(f"{'-' * 80}\n", file=self._stream)
return [body]
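# --- Editor's illustrative sketch (not part of the original module): one
# request driven through the middleware by hand, with stats captured in a
# StringIO instead of stdout. `demo_app` is a hypothetical WSGI app.
if __name__ == "__main__":
    import io
    def demo_app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"hello"]
    stream = io.StringIO()
    profiled = ProfilerMiddleware(demo_app, stream=stream, sort_by=("cumulative",))
    body = profiled({"REQUEST_METHOD": "GET", "PATH_INFO": "/demo"}, lambda *a: None)
    assert body == [b"hello"]
    assert "PATH: '/demo'" in stream.getvalue()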
|
import datetime
import os
from unittest.mock import ANY
from unittest.mock import MagicMock
from unittest.mock import patch
from werkzeug.middleware.profiler import Profile
from werkzeug.middleware.profiler import ProfilerMiddleware
from werkzeug.test import Client
def dummy_application(environ, start_response):
start_response("200 OK", [("Content-Type", "text/plain")])
return [b"Foo"]
def test_filename_format_function():
# This should be called once with the generated file name
mock_capture_name = MagicMock()
def filename_format(env):
now = datetime.datetime.fromtimestamp(env["werkzeug.profiler"]["time"])
timestamp = now.strftime("%Y-%m-%d:%H:%M:%S")
path = (
"_".join(token for token in env["PATH_INFO"].split("/") if token) or "ROOT"
)
elapsed = env["werkzeug.profiler"]["elapsed"]
name = f"{timestamp}.{env['REQUEST_METHOD']}.{path}.{elapsed:.0f}ms.prof"
mock_capture_name(name=name)
return name
client = Client(
ProfilerMiddleware(
dummy_application,
stream=None,
profile_dir="profiles",
filename_format=filename_format,
)
)
# Replace the Profile class with a function that simulates an __init__()
# call and returns our mock instance.
mock_profile = MagicMock(wraps=Profile())
mock_profile.dump_stats = MagicMock()
with patch("werkzeug.middleware.profiler.Profile", lambda: mock_profile):
client.get("/foo/bar")
mock_capture_name.assert_called_once_with(name=ANY)
name = mock_capture_name.mock_calls[0].kwargs["name"]
mock_profile.dump_stats.assert_called_once_with(os.path.join("profiles", name))
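# --- Editor's sketch (addition): success and failure-path checks using the
# imports already present above. The first verifies stats reach the stream;
# the second verifies dump_stats is never called without a profile_dir.
def test_stats_written_to_stream():
    # Arrange
    import io
    stream = io.StringIO()
    client = Client(ProfilerMiddleware(dummy_application, stream=stream))
    # Act
    response = client.get("/foo")
    # Assert: the response still flows through and stats went to the stream.
    assert response.data == b"Foo"
    assert "PATH: '/foo'" in stream.getvalue()
def test_no_dump_stats_without_profile_dir():
    # Arrange
    mock_profile = MagicMock(wraps=Profile())
    mock_profile.dump_stats = MagicMock()
    client = Client(ProfilerMiddleware(dummy_application, stream=None))
    # Act
    with patch("werkzeug.middleware.profiler.Profile", lambda: mock_profile):
        client.get("/foo")
    # Assert
    mock_profile.dump_stats.assert_not_called()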
|
./temp_repos/werkzeug/src/werkzeug/middleware/profiler.py
|
./temp_repos/werkzeug/tests/middleware/test_profiler.py
|
werkzeug
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'ProfilerMiddleware'.
Context:
- Class Name: ProfilerMiddleware
- Dependencies to Mock: app, stream, profile_dir, sort_by, restrictions, filename_format
- Key Imports: time, profile, cProfile, typing, _typeshed.wsgi, pstats, __future__, os.path, sys
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
ProfilerMiddleware
|
python
|
"""
Application Dispatcher
======================
This middleware creates a single WSGI application that dispatches to
multiple other WSGI applications mounted at different URL paths.
A common example is writing a Single Page Application, where you have a
backend API and a frontend written in JavaScript that does the routing
in the browser rather than requesting different pages from the server.
The frontend is a single HTML and JS file that should be served for any
path besides "/api".
This example dispatches to an API app under "/api", an admin app
under "/admin", and an app that serves frontend files for all other
requests::
app = DispatcherMiddleware(serve_frontend, {
'/api': api_app,
'/admin': admin_app,
})
In production, you might instead handle this at the HTTP server level,
serving files or proxying to application servers based on location. The
API and admin apps would each be deployed with a separate WSGI server,
and the static files would be served directly by the HTTP server.
.. autoclass:: DispatcherMiddleware
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
from __future__ import annotations
import typing as t
if t.TYPE_CHECKING:
from _typeshed.wsgi import StartResponse
from _typeshed.wsgi import WSGIApplication
from _typeshed.wsgi import WSGIEnvironment
class DispatcherMiddleware:
"""Combine multiple applications as a single WSGI application.
Requests are dispatched to an application based on the path it is
mounted under.
:param app: The WSGI application to dispatch to if the request
doesn't match a mounted path.
:param mounts: Maps path prefixes to applications for dispatching.
"""
def __init__(
self,
app: WSGIApplication,
mounts: dict[str, WSGIApplication] | None = None,
) -> None:
self.app = app
self.mounts = mounts or {}
def __call__(
self, environ: WSGIEnvironment, start_response: StartResponse
) -> t.Iterable[bytes]:
script = environ.get("PATH_INFO", "")
path_info = ""
while "/" in script:
if script in self.mounts:
app = self.mounts[script]
break
script, last_item = script.rsplit("/", 1)
path_info = f"/{last_item}{path_info}"
else:
app = self.mounts.get(script, self.app)
original_script_name = environ.get("SCRIPT_NAME", "")
environ["SCRIPT_NAME"] = original_script_name + script
environ["PATH_INFO"] = path_info
return app(environ, start_response)
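# --- Editor's illustrative sketch (not part of the original module): how a
# matched mount prefix moves from PATH_INFO into SCRIPT_NAME. `api_app` and
# `fallback_app` are hypothetical stand-ins for real WSGI applications.
if __name__ == "__main__":
    def fallback_app(environ, start_response):
        start_response("404 NOT FOUND", [("Content-Type", "text/plain")])
        return [b"fallback"]
    def api_app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [f"{environ['SCRIPT_NAME']} {environ['PATH_INFO']}".encode()]
    app = DispatcherMiddleware(fallback_app, {"/api": api_app})
    environ = {"PATH_INFO": "/api/v1/users", "SCRIPT_NAME": ""}
    body = app(environ, lambda status, headers: None)
    assert b"".join(body) == b"/api /v1/users"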
|
from werkzeug.middleware.dispatcher import DispatcherMiddleware
from werkzeug.test import create_environ
from werkzeug.test import run_wsgi_app
def test_dispatcher():
def null_application(environ, start_response):
start_response("404 NOT FOUND", [("Content-Type", "text/plain")])
yield b"NOT FOUND"
def dummy_application(environ, start_response):
start_response("200 OK", [("Content-Type", "text/plain")])
yield environ["SCRIPT_NAME"].encode()
app = DispatcherMiddleware(
null_application,
{"/test1": dummy_application, "/test2/very": dummy_application},
)
tests = {
"/test1": ("/test1", "/test1/asfd", "/test1/very"),
"/test2/very": ("/test2/very", "/test2/very/long/path/after/script/name"),
}
for name, urls in tests.items():
for p in urls:
environ = create_environ(p)
app_iter, status, headers = run_wsgi_app(app, environ)
assert status == "200 OK"
assert b"".join(app_iter).strip() == name.encode()
app_iter, status, headers = run_wsgi_app(app, create_environ("/missing"))
assert status == "404 NOT FOUND"
assert b"".join(app_iter).strip() == b"NOT FOUND"
|
./temp_repos/werkzeug/src/werkzeug/middleware/dispatcher.py
|
./temp_repos/werkzeug/tests/middleware/test_dispatcher.py
|
werkzeug
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DispatcherMiddleware'.
Context:
- Class Name: DispatcherMiddleware
- Dependencies to Mock: app, mounts
- Key Imports: typing, __future__, _typeshed.wsgi
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
DispatcherMiddleware
|
python
|
from fastapi import FastAPI, File, Form, UploadFile
app = FastAPI()
@app.post("/files/")
async def create_file(
file: bytes = File(), fileb: UploadFile = File(), token: str = Form()
):
return {
"file_size": len(file),
"token": token,
"fileb_content_type": fileb.content_type,
}
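# --- Editor's illustrative sketch (not part of the tutorial): exercising the
# endpoint above with FastAPI's TestClient. The multipart field names must
# match the parameter names: "file" and "fileb" as files, "token" as form data.
if __name__ == "__main__":
    from fastapi.testclient import TestClient
    client = TestClient(app)
    response = client.post(
        "/files/",
        files={
            "file": ("a.txt", b"<file content>"),
            "fileb": ("b.txt", b"<file b content>", "text/plain"),
        },
        data={"token": "secret"},
    )
    assert response.status_code == 200
    assert response.json() == {
        "file_size": 14,  # len(b"<file content>")
        "token": "secret",
        "fileb_content_type": "text/plain",
    }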
|
import importlib
from fastapi.testclient import TestClient
from ...utils import needs_pydanticv2
def get_client() -> TestClient:
from docs_src.conditional_openapi import tutorial001
importlib.reload(tutorial001)
client = TestClient(tutorial001.app)
return client
@needs_pydanticv2
def test_disable_openapi(monkeypatch):
monkeypatch.setenv("OPENAPI_URL", "")
# Load the client after setting the env var
client = get_client()
response = client.get("/openapi.json")
assert response.status_code == 404, response.text
response = client.get("/docs")
assert response.status_code == 404, response.text
response = client.get("/redoc")
assert response.status_code == 404, response.text
@needs_pydanticv2
def test_root():
client = get_client()
response = client.get("/")
assert response.status_code == 200
assert response.json() == {"message": "Hello World"}
@needs_pydanticv2
def test_default_openapi():
client = get_client()
response = client.get("/docs")
assert response.status_code == 200, response.text
response = client.get("/redoc")
assert response.status_code == 200, response.text
response = client.get("/openapi.json")
assert response.json() == {
"openapi": "3.1.0",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"summary": "Root",
"operationId": "root__get",
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
}
},
}
}
},
}
|
./temp_repos/fastapi/docs_src/request_forms_and_files/tutorial001.py
|
./temp_repos/fastapi/tests/test_tutorial/test_conditional_openapi/test_tutorial001.py
|
fastapi
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: fastapi
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Unknown
|
python
|
from typing import Union
from fastapi import FastAPI
from pydantic import BaseModel
class Item(BaseModel):
name: str
description: Union[str, None] = None
price: float
tax: Union[float, None] = None
app = FastAPI()
@app.post("/items/")
async def create_item(item: Item):
item_dict = item.dict()
if item.tax is not None:
price_with_tax = item.price + item.tax
item_dict.update({"price_with_tax": price_with_tax})
return item_dict
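# --- Editor's illustrative sketch (not part of the tutorial): the tax
# branch above, driven through FastAPI's TestClient.
if __name__ == "__main__":
    from fastapi.testclient import TestClient
    client = TestClient(app)
    response = client.post("/items/", json={"name": "Foo", "price": 50.0, "tax": 2.5})
    assert response.status_code == 200
    # "price_with_tax" is added only because tax is not None.
    assert response.json() == {
        "name": "Foo",
        "description": None,
        "price": 50.0,
        "tax": 2.5,
        "price_with_tax": 52.5,
    }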
|
import importlib
import warnings
import pytest
from dirty_equals import IsDict, IsInt
from fastapi.testclient import TestClient
from inline_snapshot import Is, snapshot
from sqlalchemy import StaticPool
from sqlmodel import SQLModel, create_engine
from sqlmodel.main import default_registry
from tests.utils import needs_py39, needs_py310
def clear_sqlmodel():
# Clear the tables in the metadata for the default base model
SQLModel.metadata.clear()
# Clear the Models associated with the registry, to avoid warnings
default_registry.dispose()
@pytest.fixture(
name="client",
params=[
"tutorial002",
pytest.param("tutorial002_py39", marks=needs_py39),
pytest.param("tutorial002_py310", marks=needs_py310),
"tutorial002_an",
pytest.param("tutorial002_an_py39", marks=needs_py39),
pytest.param("tutorial002_an_py310", marks=needs_py310),
],
)
def get_client(request: pytest.FixtureRequest):
clear_sqlmodel()
# TODO: remove when updating SQL tutorial to use new lifespan API
with warnings.catch_warnings(record=True):
warnings.simplefilter("always")
mod = importlib.import_module(f"docs_src.sql_databases.{request.param}")
clear_sqlmodel()
importlib.reload(mod)
mod.sqlite_url = "sqlite://"
mod.engine = create_engine(
mod.sqlite_url, connect_args={"check_same_thread": False}, poolclass=StaticPool
)
with TestClient(mod.app) as c:
yield c
# Clean up connection explicitly to avoid resource warning
mod.engine.dispose()
def test_crud_app(client: TestClient):
# TODO: this warns that SQLModel.from_orm is deprecated in Pydantic v1, refactor
# this if using obj.model_validate becomes independent of Pydantic v2
with warnings.catch_warnings(record=True):
warnings.simplefilter("always")
# No heroes before creating
response = client.get("heroes/")
assert response.status_code == 200, response.text
assert response.json() == []
# Create a hero
response = client.post(
"/heroes/",
json={
"id": 9000,
"name": "Dead Pond",
"age": 30,
"secret_name": "Dive Wilson",
},
)
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{"age": 30, "id": IsInt(), "name": "Dead Pond"}
)
assert response.json()["id"] != 9000, (
"The ID should be generated by the database"
)
# Read a hero
hero_id = response.json()["id"]
response = client.get(f"/heroes/{hero_id}")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{"name": "Dead Pond", "age": 30, "id": IsInt()}
)
# Read all heroes
# Create more heroes first
response = client.post(
"/heroes/",
json={"name": "Spider-Boy", "age": 18, "secret_name": "Pedro Parqueador"},
)
assert response.status_code == 200, response.text
response = client.post(
"/heroes/", json={"name": "Rusty-Man", "secret_name": "Tommy Sharp"}
)
assert response.status_code == 200, response.text
response = client.get("/heroes/")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
[
{"name": "Dead Pond", "age": 30, "id": IsInt()},
{"name": "Spider-Boy", "age": 18, "id": IsInt()},
{"name": "Rusty-Man", "age": None, "id": IsInt()},
]
)
response = client.get("/heroes/?offset=1&limit=1")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
[{"name": "Spider-Boy", "age": 18, "id": IsInt()}]
)
# Update a hero
response = client.patch(
f"/heroes/{hero_id}", json={"name": "Dog Pond", "age": None}
)
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{"name": "Dog Pond", "age": None, "id": Is(hero_id)}
)
# Get updated hero
response = client.get(f"/heroes/{hero_id}")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{"name": "Dog Pond", "age": None, "id": Is(hero_id)}
)
# Delete a hero
response = client.delete(f"/heroes/{hero_id}")
assert response.status_code == 200, response.text
assert response.json() == snapshot({"ok": True})
# The hero is no longer found
response = client.get(f"/heroes/{hero_id}")
assert response.status_code == 404, response.text
# Delete a hero that does not exist
response = client.delete(f"/heroes/{hero_id}")
assert response.status_code == 404, response.text
assert response.json() == snapshot({"detail": "Hero not found"})
# Update a hero that does not exist
response = client.patch(f"/heroes/{hero_id}", json={"name": "Dog Pond"})
assert response.status_code == 404, response.text
assert response.json() == snapshot({"detail": "Hero not found"})
def test_openapi_schema(client: TestClient):
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == snapshot(
{
"openapi": "3.1.0",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/heroes/": {
"post": {
"summary": "Create Hero",
"operationId": "create_hero_heroes__post",
"requestBody": {
"required": True,
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HeroCreate"
}
}
},
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HeroPublic"
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
},
"get": {
"summary": "Read Heroes",
"operationId": "read_heroes_heroes__get",
"parameters": [
{
"name": "offset",
"in": "query",
"required": False,
"schema": {
"type": "integer",
"default": 0,
"title": "Offset",
},
},
{
"name": "limit",
"in": "query",
"required": False,
"schema": {
"type": "integer",
"maximum": 100,
"default": 100,
"title": "Limit",
},
},
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"type": "array",
"items": {
"$ref": "#/components/schemas/HeroPublic"
},
"title": "Response Read Heroes Heroes Get",
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
},
},
"/heroes/{hero_id}": {
"get": {
"summary": "Read Hero",
"operationId": "read_hero_heroes__hero_id__get",
"parameters": [
{
"name": "hero_id",
"in": "path",
"required": True,
"schema": {"type": "integer", "title": "Hero Id"},
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HeroPublic"
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
},
"patch": {
"summary": "Update Hero",
"operationId": "update_hero_heroes__hero_id__patch",
"parameters": [
{
"name": "hero_id",
"in": "path",
"required": True,
"schema": {"type": "integer", "title": "Hero Id"},
}
],
"requestBody": {
"required": True,
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HeroUpdate"
}
}
},
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HeroPublic"
}
}
},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
},
"delete": {
"summary": "Delete Hero",
"operationId": "delete_hero_heroes__hero_id__delete",
"parameters": [
{
"name": "hero_id",
"in": "path",
"required": True,
"schema": {"type": "integer", "title": "Hero Id"},
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
},
},
},
"components": {
"schemas": {
"HTTPValidationError": {
"properties": {
"detail": {
"items": {
"$ref": "#/components/schemas/ValidationError"
},
"type": "array",
"title": "Detail",
}
},
"type": "object",
"title": "HTTPValidationError",
},
"HeroCreate": {
"properties": {
"name": {"type": "string", "title": "Name"},
"age": IsDict(
{
"anyOf": [{"type": "integer"}, {"type": "null"}],
"title": "Age",
}
)
| IsDict(
# TODO: remove when deprecating Pydantic v1
{
"type": "integer",
"title": "Age",
}
),
"secret_name": {"type": "string", "title": "Secret Name"},
},
"type": "object",
"required": ["name", "secret_name"],
"title": "HeroCreate",
},
"HeroPublic": {
"properties": {
"name": {"type": "string", "title": "Name"},
"age": IsDict(
{
"anyOf": [{"type": "integer"}, {"type": "null"}],
"title": "Age",
}
)
| IsDict(
# TODO: remove when deprecating Pydantic v1
{
"type": "integer",
"title": "Age",
}
),
"id": {"type": "integer", "title": "Id"},
},
"type": "object",
"required": ["name", "id"],
"title": "HeroPublic",
},
"HeroUpdate": {
"properties": {
"name": IsDict(
{
"anyOf": [{"type": "string"}, {"type": "null"}],
"title": "Name",
}
)
| IsDict(
# TODO: remove when deprecating Pydantic v1
{
"type": "string",
"title": "Name",
}
),
"age": IsDict(
{
"anyOf": [{"type": "integer"}, {"type": "null"}],
"title": "Age",
}
)
| IsDict(
# TODO: remove when deprecating Pydantic v1
{
"type": "integer",
"title": "Age",
}
),
"secret_name": IsDict(
{
"anyOf": [{"type": "string"}, {"type": "null"}],
"title": "Secret Name",
}
)
| IsDict(
# TODO: remove when deprecating Pydantic v1
{
"type": "string",
"title": "Secret Name",
}
),
},
"type": "object",
"title": "HeroUpdate",
},
"ValidationError": {
"properties": {
"loc": {
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
"type": "array",
"title": "Location",
},
"msg": {"type": "string", "title": "Message"},
"type": {"type": "string", "title": "Error Type"},
},
"type": "object",
"required": ["loc", "msg", "type"],
"title": "ValidationError",
},
}
},
}
)
|
./temp_repos/fastapi/docs_src/body/tutorial002.py
|
./temp_repos/fastapi/tests/test_tutorial/test_sql_databases/test_tutorial002.py
|
fastapi
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Item'.
Context:
- Class Name: Item
- Dependencies to Mock: None detected
- Key Imports: typing, pydantic, fastapi
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Item
|
python
|
from fastapi import FastAPI, File, Form, UploadFile
app = FastAPI()
@app.post("/files/")
async def create_file(
file: bytes = File(), fileb: UploadFile = File(), token: str = Form()
):
return {
"file_size": len(file),
"token": token,
"fileb_content_type": fileb.content_type,
}
|
import importlib
from unittest.mock import patch
import pytest
from dirty_equals import IsDict
from fastapi.testclient import TestClient
from ...utils import needs_py310
@pytest.fixture(
name="client",
params=[
"tutorial001",
pytest.param("tutorial001_py310", marks=needs_py310),
],
)
def get_client(request: pytest.FixtureRequest):
mod = importlib.import_module(f"docs_src.body.{request.param}")
client = TestClient(mod.app)
return client
def test_body_float(client: TestClient):
response = client.post("/items/", json={"name": "Foo", "price": 50.5})
assert response.status_code == 200
assert response.json() == {
"name": "Foo",
"price": 50.5,
"description": None,
"tax": None,
}
def test_post_with_str_float(client: TestClient):
response = client.post("/items/", json={"name": "Foo", "price": "50.5"})
assert response.status_code == 200
assert response.json() == {
"name": "Foo",
"price": 50.5,
"description": None,
"tax": None,
}
def test_post_with_str_float_description(client: TestClient):
response = client.post(
"/items/", json={"name": "Foo", "price": "50.5", "description": "Some Foo"}
)
assert response.status_code == 200
assert response.json() == {
"name": "Foo",
"price": 50.5,
"description": "Some Foo",
"tax": None,
}
def test_post_with_str_float_description_tax(client: TestClient):
response = client.post(
"/items/",
json={"name": "Foo", "price": "50.5", "description": "Some Foo", "tax": 0.3},
)
assert response.status_code == 200
assert response.json() == {
"name": "Foo",
"price": 50.5,
"description": "Some Foo",
"tax": 0.3,
}
def test_post_with_only_name(client: TestClient):
response = client.post("/items/", json={"name": "Foo"})
assert response.status_code == 422
assert response.json() == IsDict(
{
"detail": [
{
"type": "missing",
"loc": ["body", "price"],
"msg": "Field required",
"input": {"name": "Foo"},
}
]
}
) | IsDict(
# TODO: remove when deprecating Pydantic v1
{
"detail": [
{
"loc": ["body", "price"],
"msg": "field required",
"type": "value_error.missing",
}
]
}
)
def test_post_with_only_name_price(client: TestClient):
response = client.post("/items/", json={"name": "Foo", "price": "twenty"})
assert response.status_code == 422
assert response.json() == IsDict(
{
"detail": [
{
"type": "float_parsing",
"loc": ["body", "price"],
"msg": "Input should be a valid number, unable to parse string as a number",
"input": "twenty",
}
]
}
) | IsDict(
# TODO: remove when deprecating Pydantic v1
{
"detail": [
{
"loc": ["body", "price"],
"msg": "value is not a valid float",
"type": "type_error.float",
}
]
}
)
def test_post_with_no_data(client: TestClient):
response = client.post("/items/", json={})
assert response.status_code == 422
assert response.json() == IsDict(
{
"detail": [
{
"type": "missing",
"loc": ["body", "name"],
"msg": "Field required",
"input": {},
},
{
"type": "missing",
"loc": ["body", "price"],
"msg": "Field required",
"input": {},
},
]
}
) | IsDict(
# TODO: remove when deprecating Pydantic v1
{
"detail": [
{
"loc": ["body", "name"],
"msg": "field required",
"type": "value_error.missing",
},
{
"loc": ["body", "price"],
"msg": "field required",
"type": "value_error.missing",
},
]
}
)
def test_post_with_none(client: TestClient):
response = client.post("/items/", json=None)
assert response.status_code == 422
assert response.json() == IsDict(
{
"detail": [
{
"type": "missing",
"loc": ["body"],
"msg": "Field required",
"input": None,
}
]
}
) | IsDict(
# TODO: remove when deprecating Pydantic v1
{
"detail": [
{
"loc": ["body"],
"msg": "field required",
"type": "value_error.missing",
}
]
}
)
def test_post_broken_body(client: TestClient):
response = client.post(
"/items/",
headers={"content-type": "application/json"},
content="{some broken json}",
)
assert response.status_code == 422, response.text
assert response.json() == IsDict(
{
"detail": [
{
"type": "json_invalid",
"loc": ["body", 1],
"msg": "JSON decode error",
"input": {},
"ctx": {
"error": "Expecting property name enclosed in double quotes"
},
}
]
}
) | IsDict(
# TODO: remove when deprecating Pydantic v1
{
"detail": [
{
"loc": ["body", 1],
"msg": "Expecting property name enclosed in double quotes: line 1 column 2 (char 1)",
"type": "value_error.jsondecode",
"ctx": {
"msg": "Expecting property name enclosed in double quotes",
"doc": "{some broken json}",
"pos": 1,
"lineno": 1,
"colno": 2,
},
}
]
}
)
def test_post_form_for_json(client: TestClient):
response = client.post("/items/", data={"name": "Foo", "price": 50.5})
assert response.status_code == 422, response.text
assert response.json() == IsDict(
{
"detail": [
{
"type": "model_attributes_type",
"loc": ["body"],
"msg": "Input should be a valid dictionary or object to extract fields from",
"input": "name=Foo&price=50.5",
}
]
}
) | IsDict(
# TODO: remove when deprecating Pydantic v1
{
"detail": [
{
"loc": ["body"],
"msg": "value is not a valid dict",
"type": "type_error.dict",
}
]
}
)
def test_explicit_content_type(client: TestClient):
response = client.post(
"/items/",
content='{"name": "Foo", "price": 50.5}',
headers={"Content-Type": "application/json"},
)
assert response.status_code == 200, response.text
def test_geo_json(client: TestClient):
response = client.post(
"/items/",
content='{"name": "Foo", "price": 50.5}',
headers={"Content-Type": "application/geo+json"},
)
assert response.status_code == 200, response.text
def test_no_content_type_is_json(client: TestClient):
response = client.post(
"/items/",
content='{"name": "Foo", "price": 50.5}',
)
assert response.status_code == 200, response.text
assert response.json() == {
"name": "Foo",
"description": None,
"price": 50.5,
"tax": None,
}
def test_wrong_headers(client: TestClient):
data = '{"name": "Foo", "price": 50.5}'
response = client.post(
"/items/", content=data, headers={"Content-Type": "text/plain"}
)
assert response.status_code == 422, response.text
assert response.json() == IsDict(
{
"detail": [
{
"type": "model_attributes_type",
"loc": ["body"],
"msg": "Input should be a valid dictionary or object to extract fields from",
"input": '{"name": "Foo", "price": 50.5}',
}
]
}
) | IsDict(
# TODO: remove when deprecating Pydantic v1
{
"detail": [
{
"loc": ["body"],
"msg": "value is not a valid dict",
"type": "type_error.dict",
}
]
}
)
response = client.post(
"/items/", content=data, headers={"Content-Type": "application/geo+json-seq"}
)
assert response.status_code == 422, response.text
assert response.json() == IsDict(
{
"detail": [
{
"type": "model_attributes_type",
"loc": ["body"],
"msg": "Input should be a valid dictionary or object to extract fields from",
"input": '{"name": "Foo", "price": 50.5}',
}
]
}
) | IsDict(
# TODO: remove when deprecating Pydantic v1
{
"detail": [
{
"loc": ["body"],
"msg": "value is not a valid dict",
"type": "type_error.dict",
}
]
}
)
response = client.post(
"/items/", content=data, headers={"Content-Type": "application/not-really-json"}
)
assert response.status_code == 422, response.text
assert response.json() == IsDict(
{
"detail": [
{
"type": "model_attributes_type",
"loc": ["body"],
"msg": "Input should be a valid dictionary or object to extract fields from",
"input": '{"name": "Foo", "price": 50.5}',
}
]
}
) | IsDict(
# TODO: remove when deprecating Pydantic v1
{
"detail": [
{
"loc": ["body"],
"msg": "value is not a valid dict",
"type": "type_error.dict",
}
]
}
)
def test_other_exceptions(client: TestClient):
with patch("json.loads", side_effect=Exception):
response = client.post("/items/", json={"test": "test2"})
assert response.status_code == 400, response.text
def test_openapi_schema(client: TestClient):
response = client.get("/openapi.json")
assert response.status_code == 200, response.text
assert response.json() == {
"openapi": "3.1.0",
"info": {"title": "FastAPI", "version": "0.1.0"},
"paths": {
"/items/": {
"post": {
"responses": {
"200": {
"description": "Successful Response",
"content": {"application/json": {"schema": {}}},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"summary": "Create Item",
"operationId": "create_item_items__post",
"requestBody": {
"content": {
"application/json": {
"schema": {"$ref": "#/components/schemas/Item"}
}
},
"required": True,
},
}
}
},
"components": {
"schemas": {
"Item": {
"title": "Item",
"required": ["name", "price"],
"type": "object",
"properties": {
"name": {"title": "Name", "type": "string"},
"price": {"title": "Price", "type": "number"},
"description": IsDict(
{
"title": "Description",
"anyOf": [{"type": "string"}, {"type": "null"}],
}
)
| IsDict(
# TODO: remove when deprecating Pydantic v1
{"title": "Description", "type": "string"}
),
"tax": IsDict(
{
"title": "Tax",
"anyOf": [{"type": "number"}, {"type": "null"}],
}
)
| IsDict(
# TODO: remove when deprecating Pydantic v1
{"title": "Tax", "type": "number"}
),
},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"anyOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
|
./temp_repos/fastapi/docs_src/request_forms_and_files/tutorial001.py
|
./temp_repos/fastapi/tests/test_tutorial/test_body/test_tutorial001.py
|
fastapi
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: fastapi
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Unknown
|
python
|
from fastapi import FastAPI, File, Form, UploadFile
app = FastAPI()
@app.post("/files/")
async def create_file(
file: bytes = File(), fileb: UploadFile = File(), token: str = Form()
):
return {
"file_size": len(file),
"token": token,
"fileb_content_type": fileb.content_type,
}
|
import importlib
import pytest
from fastapi.testclient import TestClient
from pytest import MonkeyPatch
from ...utils import needs_pydanticv1, needs_pydanticv2
@pytest.fixture(
name="app",
params=[
pytest.param("tutorial001", marks=needs_pydanticv2),
pytest.param("tutorial001_pv1", marks=needs_pydanticv1),
],
)
def get_app(request: pytest.FixtureRequest, monkeypatch: MonkeyPatch):
monkeypatch.setenv("ADMIN_EMAIL", "[email protected]")
mod = importlib.import_module(f"docs_src.settings.{request.param}")
return mod.app
def test_settings(app):
client = TestClient(app)
response = client.get("/info")
assert response.status_code == 200, response.text
assert response.json() == {
"app_name": "Awesome API",
"admin_email": "[email protected]",
"items_per_user": 50,
}
|
./temp_repos/fastapi/docs_src/request_forms_and_files/tutorial001.py
|
./temp_repos/fastapi/tests/test_tutorial/test_settings/test_tutorial001.py
|
fastapi
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Unknown'.
Context:
- Class Name: Unknown
- Dependencies to Mock: None detected
- Key Imports: fastapi
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
Unknown
|
python
|
from __future__ import annotations
import errno
import importlib.util
import os
import stat
from email.utils import parsedate
from typing import Union
import anyio
import anyio.to_thread
from starlette._utils import get_route_path
from starlette.datastructures import URL, Headers
from starlette.exceptions import HTTPException
from starlette.responses import FileResponse, RedirectResponse, Response
from starlette.types import Receive, Scope, Send
PathLike = Union[str, "os.PathLike[str]"]
class NotModifiedResponse(Response):
NOT_MODIFIED_HEADERS = (
"cache-control",
"content-location",
"date",
"etag",
"expires",
"vary",
)
def __init__(self, headers: Headers):
super().__init__(
status_code=304,
headers={name: value for name, value in headers.items() if name in self.NOT_MODIFIED_HEADERS},
)
class StaticFiles:
def __init__(
self,
*,
directory: PathLike | None = None,
packages: list[str | tuple[str, str]] | None = None,
html: bool = False,
check_dir: bool = True,
follow_symlink: bool = False,
) -> None:
self.directory = directory
self.packages = packages
self.all_directories = self.get_directories(directory, packages)
self.html = html
self.config_checked = False
self.follow_symlink = follow_symlink
if check_dir and directory is not None and not os.path.isdir(directory):
raise RuntimeError(f"Directory '{directory}' does not exist")
def get_directories(
self,
directory: PathLike | None = None,
packages: list[str | tuple[str, str]] | None = None,
) -> list[PathLike]:
"""
Given `directory` and `packages` arguments, return a list of all the
directories that should be used for serving static files from.
"""
directories = []
if directory is not None:
directories.append(directory)
for package in packages or []:
if isinstance(package, tuple):
package, statics_dir = package
else:
statics_dir = "statics"
spec = importlib.util.find_spec(package)
assert spec is not None, f"Package {package!r} could not be found."
assert spec.origin is not None, f"Package {package!r} could not be found."
package_directory = os.path.normpath(os.path.join(spec.origin, "..", statics_dir))
assert os.path.isdir(package_directory), (
f"Directory '{statics_dir!r}' in package {package!r} could not be found."
)
directories.append(package_directory)
return directories
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
"""
The ASGI entry point.
"""
assert scope["type"] == "http"
if not self.config_checked:
await self.check_config()
self.config_checked = True
path = self.get_path(scope)
response = await self.get_response(path, scope)
await response(scope, receive, send)
def get_path(self, scope: Scope) -> str:
"""
Given the ASGI scope, return the `path` string to serve up,
with OS specific path separators, and any '..', '.' components removed.
"""
route_path = get_route_path(scope)
return os.path.normpath(os.path.join(*route_path.split("/")))
async def get_response(self, path: str, scope: Scope) -> Response:
"""
Returns an HTTP response, given the incoming path, method and request headers.
"""
if scope["method"] not in ("GET", "HEAD"):
raise HTTPException(status_code=405)
try:
full_path, stat_result = await anyio.to_thread.run_sync(self.lookup_path, path)
except PermissionError:
raise HTTPException(status_code=401)
except OSError as exc:
# Filename is too long, so it can't be a valid static file.
if exc.errno == errno.ENAMETOOLONG:
raise HTTPException(status_code=404)
raise exc
if stat_result and stat.S_ISREG(stat_result.st_mode):
# We have a static file to serve.
return self.file_response(full_path, stat_result, scope)
elif stat_result and stat.S_ISDIR(stat_result.st_mode) and self.html:
# We're in HTML mode, and have got a directory URL.
# Check if we have 'index.html' file to serve.
index_path = os.path.join(path, "index.html")
full_path, stat_result = await anyio.to_thread.run_sync(self.lookup_path, index_path)
if stat_result is not None and stat.S_ISREG(stat_result.st_mode):
if not scope["path"].endswith("/"):
# Directory URLs should redirect to always end in "/".
url = URL(scope=scope)
url = url.replace(path=url.path + "/")
return RedirectResponse(url=url)
return self.file_response(full_path, stat_result, scope)
if self.html:
# Check for '404.html' if we're in HTML mode.
full_path, stat_result = await anyio.to_thread.run_sync(self.lookup_path, "404.html")
if stat_result and stat.S_ISREG(stat_result.st_mode):
return FileResponse(full_path, stat_result=stat_result, status_code=404)
raise HTTPException(status_code=404)
def lookup_path(self, path: str) -> tuple[str, os.stat_result | None]:
for directory in self.all_directories:
joined_path = os.path.join(directory, path)
if self.follow_symlink:
full_path = os.path.abspath(joined_path)
directory = os.path.abspath(directory)
else:
full_path = os.path.realpath(joined_path)
directory = os.path.realpath(directory)
if os.path.commonpath([full_path, directory]) != str(directory):
# Don't allow misbehaving clients to break out of the static files directory.
continue
try:
return full_path, os.stat(full_path)
except (FileNotFoundError, NotADirectoryError):
continue
return "", None
def file_response(
self,
full_path: PathLike,
stat_result: os.stat_result,
scope: Scope,
status_code: int = 200,
) -> Response:
request_headers = Headers(scope=scope)
response = FileResponse(full_path, status_code=status_code, stat_result=stat_result)
if self.is_not_modified(response.headers, request_headers):
return NotModifiedResponse(response.headers)
return response
async def check_config(self) -> None:
"""
Perform a one-off configuration check that StaticFiles is actually
pointed at a directory, so that we can raise loud errors rather than
just returning 404 responses.
"""
if self.directory is None:
return
try:
stat_result = await anyio.to_thread.run_sync(os.stat, self.directory)
except FileNotFoundError:
raise RuntimeError(f"StaticFiles directory '{self.directory}' does not exist.")
if not (stat.S_ISDIR(stat_result.st_mode) or stat.S_ISLNK(stat_result.st_mode)):
raise RuntimeError(f"StaticFiles path '{self.directory}' is not a directory.")
def is_not_modified(self, response_headers: Headers, request_headers: Headers) -> bool:
"""
Given the request and response headers, return `True` if an HTTP
"Not Modified" response could be returned instead.
"""
if if_none_match := request_headers.get("if-none-match"):
# The "etag" header is added by FileResponse, so it's always present.
etag = response_headers["etag"]
return etag in [tag.strip(" W/") for tag in if_none_match.split(",")]
try:
if_modified_since = parsedate(request_headers["if-modified-since"])
last_modified = parsedate(response_headers["last-modified"])
if if_modified_since is not None and last_modified is not None and if_modified_since >= last_modified:
return True
except KeyError:
pass
return False
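# --- Editor's illustrative sketch (not part of the original module): the
# usual way to serve a directory, mounted under /static in a Starlette app.
# A temporary directory stands in for a real "static" folder so the
# check_dir validation in __init__ passes.
if __name__ == "__main__":
    import tempfile
    from starlette.applications import Starlette
    from starlette.routing import Mount
    with tempfile.TemporaryDirectory() as static_dir:
        routes = [Mount("/static", app=StaticFiles(directory=static_dir), name="static")]
        app = Starlette(routes=routes)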
|
import os
import stat
import tempfile
import time
from pathlib import Path
from typing import Any
import anyio
import pytest
from starlette.applications import Starlette
from starlette.exceptions import HTTPException
from starlette.middleware import Middleware
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.requests import Request
from starlette.responses import Response
from starlette.routing import Mount
from starlette.staticfiles import StaticFiles
from tests.types import TestClientFactory
def test_staticfiles(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
app = StaticFiles(directory=tmpdir)
client = test_client_factory(app)
response = client.get("/example.txt")
assert response.status_code == 200
assert response.text == "<file content>"
def test_staticfiles_with_pathlib(tmp_path: Path, test_client_factory: TestClientFactory) -> None:
path = tmp_path / "example.txt"
with open(path, "w") as file:
file.write("<file content>")
app = StaticFiles(directory=tmp_path)
client = test_client_factory(app)
response = client.get("/example.txt")
assert response.status_code == 200
assert response.text == "<file content>"
def test_staticfiles_head_with_middleware(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
"""
see https://github.com/Kludex/starlette/pull/935
"""
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("x" * 100)
async def does_nothing_middleware(request: Request, call_next: RequestResponseEndpoint) -> Response:
response = await call_next(request)
return response
routes = [Mount("/static", app=StaticFiles(directory=tmpdir), name="static")]
middleware = [Middleware(BaseHTTPMiddleware, dispatch=does_nothing_middleware)]
app = Starlette(routes=routes, middleware=middleware)
client = test_client_factory(app)
response = client.head("/static/example.txt")
assert response.status_code == 200
assert response.headers.get("content-length") == "100"
def test_staticfiles_with_package(test_client_factory: TestClientFactory) -> None:
app = StaticFiles(packages=["tests"])
client = test_client_factory(app)
response = client.get("/example.txt")
assert response.status_code == 200
assert response.text == "123\n"
app = StaticFiles(packages=[("tests", "statics")])
client = test_client_factory(app)
response = client.get("/example.txt")
assert response.status_code == 200
assert response.text == "123\n"
def test_staticfiles_post(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
routes = [Mount("/", app=StaticFiles(directory=tmpdir), name="static")]
app = Starlette(routes=routes)
client = test_client_factory(app)
response = client.post("/example.txt")
assert response.status_code == 405
assert response.text == "Method Not Allowed"
def test_staticfiles_with_directory_returns_404(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
routes = [Mount("/", app=StaticFiles(directory=tmpdir), name="static")]
app = Starlette(routes=routes)
client = test_client_factory(app)
response = client.get("/")
assert response.status_code == 404
assert response.text == "Not Found"
def test_staticfiles_with_missing_file_returns_404(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
routes = [Mount("/", app=StaticFiles(directory=tmpdir), name="static")]
app = Starlette(routes=routes)
client = test_client_factory(app)
response = client.get("/404.txt")
assert response.status_code == 404
assert response.text == "Not Found"
def test_staticfiles_instantiated_with_missing_directory(tmpdir: Path) -> None:
with pytest.raises(RuntimeError) as exc_info:
path = os.path.join(tmpdir, "no_such_directory")
StaticFiles(directory=path)
assert "does not exist" in str(exc_info.value)
def test_staticfiles_configured_with_missing_directory(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "no_such_directory")
app = StaticFiles(directory=path, check_dir=False)
client = test_client_factory(app)
with pytest.raises(RuntimeError) as exc_info:
client.get("/example.txt")
assert "does not exist" in str(exc_info.value)
def test_staticfiles_configured_with_file_instead_of_directory(
tmpdir: Path, test_client_factory: TestClientFactory
) -> None:
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
app = StaticFiles(directory=path, check_dir=False)
client = test_client_factory(app)
with pytest.raises(RuntimeError) as exc_info:
client.get("/example.txt")
assert "is not a directory" in str(exc_info.value)
def test_staticfiles_config_check_occurs_only_once(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
app = StaticFiles(directory=tmpdir)
client = test_client_factory(app)
assert not app.config_checked
with pytest.raises(HTTPException):
client.get("/")
assert app.config_checked
with pytest.raises(HTTPException):
client.get("/")
def test_staticfiles_prevents_breaking_out_of_directory(tmpdir: Path) -> None:
directory = os.path.join(tmpdir, "foo")
os.mkdir(directory)
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("outside root dir")
app = StaticFiles(directory=directory)
# We can't test this with 'httpx', so we test the app directly here.
path = app.get_path({"path": "/../example.txt"})
scope = {"method": "GET"}
with pytest.raises(HTTPException) as exc_info:
anyio.run(app.get_response, path, scope)
assert exc_info.value.status_code == 404
assert exc_info.value.detail == "Not Found"
def test_staticfiles_never_read_file_for_head_method(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
app = StaticFiles(directory=tmpdir)
client = test_client_factory(app)
response = client.head("/example.txt")
assert response.status_code == 200
assert response.content == b""
assert response.headers["content-length"] == "14"
def test_staticfiles_304_with_etag_match(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
app = StaticFiles(directory=tmpdir)
client = test_client_factory(app)
first_resp = client.get("/example.txt")
assert first_resp.status_code == 200
last_etag = first_resp.headers["etag"]
second_resp = client.get("/example.txt", headers={"if-none-match": last_etag})
assert second_resp.status_code == 304
assert second_resp.content == b""
second_resp = client.get("/example.txt", headers={"if-none-match": f'W/{last_etag}, "123"'})
assert second_resp.status_code == 304
assert second_resp.content == b""
def test_staticfiles_200_with_etag_mismatch(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
app = StaticFiles(directory=tmpdir)
client = test_client_factory(app)
first_resp = client.get("/example.txt")
assert first_resp.status_code == 200
assert first_resp.headers["etag"] != '"123"'
second_resp = client.get("/example.txt", headers={"if-none-match": '"123"'})
assert second_resp.status_code == 200
assert second_resp.content == b"<file content>"
def test_staticfiles_200_with_etag_mismatch_and_timestamp_match(
tmpdir: Path, test_client_factory: TestClientFactory
) -> None:
path = tmpdir / "example.txt"
path.write_text("<file content>", encoding="utf-8")
app = StaticFiles(directory=tmpdir)
client = test_client_factory(app)
first_resp = client.get("/example.txt")
assert first_resp.status_code == 200
assert first_resp.headers["etag"] != '"123"'
last_modified = first_resp.headers["last-modified"]
# If `if-none-match` is present, `if-modified-since` is ignored.
second_resp = client.get("/example.txt", headers={"if-none-match": '"123"', "if-modified-since": last_modified})
assert second_resp.status_code == 200
assert second_resp.content == b"<file content>"
def test_staticfiles_304_with_last_modified_compare_last_req(
tmpdir: Path, test_client_factory: TestClientFactory
) -> None:
path = os.path.join(tmpdir, "example.txt")
file_last_modified_time = time.mktime(time.strptime("2013-10-10 23:40:00", "%Y-%m-%d %H:%M:%S"))
with open(path, "w") as file:
file.write("<file content>")
os.utime(path, (file_last_modified_time, file_last_modified_time))
app = StaticFiles(directory=tmpdir)
client = test_client_factory(app)
    # file last-modified (2013-10-10) is earlier than If-Modified-Since, so expect 304
    response = client.get("/example.txt", headers={"If-Modified-Since": "Fri, 11 Oct 2013 15:30:19 GMT"})
assert response.status_code == 304
assert response.content == b""
    # file last-modified is later than If-Modified-Since, so expect 200 with content
    response = client.get("/example.txt", headers={"If-Modified-Since": "Mon, 20 Feb 2012 15:30:19 GMT"})
assert response.status_code == 200
assert response.content == b"<file content>"
def test_staticfiles_html_normal(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "404.html")
with open(path, "w") as file:
file.write("<h1>Custom not found page</h1>")
path = os.path.join(tmpdir, "dir")
os.mkdir(path)
path = os.path.join(path, "index.html")
with open(path, "w") as file:
file.write("<h1>Hello</h1>")
app = StaticFiles(directory=tmpdir, html=True)
client = test_client_factory(app)
response = client.get("/dir/")
assert response.url == "http://testserver/dir/"
assert response.status_code == 200
assert response.text == "<h1>Hello</h1>"
response = client.get("/dir")
assert response.url == "http://testserver/dir/"
assert response.status_code == 200
assert response.text == "<h1>Hello</h1>"
response = client.get("/dir/index.html")
assert response.url == "http://testserver/dir/index.html"
assert response.status_code == 200
assert response.text == "<h1>Hello</h1>"
response = client.get("/missing")
assert response.status_code == 404
assert response.text == "<h1>Custom not found page</h1>"
def test_staticfiles_html_without_index(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "404.html")
with open(path, "w") as file:
file.write("<h1>Custom not found page</h1>")
path = os.path.join(tmpdir, "dir")
os.mkdir(path)
app = StaticFiles(directory=tmpdir, html=True)
client = test_client_factory(app)
response = client.get("/dir/")
assert response.url == "http://testserver/dir/"
assert response.status_code == 404
assert response.text == "<h1>Custom not found page</h1>"
response = client.get("/dir")
assert response.url == "http://testserver/dir"
assert response.status_code == 404
assert response.text == "<h1>Custom not found page</h1>"
response = client.get("/missing")
assert response.status_code == 404
assert response.text == "<h1>Custom not found page</h1>"
def test_staticfiles_html_without_404(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "dir")
os.mkdir(path)
path = os.path.join(path, "index.html")
with open(path, "w") as file:
file.write("<h1>Hello</h1>")
app = StaticFiles(directory=tmpdir, html=True)
client = test_client_factory(app)
response = client.get("/dir/")
assert response.url == "http://testserver/dir/"
assert response.status_code == 200
assert response.text == "<h1>Hello</h1>"
response = client.get("/dir")
assert response.url == "http://testserver/dir/"
assert response.status_code == 200
assert response.text == "<h1>Hello</h1>"
with pytest.raises(HTTPException) as exc_info:
response = client.get("/missing")
assert exc_info.value.status_code == 404
def test_staticfiles_html_only_files(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "hello.html")
with open(path, "w") as file:
file.write("<h1>Hello</h1>")
app = StaticFiles(directory=tmpdir, html=True)
client = test_client_factory(app)
with pytest.raises(HTTPException) as exc_info:
response = client.get("/")
assert exc_info.value.status_code == 404
response = client.get("/hello.html")
assert response.status_code == 200
assert response.text == "<h1>Hello</h1>"
def test_staticfiles_cache_invalidation_for_deleted_file_html_mode(
tmpdir: Path, test_client_factory: TestClientFactory
) -> None:
path_404 = os.path.join(tmpdir, "404.html")
with open(path_404, "w") as file:
file.write("<p>404 file</p>")
path_some = os.path.join(tmpdir, "some.html")
with open(path_some, "w") as file:
file.write("<p>some file</p>")
common_modified_time = time.mktime(time.strptime("2013-10-10 23:40:00", "%Y-%m-%d %H:%M:%S"))
os.utime(path_404, (common_modified_time, common_modified_time))
os.utime(path_some, (common_modified_time, common_modified_time))
app = StaticFiles(directory=tmpdir, html=True)
client = test_client_factory(app)
resp_exists = client.get("/some.html")
assert resp_exists.status_code == 200
assert resp_exists.text == "<p>some file</p>"
resp_cached = client.get(
"/some.html",
headers={"If-Modified-Since": resp_exists.headers["last-modified"]},
)
assert resp_cached.status_code == 304
os.remove(path_some)
resp_deleted = client.get(
"/some.html",
headers={"If-Modified-Since": resp_exists.headers["last-modified"]},
)
assert resp_deleted.status_code == 404
assert resp_deleted.text == "<p>404 file</p>"
def test_staticfiles_with_invalid_dir_permissions_returns_401(
tmp_path: Path, test_client_factory: TestClientFactory
) -> None:
(tmp_path / "example.txt").write_bytes(b"<file content>")
original_mode = tmp_path.stat().st_mode
tmp_path.chmod(stat.S_IRWXO)
try:
routes = [
Mount(
"/",
app=StaticFiles(directory=os.fsdecode(tmp_path)),
name="static",
)
]
app = Starlette(routes=routes)
client = test_client_factory(app)
response = client.get("/example.txt")
assert response.status_code == 401
assert response.text == "Unauthorized"
finally:
tmp_path.chmod(original_mode)
def test_staticfiles_with_missing_dir_returns_404(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
routes = [Mount("/", app=StaticFiles(directory=tmpdir), name="static")]
app = Starlette(routes=routes)
client = test_client_factory(app)
response = client.get("/foo/example.txt")
assert response.status_code == 404
assert response.text == "Not Found"
def test_staticfiles_access_file_as_dir_returns_404(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
routes = [Mount("/", app=StaticFiles(directory=tmpdir), name="static")]
app = Starlette(routes=routes)
client = test_client_factory(app)
response = client.get("/example.txt/foo")
assert response.status_code == 404
assert response.text == "Not Found"
def test_staticfiles_filename_too_long(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
routes = [Mount("/", app=StaticFiles(directory=tmpdir), name="static")]
app = Starlette(routes=routes)
client = test_client_factory(app)
path_max_size = os.pathconf("/", "PC_PATH_MAX")
response = client.get(f"/{'a' * path_max_size}.txt")
assert response.status_code == 404
assert response.text == "Not Found"
def test_staticfiles_unhandled_os_error_returns_500(
tmpdir: Path,
test_client_factory: TestClientFactory,
monkeypatch: pytest.MonkeyPatch,
) -> None:
def mock_timeout(*args: Any, **kwargs: Any) -> None:
raise TimeoutError
path = os.path.join(tmpdir, "example.txt")
with open(path, "w") as file:
file.write("<file content>")
routes = [Mount("/", app=StaticFiles(directory=tmpdir), name="static")]
app = Starlette(routes=routes)
client = test_client_factory(app, raise_server_exceptions=False)
monkeypatch.setattr("starlette.staticfiles.StaticFiles.lookup_path", mock_timeout)
response = client.get("/example.txt")
assert response.status_code == 500
assert response.text == "Internal Server Error"
def test_staticfiles_follows_symlinks(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
statics_path = os.path.join(tmpdir, "statics")
os.mkdir(statics_path)
source_path = tempfile.mkdtemp()
source_file_path = os.path.join(source_path, "page.html")
with open(source_file_path, "w") as file:
file.write("<h1>Hello</h1>")
statics_file_path = os.path.join(statics_path, "index.html")
os.symlink(source_file_path, statics_file_path)
app = StaticFiles(directory=statics_path, follow_symlink=True)
client = test_client_factory(app)
response = client.get("/index.html")
assert response.url == "http://testserver/index.html"
assert response.status_code == 200
assert response.text == "<h1>Hello</h1>"
def test_staticfiles_follows_symlink_directories(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
statics_path = os.path.join(tmpdir, "statics")
statics_html_path = os.path.join(statics_path, "html")
os.mkdir(statics_path)
source_path = tempfile.mkdtemp()
source_file_path = os.path.join(source_path, "page.html")
with open(source_file_path, "w") as file:
file.write("<h1>Hello</h1>")
os.symlink(source_path, statics_html_path)
app = StaticFiles(directory=statics_path, follow_symlink=True)
client = test_client_factory(app)
response = client.get("/html/page.html")
assert response.url == "http://testserver/html/page.html"
assert response.status_code == 200
assert response.text == "<h1>Hello</h1>"
def test_staticfiles_disallows_path_traversal_with_symlinks(tmpdir: Path) -> None:
statics_path = os.path.join(tmpdir, "statics")
root_source_path = tempfile.mkdtemp()
source_path = os.path.join(root_source_path, "statics")
os.mkdir(source_path)
source_file_path = os.path.join(root_source_path, "index.html")
with open(source_file_path, "w") as file:
file.write("<h1>Hello</h1>")
os.symlink(source_path, statics_path)
app = StaticFiles(directory=statics_path, follow_symlink=True)
# We can't test this with 'httpx', so we test the app directly here.
path = app.get_path({"path": "/../index.html"})
scope = {"method": "GET"}
with pytest.raises(HTTPException) as exc_info:
anyio.run(app.get_response, path, scope)
assert exc_info.value.status_code == 404
assert exc_info.value.detail == "Not Found"
def test_staticfiles_avoids_path_traversal(tmp_path: Path) -> None:
statics_path = tmp_path / "static"
statics_disallow_path = tmp_path / "static_disallow"
statics_path.mkdir()
statics_disallow_path.mkdir()
static_index_file = statics_path / "index.html"
statics_disallow_path_index_file = statics_disallow_path / "index.html"
static_file = tmp_path / "static1.txt"
static_index_file.write_text("<h1>Hello</h1>")
statics_disallow_path_index_file.write_text("<h1>Private</h1>")
static_file.write_text("Private")
app = StaticFiles(directory=statics_path)
# We can't test this with 'httpx', so we test the app directly here.
path = app.get_path({"path": "/../static1.txt"})
with pytest.raises(HTTPException) as exc_info:
anyio.run(app.get_response, path, {"method": "GET"})
assert exc_info.value.status_code == 404
assert exc_info.value.detail == "Not Found"
path = app.get_path({"path": "/../static_disallow/index.html"})
with pytest.raises(HTTPException) as exc_info:
anyio.run(app.get_response, path, {"method": "GET"})
assert exc_info.value.status_code == 404
assert exc_info.value.detail == "Not Found"
def test_staticfiles_self_symlinks(tmp_path: Path, test_client_factory: TestClientFactory) -> None:
statics_path = tmp_path / "statics"
statics_path.mkdir()
source_file_path = statics_path / "index.html"
source_file_path.write_text("<h1>Hello</h1>", encoding="utf-8")
statics_symlink_path = tmp_path / "statics_symlink"
statics_symlink_path.symlink_to(statics_path)
app = StaticFiles(directory=statics_symlink_path, follow_symlink=True)
client = test_client_factory(app)
response = client.get("/index.html")
assert response.url == "http://testserver/index.html"
assert response.status_code == 200
assert response.text == "<h1>Hello</h1>"
def test_staticfiles_relative_directory_symlinks(test_client_factory: TestClientFactory) -> None:
app = StaticFiles(directory="tests/statics", follow_symlink=True)
client = test_client_factory(app)
response = client.get("/example.txt")
assert response.status_code == 200
assert response.text == "123\n"
|
./temp_repos/starlette/starlette/staticfiles.py
|
./temp_repos/starlette/tests/test_staticfiles.py
|
starlette
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'NotModifiedResponse'.
Context:
- Class Name: NotModifiedResponse
- Dependencies to Mock: headers
- Key Imports: anyio.to_thread, starlette._utils, starlette.datastructures, starlette.responses, stat, typing, anyio, errno, importlib.util, starlette.types
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern. (A hedged example sketch follows below.)
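A minimal sketch of one way to satisfy this prompt, assuming NotModifiedResponse has the shape used in starlette.staticfiles (a Response subclass taking a Headers-like object, returning status 304, and forwarding only cache-related headers such as "etag"); the mock wiring and test names below are assumptions, not the canonical answer:

import unittest
from unittest.mock import MagicMock

from starlette.datastructures import Headers
from starlette.staticfiles import NotModifiedResponse


class TestNotModifiedResponse(unittest.TestCase):
    def test_returns_304_and_copies_cacheable_headers(self) -> None:
        # Arrange: mock the `headers` dependency, exposing items() like the real datastructure.
        headers = MagicMock(spec=Headers)
        headers.items.return_value = [("etag", '"abc"'), ("x-custom", "dropped")]
        # Act
        response = NotModifiedResponse(headers)
        # Assert: status is 304 and non-cacheable headers are not forwarded.
        self.assertEqual(response.status_code, 304)
        self.assertEqual(response.headers["etag"], '"abc"')
        self.assertNotIn("x-custom", response.headers)

    def test_rejects_object_without_items_interface(self) -> None:
        # Arrange / Act / Assert: an object lacking .items() should raise (assumed failure mode).
        with self.assertRaises(Exception):
            NotModifiedResponse(object())  # type: ignore[arg-type]


if __name__ == "__main__":
    unittest.main()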
|
NotModifiedResponse
|
python
|
from __future__ import annotations
import contextlib
import inspect
import io
import json
import math
import sys
import warnings
from collections.abc import Awaitable, Callable, Generator, Iterable, Mapping, MutableMapping, Sequence
from concurrent.futures import Future
from contextlib import AbstractContextManager
from types import GeneratorType
from typing import (
Any,
Literal,
TypedDict,
TypeGuard,
cast,
)
from urllib.parse import unquote, urljoin
import anyio
import anyio.abc
import anyio.from_thread
from anyio.streams.stapled import StapledObjectStream
from starlette._utils import is_async_callable
from starlette.types import ASGIApp, Message, Receive, Scope, Send
from starlette.websockets import WebSocketDisconnect
if sys.version_info >= (3, 11): # pragma: no cover
from typing import Self
else: # pragma: no cover
from typing_extensions import Self
try:
import httpx
except ModuleNotFoundError: # pragma: no cover
raise RuntimeError(
"The starlette.testclient module requires the httpx package to be installed.\n"
"You can install this with:\n"
" $ pip install httpx\n"
)
_PortalFactoryType = Callable[[], AbstractContextManager[anyio.abc.BlockingPortal]]
ASGIInstance = Callable[[Receive, Send], Awaitable[None]]
ASGI2App = Callable[[Scope], ASGIInstance]
ASGI3App = Callable[[Scope, Receive, Send], Awaitable[None]]
_RequestData = Mapping[str, str | Iterable[str] | bytes]
def _is_asgi3(app: ASGI2App | ASGI3App) -> TypeGuard[ASGI3App]:
if inspect.isclass(app):
return hasattr(app, "__await__")
return is_async_callable(app)
class _WrapASGI2:
"""
Provide an ASGI3 interface onto an ASGI2 app.
"""
def __init__(self, app: ASGI2App) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
instance = self.app(scope)
await instance(receive, send)
class _AsyncBackend(TypedDict):
backend: str
backend_options: dict[str, Any]
class _Upgrade(Exception):
def __init__(self, session: WebSocketTestSession) -> None:
self.session = session
class WebSocketDenialResponse( # type: ignore[misc]
httpx.Response,
WebSocketDisconnect,
):
"""
A special case of `WebSocketDisconnect`, raised in the `TestClient` if the
`WebSocket` is closed before being accepted with a `send_denial_response()`.
"""
class WebSocketTestSession:
def __init__(
self,
app: ASGI3App,
scope: Scope,
portal_factory: _PortalFactoryType,
) -> None:
self.app = app
self.scope = scope
self.accepted_subprotocol = None
self.portal_factory = portal_factory
self.extra_headers = None
def __enter__(self) -> WebSocketTestSession:
with contextlib.ExitStack() as stack:
self.portal = portal = stack.enter_context(self.portal_factory())
fut, cs = portal.start_task(self._run)
stack.callback(fut.result)
stack.callback(portal.call, cs.cancel)
self.send({"type": "websocket.connect"})
message = self.receive()
self._raise_on_close(message)
self.accepted_subprotocol = message.get("subprotocol", None)
self.extra_headers = message.get("headers", None)
stack.callback(self.close, 1000)
self.exit_stack = stack.pop_all()
return self
def __exit__(self, *args: Any) -> bool | None:
return self.exit_stack.__exit__(*args)
async def _run(self, *, task_status: anyio.abc.TaskStatus[anyio.CancelScope]) -> None:
"""
The sub-thread in which the websocket session runs.
"""
send: anyio.create_memory_object_stream[Message] = anyio.create_memory_object_stream(math.inf)
send_tx, send_rx = send
receive: anyio.create_memory_object_stream[Message] = anyio.create_memory_object_stream(math.inf)
receive_tx, receive_rx = receive
with send_tx, send_rx, receive_tx, receive_rx, anyio.CancelScope() as cs:
self._receive_tx = receive_tx
self._send_rx = send_rx
task_status.started(cs)
await self.app(self.scope, receive_rx.receive, send_tx.send)
# wait for cs.cancel to be called before closing streams
await anyio.sleep_forever()
def _raise_on_close(self, message: Message) -> None:
if message["type"] == "websocket.close":
raise WebSocketDisconnect(code=message.get("code", 1000), reason=message.get("reason", ""))
elif message["type"] == "websocket.http.response.start":
status_code: int = message["status"]
headers: list[tuple[bytes, bytes]] = message["headers"]
body: list[bytes] = []
while True:
message = self.receive()
assert message["type"] == "websocket.http.response.body"
body.append(message["body"])
if not message.get("more_body", False):
break
raise WebSocketDenialResponse(status_code=status_code, headers=headers, content=b"".join(body))
def send(self, message: Message) -> None:
self.portal.call(self._receive_tx.send, message)
def send_text(self, data: str) -> None:
self.send({"type": "websocket.receive", "text": data})
def send_bytes(self, data: bytes) -> None:
self.send({"type": "websocket.receive", "bytes": data})
def send_json(self, data: Any, mode: Literal["text", "binary"] = "text") -> None:
text = json.dumps(data, separators=(",", ":"), ensure_ascii=False)
if mode == "text":
self.send({"type": "websocket.receive", "text": text})
else:
self.send({"type": "websocket.receive", "bytes": text.encode("utf-8")})
def close(self, code: int = 1000, reason: str | None = None) -> None:
self.send({"type": "websocket.disconnect", "code": code, "reason": reason})
def receive(self) -> Message:
return self.portal.call(self._send_rx.receive)
def receive_text(self) -> str:
message = self.receive()
self._raise_on_close(message)
return cast(str, message["text"])
def receive_bytes(self) -> bytes:
message = self.receive()
self._raise_on_close(message)
return cast(bytes, message["bytes"])
def receive_json(self, mode: Literal["text", "binary"] = "text") -> Any:
message = self.receive()
self._raise_on_close(message)
if mode == "text":
text = message["text"]
else:
text = message["bytes"].decode("utf-8")
return json.loads(text)
class _TestClientTransport(httpx.BaseTransport):
def __init__(
self,
app: ASGI3App,
portal_factory: _PortalFactoryType,
raise_server_exceptions: bool = True,
root_path: str = "",
*,
client: tuple[str, int],
app_state: dict[str, Any],
) -> None:
self.app = app
self.raise_server_exceptions = raise_server_exceptions
self.root_path = root_path
self.portal_factory = portal_factory
self.app_state = app_state
self.client = client
def handle_request(self, request: httpx.Request) -> httpx.Response:
scheme = request.url.scheme
netloc = request.url.netloc.decode(encoding="ascii")
path = request.url.path
raw_path = request.url.raw_path
query = request.url.query.decode(encoding="ascii")
default_port = {"http": 80, "ws": 80, "https": 443, "wss": 443}[scheme]
if ":" in netloc:
host, port_string = netloc.split(":", 1)
port = int(port_string)
else:
host = netloc
port = default_port
# Include the 'host' header.
if "host" in request.headers:
headers: list[tuple[bytes, bytes]] = []
elif port == default_port: # pragma: no cover
headers = [(b"host", host.encode())]
else: # pragma: no cover
headers = [(b"host", (f"{host}:{port}").encode())]
# Include other request headers.
headers += [(key.lower().encode(), value.encode()) for key, value in request.headers.multi_items()]
scope: dict[str, Any]
if scheme in {"ws", "wss"}:
subprotocol = request.headers.get("sec-websocket-protocol", None)
if subprotocol is None:
subprotocols: Sequence[str] = []
else:
subprotocols = [value.strip() for value in subprotocol.split(",")]
scope = {
"type": "websocket",
"path": unquote(path),
"raw_path": raw_path.split(b"?", 1)[0],
"root_path": self.root_path,
"scheme": scheme,
"query_string": query.encode(),
"headers": headers,
"client": self.client,
"server": [host, port],
"subprotocols": subprotocols,
"state": self.app_state.copy(),
"extensions": {"websocket.http.response": {}},
}
session = WebSocketTestSession(self.app, scope, self.portal_factory)
raise _Upgrade(session)
scope = {
"type": "http",
"http_version": "1.1",
"method": request.method,
"path": unquote(path),
"raw_path": raw_path.split(b"?", 1)[0],
"root_path": self.root_path,
"scheme": scheme,
"query_string": query.encode(),
"headers": headers,
"client": self.client,
"server": [host, port],
"extensions": {"http.response.debug": {}},
"state": self.app_state.copy(),
}
request_complete = False
response_started = False
response_complete: anyio.Event
raw_kwargs: dict[str, Any] = {"stream": io.BytesIO()}
template = None
context = None
async def receive() -> Message:
nonlocal request_complete
if request_complete:
if not response_complete.is_set():
await response_complete.wait()
return {"type": "http.disconnect"}
body = request.read()
if isinstance(body, str):
body_bytes: bytes = body.encode("utf-8") # pragma: no cover
elif body is None:
body_bytes = b"" # pragma: no cover
elif isinstance(body, GeneratorType):
try: # pragma: no cover
chunk = body.send(None)
if isinstance(chunk, str):
chunk = chunk.encode("utf-8")
return {"type": "http.request", "body": chunk, "more_body": True}
except StopIteration: # pragma: no cover
request_complete = True
return {"type": "http.request", "body": b""}
else:
body_bytes = body
request_complete = True
return {"type": "http.request", "body": body_bytes}
async def send(message: Message) -> None:
nonlocal raw_kwargs, response_started, template, context
if message["type"] == "http.response.start":
assert not response_started, 'Received multiple "http.response.start" messages.'
raw_kwargs["status_code"] = message["status"]
raw_kwargs["headers"] = [(key.decode(), value.decode()) for key, value in message.get("headers", [])]
response_started = True
elif message["type"] == "http.response.body":
assert response_started, 'Received "http.response.body" without "http.response.start".'
assert not response_complete.is_set(), 'Received "http.response.body" after response completed.'
body = message.get("body", b"")
more_body = message.get("more_body", False)
if request.method != "HEAD":
raw_kwargs["stream"].write(body)
if not more_body:
raw_kwargs["stream"].seek(0)
response_complete.set()
elif message["type"] == "http.response.debug":
template = message["info"]["template"]
context = message["info"]["context"]
try:
with self.portal_factory() as portal:
response_complete = portal.call(anyio.Event)
portal.call(self.app, scope, receive, send)
except BaseException as exc:
if self.raise_server_exceptions:
raise exc
if self.raise_server_exceptions:
assert response_started, "TestClient did not receive any response."
elif not response_started:
raw_kwargs = {
"status_code": 500,
"headers": [],
"stream": io.BytesIO(),
}
raw_kwargs["stream"] = httpx.ByteStream(raw_kwargs["stream"].read())
response = httpx.Response(**raw_kwargs, request=request)
if template is not None:
response.template = template # type: ignore[attr-defined]
response.context = context # type: ignore[attr-defined]
return response
class TestClient(httpx.Client):
__test__ = False
task: Future[None]
portal: anyio.abc.BlockingPortal | None = None
def __init__(
self,
app: ASGIApp,
base_url: str = "http://testserver",
raise_server_exceptions: bool = True,
root_path: str = "",
backend: Literal["asyncio", "trio"] = "asyncio",
backend_options: dict[str, Any] | None = None,
cookies: httpx._types.CookieTypes | None = None,
headers: dict[str, str] | None = None,
follow_redirects: bool = True,
client: tuple[str, int] = ("testclient", 50000),
) -> None:
self.async_backend = _AsyncBackend(backend=backend, backend_options=backend_options or {})
if _is_asgi3(app):
asgi_app = app
else:
app = cast(ASGI2App, app) # type: ignore[assignment]
asgi_app = _WrapASGI2(app) # type: ignore[arg-type]
self.app = asgi_app
self.app_state: dict[str, Any] = {}
transport = _TestClientTransport(
self.app,
portal_factory=self._portal_factory,
raise_server_exceptions=raise_server_exceptions,
root_path=root_path,
app_state=self.app_state,
client=client,
)
if headers is None:
headers = {}
headers.setdefault("user-agent", "testclient")
super().__init__(
base_url=base_url,
headers=headers,
transport=transport,
follow_redirects=follow_redirects,
cookies=cookies,
)
@contextlib.contextmanager
def _portal_factory(self) -> Generator[anyio.abc.BlockingPortal, None, None]:
if self.portal is not None:
yield self.portal
else:
with anyio.from_thread.start_blocking_portal(**self.async_backend) as portal:
yield portal
def request( # type: ignore[override]
self,
method: str,
url: httpx._types.URLTypes,
*,
content: httpx._types.RequestContent | None = None,
data: _RequestData | None = None,
files: httpx._types.RequestFiles | None = None,
json: Any = None,
params: httpx._types.QueryParamTypes | None = None,
headers: httpx._types.HeaderTypes | None = None,
cookies: httpx._types.CookieTypes | None = None,
auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: bool | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
extensions: dict[str, Any] | None = None,
) -> httpx.Response:
if timeout is not httpx.USE_CLIENT_DEFAULT:
warnings.warn(
"You should not use the 'timeout' argument with the TestClient. "
"See https://github.com/Kludex/starlette/issues/1108 for more information.",
DeprecationWarning,
)
url = self._merge_url(url)
return super().request(
method,
url,
content=content,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=follow_redirects,
timeout=timeout,
extensions=extensions,
)
def get( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
params: httpx._types.QueryParamTypes | None = None,
headers: httpx._types.HeaderTypes | None = None,
cookies: httpx._types.CookieTypes | None = None,
auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: bool | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
extensions: dict[str, Any] | None = None,
) -> httpx.Response:
return super().get(
url,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=follow_redirects,
timeout=timeout,
extensions=extensions,
)
def options( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
params: httpx._types.QueryParamTypes | None = None,
headers: httpx._types.HeaderTypes | None = None,
cookies: httpx._types.CookieTypes | None = None,
auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: bool | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
extensions: dict[str, Any] | None = None,
) -> httpx.Response:
return super().options(
url,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=follow_redirects,
timeout=timeout,
extensions=extensions,
)
def head( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
params: httpx._types.QueryParamTypes | None = None,
headers: httpx._types.HeaderTypes | None = None,
cookies: httpx._types.CookieTypes | None = None,
auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: bool | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
extensions: dict[str, Any] | None = None,
) -> httpx.Response:
return super().head(
url,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=follow_redirects,
timeout=timeout,
extensions=extensions,
)
def post( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
content: httpx._types.RequestContent | None = None,
data: _RequestData | None = None,
files: httpx._types.RequestFiles | None = None,
json: Any = None,
params: httpx._types.QueryParamTypes | None = None,
headers: httpx._types.HeaderTypes | None = None,
cookies: httpx._types.CookieTypes | None = None,
auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: bool | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
extensions: dict[str, Any] | None = None,
) -> httpx.Response:
return super().post(
url,
content=content,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=follow_redirects,
timeout=timeout,
extensions=extensions,
)
def put( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
content: httpx._types.RequestContent | None = None,
data: _RequestData | None = None,
files: httpx._types.RequestFiles | None = None,
json: Any = None,
params: httpx._types.QueryParamTypes | None = None,
headers: httpx._types.HeaderTypes | None = None,
cookies: httpx._types.CookieTypes | None = None,
auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: bool | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
extensions: dict[str, Any] | None = None,
) -> httpx.Response:
return super().put(
url,
content=content,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=follow_redirects,
timeout=timeout,
extensions=extensions,
)
def patch( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
content: httpx._types.RequestContent | None = None,
data: _RequestData | None = None,
files: httpx._types.RequestFiles | None = None,
json: Any = None,
params: httpx._types.QueryParamTypes | None = None,
headers: httpx._types.HeaderTypes | None = None,
cookies: httpx._types.CookieTypes | None = None,
auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: bool | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
extensions: dict[str, Any] | None = None,
) -> httpx.Response:
return super().patch(
url,
content=content,
data=data,
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=follow_redirects,
timeout=timeout,
extensions=extensions,
)
def delete( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
params: httpx._types.QueryParamTypes | None = None,
headers: httpx._types.HeaderTypes | None = None,
cookies: httpx._types.CookieTypes | None = None,
auth: httpx._types.AuthTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: bool | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
timeout: httpx._types.TimeoutTypes | httpx._client.UseClientDefault = httpx._client.USE_CLIENT_DEFAULT,
extensions: dict[str, Any] | None = None,
) -> httpx.Response:
return super().delete(
url,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=follow_redirects,
timeout=timeout,
extensions=extensions,
)
def websocket_connect(
self,
url: str,
subprotocols: Sequence[str] | None = None,
**kwargs: Any,
) -> WebSocketTestSession:
url = urljoin("ws://testserver", url)
headers = kwargs.get("headers", {})
headers.setdefault("connection", "upgrade")
headers.setdefault("sec-websocket-key", "testserver==")
headers.setdefault("sec-websocket-version", "13")
if subprotocols is not None:
headers.setdefault("sec-websocket-protocol", ", ".join(subprotocols))
kwargs["headers"] = headers
try:
super().request("GET", url, **kwargs)
except _Upgrade as exc:
session = exc.session
else:
raise RuntimeError("Expected WebSocket upgrade") # pragma: no cover
return session
def __enter__(self) -> Self:
with contextlib.ExitStack() as stack:
self.portal = portal = stack.enter_context(anyio.from_thread.start_blocking_portal(**self.async_backend))
@stack.callback
def reset_portal() -> None:
self.portal = None
send: anyio.create_memory_object_stream[MutableMapping[str, Any] | None] = (
anyio.create_memory_object_stream(math.inf)
)
receive: anyio.create_memory_object_stream[MutableMapping[str, Any]] = anyio.create_memory_object_stream(
math.inf
)
for channel in (*send, *receive):
stack.callback(channel.close)
self.stream_send = StapledObjectStream(*send)
self.stream_receive = StapledObjectStream(*receive)
self.task = portal.start_task_soon(self.lifespan)
portal.call(self.wait_startup)
@stack.callback
def wait_shutdown() -> None:
portal.call(self.wait_shutdown)
self.exit_stack = stack.pop_all()
return self
def __exit__(self, *args: Any) -> None:
self.exit_stack.close()
async def lifespan(self) -> None:
scope = {"type": "lifespan", "state": self.app_state}
try:
await self.app(scope, self.stream_receive.receive, self.stream_send.send)
finally:
await self.stream_send.send(None)
async def wait_startup(self) -> None:
await self.stream_receive.send({"type": "lifespan.startup"})
async def receive() -> Any:
message = await self.stream_send.receive()
if message is None:
self.task.result()
return message
message = await receive()
assert message["type"] in (
"lifespan.startup.complete",
"lifespan.startup.failed",
)
if message["type"] == "lifespan.startup.failed":
await receive()
async def wait_shutdown(self) -> None:
async def receive() -> Any:
message = await self.stream_send.receive()
if message is None:
self.task.result()
return message
await self.stream_receive.send({"type": "lifespan.shutdown"})
message = await receive()
assert message["type"] in (
"lifespan.shutdown.complete",
"lifespan.shutdown.failed",
)
if message["type"] == "lifespan.shutdown.failed":
await receive()
|
from __future__ import annotations
import itertools
import sys
from asyncio import Task, current_task as asyncio_current_task
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from typing import Any
import anyio
import anyio.lowlevel
import pytest
import sniffio
import trio.lowlevel
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.requests import Request
from starlette.responses import JSONResponse, RedirectResponse, Response
from starlette.routing import Route
from starlette.testclient import ASGIInstance, TestClient
from starlette.types import ASGIApp, Receive, Scope, Send
from starlette.websockets import WebSocket, WebSocketDisconnect
from tests.types import TestClientFactory
def mock_service_endpoint(request: Request) -> JSONResponse:
return JSONResponse({"mock": "example"})
mock_service = Starlette(routes=[Route("/", endpoint=mock_service_endpoint)])
def current_task() -> Task[Any] | trio.lowlevel.Task:
# anyio's TaskInfo comparisons are invalid after their associated native
# task object is GC'd https://github.com/agronholm/anyio/issues/324
asynclib_name = sniffio.current_async_library()
if asynclib_name == "trio":
return trio.lowlevel.current_task()
if asynclib_name == "asyncio":
task = asyncio_current_task()
if task is None:
raise RuntimeError("must be called from a running task") # pragma: no cover
return task
raise RuntimeError(f"unsupported asynclib={asynclib_name}") # pragma: no cover
def startup() -> None:
raise RuntimeError()
def test_use_testclient_in_endpoint(test_client_factory: TestClientFactory) -> None:
"""
We should be able to use the test client within applications.
This is useful if we need to mock out other services,
during tests or in development.
"""
def homepage(request: Request) -> JSONResponse:
client = test_client_factory(mock_service)
response = client.get("/")
return JSONResponse(response.json())
app = Starlette(routes=[Route("/", endpoint=homepage)])
client = test_client_factory(app)
response = client.get("/")
assert response.json() == {"mock": "example"}
def test_testclient_headers_behavior() -> None:
"""
We should be able to use the test client with user defined headers.
This is useful if we need to set custom headers for authentication
during tests or in development.
"""
client = TestClient(mock_service)
assert client.headers.get("user-agent") == "testclient"
client = TestClient(mock_service, headers={"user-agent": "non-default-agent"})
assert client.headers.get("user-agent") == "non-default-agent"
client = TestClient(mock_service, headers={"Authentication": "Bearer 123"})
assert client.headers.get("user-agent") == "testclient"
assert client.headers.get("Authentication") == "Bearer 123"
def test_use_testclient_as_contextmanager(test_client_factory: TestClientFactory, anyio_backend_name: str) -> None:
"""
    This test asserts a number of properties that are important for an
    app-level task group.
"""
counter = itertools.count()
identity_runvar = anyio.lowlevel.RunVar[int]("identity_runvar")
def get_identity() -> int:
try:
return identity_runvar.get()
except LookupError:
token = next(counter)
identity_runvar.set(token)
return token
startup_task = object()
startup_loop = None
shutdown_task = object()
shutdown_loop = None
@asynccontextmanager
async def lifespan_context(app: Starlette) -> AsyncGenerator[None, None]:
nonlocal startup_task, startup_loop, shutdown_task, shutdown_loop
startup_task = current_task()
startup_loop = get_identity()
async with anyio.create_task_group():
yield
shutdown_task = current_task()
shutdown_loop = get_identity()
async def loop_id(request: Request) -> JSONResponse:
return JSONResponse(get_identity())
app = Starlette(
lifespan=lifespan_context,
routes=[Route("/loop_id", endpoint=loop_id)],
)
client = test_client_factory(app)
with client:
# within a TestClient context every async request runs in the same thread
assert client.get("/loop_id").json() == 0
assert client.get("/loop_id").json() == 0
# that thread is also the same as the lifespan thread
assert startup_loop == 0
assert shutdown_loop == 0
# lifespan events run in the same task, this is important because a task
# group must be entered and exited in the same task.
assert startup_task is shutdown_task
# outside the TestClient context, new requests continue to spawn in new
# event loops in new threads
assert client.get("/loop_id").json() == 1
assert client.get("/loop_id").json() == 2
first_task = startup_task
with client:
# the TestClient context can be re-used, starting a new lifespan task
# in a new thread
assert client.get("/loop_id").json() == 3
assert client.get("/loop_id").json() == 3
assert startup_loop == 3
assert shutdown_loop == 3
        # lifespan events still run in the same task within the context, but...
assert startup_task is shutdown_task
# ... the second TestClient context creates a new lifespan task.
assert first_task is not startup_task
def test_error_on_startup(test_client_factory: TestClientFactory) -> None:
with pytest.deprecated_call(match="The on_startup and on_shutdown parameters are deprecated"):
startup_error_app = Starlette(on_startup=[startup])
with pytest.raises(RuntimeError):
with test_client_factory(startup_error_app):
pass # pragma: no cover
def test_exception_in_middleware(test_client_factory: TestClientFactory) -> None:
class MiddlewareException(Exception):
pass
class BrokenMiddleware:
def __init__(self, app: ASGIApp):
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
raise MiddlewareException()
broken_middleware = Starlette(middleware=[Middleware(BrokenMiddleware)])
with pytest.raises(MiddlewareException):
with test_client_factory(broken_middleware):
pass # pragma: no cover
def test_testclient_asgi2(test_client_factory: TestClientFactory) -> None:
def app(scope: Scope) -> ASGIInstance:
async def inner(receive: Receive, send: Send) -> None:
await send(
{
"type": "http.response.start",
"status": 200,
"headers": [[b"content-type", b"text/plain"]],
}
)
await send({"type": "http.response.body", "body": b"Hello, world!"})
return inner
client = test_client_factory(app) # type: ignore
response = client.get("/")
assert response.text == "Hello, world!"
def test_testclient_asgi3(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
await send(
{
"type": "http.response.start",
"status": 200,
"headers": [[b"content-type", b"text/plain"]],
}
)
await send({"type": "http.response.body", "body": b"Hello, world!"})
client = test_client_factory(app)
response = client.get("/")
assert response.text == "Hello, world!"
def test_websocket_blocking_receive(test_client_factory: TestClientFactory) -> None:
def app(scope: Scope) -> ASGIInstance:
async def respond(websocket: WebSocket) -> None:
await websocket.send_json({"message": "test"})
async def asgi(receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
async with anyio.create_task_group() as task_group:
task_group.start_soon(respond, websocket)
try:
# this will block as the client does not send us data
# it should not prevent `respond` from executing though
await websocket.receive_json()
except WebSocketDisconnect:
pass
return asgi
client = test_client_factory(app) # type: ignore
with client.websocket_connect("/") as websocket:
data = websocket.receive_json()
assert data == {"message": "test"}
def test_websocket_not_block_on_close(test_client_factory: TestClientFactory) -> None:
cancelled = False
def app(scope: Scope) -> ASGIInstance:
async def asgi(receive: Receive, send: Send) -> None:
nonlocal cancelled
try:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await anyio.sleep_forever()
except anyio.get_cancelled_exc_class():
cancelled = True
raise
return asgi
client = test_client_factory(app) # type: ignore
with client.websocket_connect("/"):
...
assert cancelled
def test_client(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
client = scope.get("client")
assert client is not None
host, port = client
response = JSONResponse({"host": host, "port": port})
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.json() == {"host": "testclient", "port": 50000}
def test_client_custom_client(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
client = scope.get("client")
assert client is not None
host, port = client
response = JSONResponse({"host": host, "port": port})
await response(scope, receive, send)
client = test_client_factory(app, client=("192.168.0.1", 3000))
response = client.get("/")
assert response.json() == {"host": "192.168.0.1", "port": 3000}
@pytest.mark.parametrize("param", ("2020-07-14T00:00:00+00:00", "España", "voilà"))
def test_query_params(test_client_factory: TestClientFactory, param: str) -> None:
def homepage(request: Request) -> Response:
return Response(request.query_params["param"])
app = Starlette(routes=[Route("/", endpoint=homepage)])
client = test_client_factory(app)
response = client.get("/", params={"param": param})
assert response.text == param
@pytest.mark.parametrize(
"domain, ok",
[
pytest.param(
"testserver",
True,
marks=[
pytest.mark.xfail(
sys.version_info < (3, 11),
reason="Fails due to domain handling in http.cookiejar module (see #2152)",
),
],
),
("testserver.local", True),
("localhost", False),
("example.com", False),
],
)
def test_domain_restricted_cookies(test_client_factory: TestClientFactory, domain: str, ok: bool) -> None:
"""
Test that test client discards domain restricted cookies which do not match the
base_url of the testclient (`http://testserver` by default).
The domain `testserver.local` works because the Python http.cookiejar module derives
the "effective domain" by appending `.local` to non-dotted request domains
in accordance with RFC 2965.
"""
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = Response("Hello, world!", media_type="text/plain")
response.set_cookie(
"mycookie",
"myvalue",
path="/",
domain=domain,
)
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
cookie_set = len(response.cookies) == 1
assert cookie_set == ok
def test_forward_follow_redirects(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
if "/ok" in scope["path"]:
response = Response("ok")
else:
response = RedirectResponse("/ok")
await response(scope, receive, send)
client = test_client_factory(app, follow_redirects=True)
response = client.get("/")
assert response.status_code == 200
def test_forward_nofollow_redirects(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = RedirectResponse("/ok")
await response(scope, receive, send)
client = test_client_factory(app, follow_redirects=False)
response = client.get("/")
assert response.status_code == 307
def test_with_duplicate_headers(test_client_factory: TestClientFactory) -> None:
def homepage(request: Request) -> JSONResponse:
return JSONResponse({"x-token": request.headers.getlist("x-token")})
app = Starlette(routes=[Route("/", endpoint=homepage)])
client = test_client_factory(app)
response = client.get("/", headers=[("x-token", "foo"), ("x-token", "bar")])
assert response.json() == {"x-token": ["foo", "bar"]}
def test_merge_url(test_client_factory: TestClientFactory) -> None:
def homepage(request: Request) -> Response:
return Response(request.url.path)
app = Starlette(routes=[Route("/api/v1/bar", endpoint=homepage)])
client = test_client_factory(app, base_url="http://testserver/api/v1/")
response = client.get("/bar")
assert response.text == "/api/v1/bar"
def test_raw_path_with_querystring(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = Response(scope.get("raw_path"))
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/hello-world", params={"foo": "bar"})
assert response.content == b"/hello-world"
def test_websocket_raw_path_without_params(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
raw_path = scope.get("raw_path")
assert raw_path is not None
await websocket.send_bytes(raw_path)
client = test_client_factory(app)
with client.websocket_connect("/hello-world", params={"foo": "bar"}) as websocket:
data = websocket.receive_bytes()
assert data == b"/hello-world"
def test_timeout_deprecation() -> None:
with pytest.deprecated_call(match="You should not use the 'timeout' argument with the TestClient."):
client = TestClient(mock_service)
client.get("/", timeout=1)
|
./temp_repos/starlette/starlette/testclient.py
|
./temp_repos/starlette/tests/test_testclient.py
|
starlette
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class '_WrapASGI2'.
Context:
- Class Name: _WrapASGI2
- Dependencies to Mock: app, follow_redirects, headers, backend_options, client, raise_server_exceptions, base_url, session, scope, root_path, portal_factory, cookies, backend
- Key Imports: anyio, concurrent.futures, starlette.websockets, __future__, anyio.abc, contextlib, io, math, typing_extensions, typing
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern. (A hedged example sketch follows below.)
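A minimal sketch for this prompt, based on the _WrapASGI2 implementation shown above (it calls app(scope) and then awaits the returned ASGI instance with receive/send); the test names and mock wiring are assumptions:

import unittest
from unittest.mock import AsyncMock, MagicMock

from starlette.testclient import _WrapASGI2


class TestWrapASGI2(unittest.IsolatedAsyncioTestCase):
    async def test_delegates_scope_then_awaits_instance(self) -> None:
        # Arrange: an ASGI2 app is a callable taking scope and returning an ASGI instance.
        instance = AsyncMock()
        app = MagicMock(return_value=instance)
        scope, receive, send = {"type": "http"}, AsyncMock(), AsyncMock()
        wrapper = _WrapASGI2(app)
        # Act
        await wrapper(scope, receive, send)
        # Assert: the scope goes to the app, receive/send go to the instance.
        app.assert_called_once_with(scope)
        instance.assert_awaited_once_with(receive, send)

    async def test_propagates_errors_from_the_instance(self) -> None:
        # Arrange: the inner ASGI instance raises.
        instance = AsyncMock(side_effect=RuntimeError("boom"))
        wrapper = _WrapASGI2(MagicMock(return_value=instance))
        # Act / Assert: the wrapper should not swallow the failure.
        with self.assertRaises(RuntimeError):
            await wrapper({"type": "http"}, AsyncMock(), AsyncMock())


if __name__ == "__main__":
    unittest.main()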
|
_WrapASGI2
|
python
|
from __future__ import annotations
import enum
import json
from collections.abc import AsyncIterator, Iterable
from typing import Any, cast
from starlette.requests import HTTPConnection
from starlette.responses import Response
from starlette.types import Message, Receive, Scope, Send
class WebSocketState(enum.Enum):
CONNECTING = 0
CONNECTED = 1
DISCONNECTED = 2
RESPONSE = 3
class WebSocketDisconnect(Exception):
def __init__(self, code: int = 1000, reason: str | None = None) -> None:
self.code = code
self.reason = reason or ""
class WebSocket(HTTPConnection):
def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:
super().__init__(scope)
assert scope["type"] == "websocket"
self._receive = receive
self._send = send
self.client_state = WebSocketState.CONNECTING
self.application_state = WebSocketState.CONNECTING
async def receive(self) -> Message:
"""
Receive ASGI websocket messages, ensuring valid state transitions.
"""
if self.client_state == WebSocketState.CONNECTING:
message = await self._receive()
message_type = message["type"]
if message_type != "websocket.connect":
raise RuntimeError(f'Expected ASGI message "websocket.connect", but got {message_type!r}')
self.client_state = WebSocketState.CONNECTED
return message
elif self.client_state == WebSocketState.CONNECTED:
message = await self._receive()
message_type = message["type"]
if message_type not in {"websocket.receive", "websocket.disconnect"}:
raise RuntimeError(
f'Expected ASGI message "websocket.receive" or "websocket.disconnect", but got {message_type!r}'
)
if message_type == "websocket.disconnect":
self.client_state = WebSocketState.DISCONNECTED
return message
else:
raise RuntimeError('Cannot call "receive" once a disconnect message has been received.')
async def send(self, message: Message) -> None:
"""
Send ASGI websocket messages, ensuring valid state transitions.
"""
if self.application_state == WebSocketState.CONNECTING:
message_type = message["type"]
if message_type not in {"websocket.accept", "websocket.close", "websocket.http.response.start"}:
raise RuntimeError(
'Expected ASGI message "websocket.accept", "websocket.close" or "websocket.http.response.start", '
f"but got {message_type!r}"
)
if message_type == "websocket.close":
self.application_state = WebSocketState.DISCONNECTED
elif message_type == "websocket.http.response.start":
self.application_state = WebSocketState.RESPONSE
else:
self.application_state = WebSocketState.CONNECTED
await self._send(message)
elif self.application_state == WebSocketState.CONNECTED:
message_type = message["type"]
if message_type not in {"websocket.send", "websocket.close"}:
raise RuntimeError(
f'Expected ASGI message "websocket.send" or "websocket.close", but got {message_type!r}'
)
if message_type == "websocket.close":
self.application_state = WebSocketState.DISCONNECTED
try:
await self._send(message)
except OSError:
self.application_state = WebSocketState.DISCONNECTED
raise WebSocketDisconnect(code=1006)
elif self.application_state == WebSocketState.RESPONSE:
message_type = message["type"]
if message_type != "websocket.http.response.body":
raise RuntimeError(f'Expected ASGI message "websocket.http.response.body", but got {message_type!r}')
if not message.get("more_body", False):
self.application_state = WebSocketState.DISCONNECTED
await self._send(message)
else:
raise RuntimeError('Cannot call "send" once a close message has been sent.')
async def accept(
self,
subprotocol: str | None = None,
headers: Iterable[tuple[bytes, bytes]] | None = None,
) -> None:
headers = headers or []
if self.client_state == WebSocketState.CONNECTING: # pragma: no branch
# If we haven't yet seen the 'connect' message, then wait for it first.
await self.receive()
await self.send({"type": "websocket.accept", "subprotocol": subprotocol, "headers": headers})
def _raise_on_disconnect(self, message: Message) -> None:
if message["type"] == "websocket.disconnect":
raise WebSocketDisconnect(message["code"], message.get("reason"))
async def receive_text(self) -> str:
if self.application_state != WebSocketState.CONNECTED:
raise RuntimeError('WebSocket is not connected. Need to call "accept" first.')
message = await self.receive()
self._raise_on_disconnect(message)
return cast(str, message["text"])
async def receive_bytes(self) -> bytes:
if self.application_state != WebSocketState.CONNECTED:
raise RuntimeError('WebSocket is not connected. Need to call "accept" first.')
message = await self.receive()
self._raise_on_disconnect(message)
return cast(bytes, message["bytes"])
async def receive_json(self, mode: str = "text") -> Any:
if mode not in {"text", "binary"}:
raise RuntimeError('The "mode" argument should be "text" or "binary".')
if self.application_state != WebSocketState.CONNECTED:
raise RuntimeError('WebSocket is not connected. Need to call "accept" first.')
message = await self.receive()
self._raise_on_disconnect(message)
if mode == "text":
text = message["text"]
else:
text = message["bytes"].decode("utf-8")
return json.loads(text)
async def iter_text(self) -> AsyncIterator[str]:
try:
while True:
yield await self.receive_text()
except WebSocketDisconnect:
pass
async def iter_bytes(self) -> AsyncIterator[bytes]:
try:
while True:
yield await self.receive_bytes()
except WebSocketDisconnect:
pass
async def iter_json(self) -> AsyncIterator[Any]:
try:
while True:
yield await self.receive_json()
except WebSocketDisconnect:
pass
async def send_text(self, data: str) -> None:
await self.send({"type": "websocket.send", "text": data})
async def send_bytes(self, data: bytes) -> None:
await self.send({"type": "websocket.send", "bytes": data})
async def send_json(self, data: Any, mode: str = "text") -> None:
if mode not in {"text", "binary"}:
raise RuntimeError('The "mode" argument should be "text" or "binary".')
text = json.dumps(data, separators=(",", ":"), ensure_ascii=False)
if mode == "text":
await self.send({"type": "websocket.send", "text": text})
else:
await self.send({"type": "websocket.send", "bytes": text.encode("utf-8")})
async def close(self, code: int = 1000, reason: str | None = None) -> None:
await self.send({"type": "websocket.close", "code": code, "reason": reason or ""})
async def send_denial_response(self, response: Response) -> None:
if "websocket.http.response" in self.scope.get("extensions", {}):
await response(self.scope, self.receive, self.send)
else:
raise RuntimeError("The server doesn't support the Websocket Denial Response extension.")
class WebSocketClose:
def __init__(self, code: int = 1000, reason: str | None = None) -> None:
self.code = code
self.reason = reason or ""
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await send({"type": "websocket.close", "code": self.code, "reason": self.reason})
|
import sys
from collections.abc import MutableMapping
from typing import Any
import anyio
import pytest
from anyio.abc import ObjectReceiveStream, ObjectSendStream
from starlette import status
from starlette.responses import Response
from starlette.testclient import WebSocketDenialResponse
from starlette.types import Message, Receive, Scope, Send
from starlette.websockets import WebSocket, WebSocketDisconnect, WebSocketState
from tests.types import TestClientFactory
def test_websocket_url(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await websocket.send_json({"url": str(websocket.url)})
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/123?a=abc") as websocket:
data = websocket.receive_json()
assert data == {"url": "ws://testserver/123?a=abc"}
def test_websocket_binary_json(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
message = await websocket.receive_json(mode="binary")
await websocket.send_json(message, mode="binary")
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/123?a=abc") as websocket:
websocket.send_json({"test": "data"}, mode="binary")
data = websocket.receive_json(mode="binary")
assert data == {"test": "data"}
def test_websocket_ensure_unicode_on_send_json(
test_client_factory: TestClientFactory,
) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
message = await websocket.receive_json(mode="text")
await websocket.send_json(message, mode="text")
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/123?a=abc") as websocket:
websocket.send_json({"test": "数据"}, mode="text")
data = websocket.receive_text()
assert data == '{"test":"数据"}'
def test_websocket_query_params(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
query_params = dict(websocket.query_params)
await websocket.accept()
await websocket.send_json({"params": query_params})
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/?a=abc&b=456") as websocket:
data = websocket.receive_json()
assert data == {"params": {"a": "abc", "b": "456"}}
@pytest.mark.skipif(
any(module in sys.modules for module in ("brotli", "brotlicffi")),
    reason='urllib3 adds "br" to the "accept-encoding" header.',
)
def test_websocket_headers(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
headers = dict(websocket.headers)
await websocket.accept()
await websocket.send_json({"headers": headers})
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
expected_headers = {
"accept": "*/*",
"accept-encoding": "gzip, deflate",
"connection": "upgrade",
"host": "testserver",
"user-agent": "testclient",
"sec-websocket-key": "testserver==",
"sec-websocket-version": "13",
}
data = websocket.receive_json()
assert data == {"headers": expected_headers}
def test_websocket_port(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await websocket.send_json({"port": websocket.url.port})
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("ws://example.com:123/123?a=abc") as websocket:
data = websocket.receive_json()
assert data == {"port": 123}
def test_websocket_send_and_receive_text(
test_client_factory: TestClientFactory,
) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
data = await websocket.receive_text()
await websocket.send_text("Message was: " + data)
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
websocket.send_text("Hello, world!")
data = websocket.receive_text()
assert data == "Message was: Hello, world!"
def test_websocket_send_and_receive_bytes(
test_client_factory: TestClientFactory,
) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
data = await websocket.receive_bytes()
await websocket.send_bytes(b"Message was: " + data)
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
websocket.send_bytes(b"Hello, world!")
data = websocket.receive_bytes()
assert data == b"Message was: Hello, world!"
def test_websocket_send_and_receive_json(
test_client_factory: TestClientFactory,
) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
data = await websocket.receive_json()
await websocket.send_json({"message": data})
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
websocket.send_json({"hello": "world"})
data = websocket.receive_json()
assert data == {"message": {"hello": "world"}}
def test_websocket_iter_text(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
async for data in websocket.iter_text():
await websocket.send_text("Message was: " + data)
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
websocket.send_text("Hello, world!")
data = websocket.receive_text()
assert data == "Message was: Hello, world!"
def test_websocket_iter_bytes(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
async for data in websocket.iter_bytes():
await websocket.send_bytes(b"Message was: " + data)
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
websocket.send_bytes(b"Hello, world!")
data = websocket.receive_bytes()
assert data == b"Message was: Hello, world!"
def test_websocket_iter_json(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
async for data in websocket.iter_json():
await websocket.send_json({"message": data})
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
websocket.send_json({"hello": "world"})
data = websocket.receive_json()
assert data == {"message": {"hello": "world"}}
def test_websocket_concurrency_pattern(test_client_factory: TestClientFactory) -> None:
stream_send: ObjectSendStream[MutableMapping[str, Any]]
stream_receive: ObjectReceiveStream[MutableMapping[str, Any]]
stream_send, stream_receive = anyio.create_memory_object_stream()
async def reader(websocket: WebSocket) -> None:
async with stream_send:
async for data in websocket.iter_json():
await stream_send.send(data)
async def writer(websocket: WebSocket) -> None:
async with stream_receive:
async for message in stream_receive:
await websocket.send_json(message)
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
async with anyio.create_task_group() as task_group:
task_group.start_soon(reader, websocket)
await writer(websocket)
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
websocket.send_json({"hello": "world"})
data = websocket.receive_json()
assert data == {"hello": "world"}
def test_client_close(test_client_factory: TestClientFactory) -> None:
close_code = None
close_reason = None
async def app(scope: Scope, receive: Receive, send: Send) -> None:
nonlocal close_code, close_reason
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
try:
await websocket.receive_text()
except WebSocketDisconnect as exc:
close_code = exc.code
close_reason = exc.reason
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
websocket.close(code=status.WS_1001_GOING_AWAY, reason="Going Away")
assert close_code == status.WS_1001_GOING_AWAY
assert close_reason == "Going Away"
@pytest.mark.anyio
async def test_client_disconnect_on_send() -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await websocket.send_text("Hello, world!")
async def receive() -> Message:
return {"type": "websocket.connect"}
async def send(message: Message) -> None:
if message["type"] == "websocket.accept":
return
        # Simulate the exception the server raises in the application when the client disconnects.
raise OSError
with pytest.raises(WebSocketDisconnect) as ctx:
await app({"type": "websocket", "path": "/"}, receive, send)
assert ctx.value.code == status.WS_1006_ABNORMAL_CLOSURE
def test_application_close(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await websocket.close(status.WS_1001_GOING_AWAY)
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
with pytest.raises(WebSocketDisconnect) as exc:
websocket.receive_text()
assert exc.value.code == status.WS_1001_GOING_AWAY
def test_rejected_connection(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
msg = await websocket.receive()
assert msg == {"type": "websocket.connect"}
await websocket.close(status.WS_1001_GOING_AWAY)
client = test_client_factory(app)
with pytest.raises(WebSocketDisconnect) as exc:
with client.websocket_connect("/"):
pass # pragma: no cover
assert exc.value.code == status.WS_1001_GOING_AWAY
def test_send_denial_response(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
msg = await websocket.receive()
assert msg == {"type": "websocket.connect"}
response = Response(status_code=404, content="foo")
await websocket.send_denial_response(response)
client = test_client_factory(app)
with pytest.raises(WebSocketDenialResponse) as exc:
with client.websocket_connect("/"):
pass # pragma: no cover
assert exc.value.status_code == 404
assert exc.value.content == b"foo"
def test_send_response_multi(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
msg = await websocket.receive()
assert msg == {"type": "websocket.connect"}
await websocket.send(
{
"type": "websocket.http.response.start",
"status": 404,
"headers": [(b"content-type", b"text/plain"), (b"foo", b"bar")],
}
)
await websocket.send({"type": "websocket.http.response.body", "body": b"hard", "more_body": True})
await websocket.send({"type": "websocket.http.response.body", "body": b"body"})
client = test_client_factory(app)
with pytest.raises(WebSocketDenialResponse) as exc:
with client.websocket_connect("/"):
pass # pragma: no cover
assert exc.value.status_code == 404
assert exc.value.content == b"hardbody"
assert exc.value.headers["foo"] == "bar"
def test_send_response_unsupported(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
del scope["extensions"]["websocket.http.response"]
websocket = WebSocket(scope, receive=receive, send=send)
msg = await websocket.receive()
assert msg == {"type": "websocket.connect"}
response = Response(status_code=404, content="foo")
with pytest.raises(
RuntimeError,
match="The server doesn't support the Websocket Denial Response extension.",
):
await websocket.send_denial_response(response)
await websocket.close()
client = test_client_factory(app)
with pytest.raises(WebSocketDisconnect) as exc:
with client.websocket_connect("/"):
pass # pragma: no cover
assert exc.value.code == status.WS_1000_NORMAL_CLOSURE
def test_send_response_duplicate_start(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
msg = await websocket.receive()
assert msg == {"type": "websocket.connect"}
response = Response(status_code=404, content="foo")
await websocket.send(
{
"type": "websocket.http.response.start",
"status": response.status_code,
"headers": response.raw_headers,
}
)
await websocket.send(
{
"type": "websocket.http.response.start",
"status": response.status_code,
"headers": response.raw_headers,
}
)
client = test_client_factory(app)
with pytest.raises(
RuntimeError,
match=("Expected ASGI message \"websocket.http.response.body\", but got 'websocket.http.response.start'"),
):
with client.websocket_connect("/"):
pass # pragma: no cover
def test_subprotocol(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
assert websocket["subprotocols"] == ["soap", "wamp"]
await websocket.accept(subprotocol="wamp")
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/", subprotocols=["soap", "wamp"]) as websocket:
assert websocket.accepted_subprotocol == "wamp"
def test_additional_headers(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept(headers=[(b"additional", b"header")])
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
assert websocket.extra_headers == [(b"additional", b"header")]
def test_no_additional_headers(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await websocket.close()
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
assert websocket.extra_headers == []
def test_websocket_exception(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
assert False
client = test_client_factory(app)
with pytest.raises(AssertionError):
with client.websocket_connect("/123?a=abc"):
pass # pragma: no cover
def test_duplicate_close(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await websocket.close()
await websocket.close()
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/"):
pass # pragma: no cover
def test_duplicate_disconnect(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
message = await websocket.receive()
assert message["type"] == "websocket.disconnect"
message = await websocket.receive()
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/") as websocket:
websocket.close()
def test_websocket_scope_interface() -> None:
"""
A WebSocket can be instantiated with a scope, and presents a `Mapping`
interface.
"""
async def mock_receive() -> Message: # type: ignore
... # pragma: no cover
async def mock_send(message: Message) -> None: ... # pragma: no cover
websocket = WebSocket({"type": "websocket", "path": "/abc/", "headers": []}, receive=mock_receive, send=mock_send)
assert websocket["type"] == "websocket"
assert dict(websocket) == {"type": "websocket", "path": "/abc/", "headers": []}
assert len(websocket) == 3
# check __eq__ and __hash__
assert websocket != WebSocket(
{"type": "websocket", "path": "/abc/", "headers": []},
receive=mock_receive,
send=mock_send,
)
assert websocket == websocket
assert websocket in {websocket}
assert {websocket} == {websocket}
def test_websocket_close_reason(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await websocket.close(code=status.WS_1001_GOING_AWAY, reason="Going Away")
client = test_client_factory(app)
with client.websocket_connect("/") as websocket:
with pytest.raises(WebSocketDisconnect) as exc:
websocket.receive_text()
assert exc.value.code == status.WS_1001_GOING_AWAY
assert exc.value.reason == "Going Away"
def test_send_json_invalid_mode(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await websocket.send_json({}, mode="invalid")
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/"):
pass # pragma: no cover
def test_receive_json_invalid_mode(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await websocket.receive_json(mode="invalid")
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/"):
pass # pragma: no cover
def test_receive_text_before_accept(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.receive_text()
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/"):
pass # pragma: no cover
def test_receive_bytes_before_accept(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.receive_bytes()
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/"):
pass # pragma: no cover
def test_receive_json_before_accept(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.receive_json()
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/"):
pass # pragma: no cover
def test_send_before_accept(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.send({"type": "websocket.send"})
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/"):
pass # pragma: no cover
def test_send_wrong_message_type(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.send({"type": "websocket.accept"})
await websocket.send({"type": "websocket.accept"})
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/"):
pass # pragma: no cover
def test_receive_before_accept(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
websocket.client_state = WebSocketState.CONNECTING
await websocket.receive()
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/") as websocket:
websocket.send({"type": "websocket.send"})
def test_receive_wrong_message_type(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
websocket = WebSocket(scope, receive=receive, send=send)
await websocket.accept()
await websocket.receive()
client = test_client_factory(app)
with pytest.raises(RuntimeError):
with client.websocket_connect("/") as websocket:
websocket.send({"type": "websocket.connect"})
|
./temp_repos/starlette/starlette/websockets.py
|
./temp_repos/starlette/tests/test_websockets.py
|
starlette
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'WebSocketState'.
Context:
- Class Name: WebSocketState
- Dependencies to Mock: send, receive, scope, code, reason
- Key Imports: starlette.responses, enum, typing, collections.abc, starlette.types, json, __future__, starlette.requests
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
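For reference, a minimal sketch of what such a test might look like (a sketch under the stated requirements, assuming WebSocketState is the connection-state enum consumed by WebSocket and that a bare scope dict suffices to construct one; the mock and test names are illustrative, not part of the original record):
import unittest
from unittest.mock import MagicMock
from starlette.websockets import WebSocket, WebSocketState
class TestWebSocketState(unittest.TestCase):
    def setUp(self) -> None:
        # Arrange: mock the ASGI receive/send callables and build a minimal scope.
        self.mock_receive = MagicMock(name="receive")
        self.mock_send = MagicMock(name="send")
        self.scope = {"type": "websocket", "path": "/", "headers": []}
    def test_initial_states_are_connecting(self) -> None:
        # Act: constructing a WebSocket must not touch receive or send.
        websocket = WebSocket(self.scope, receive=self.mock_receive, send=self.mock_send)
        # Assert: both sides start in the CONNECTING state.
        self.assertIs(websocket.client_state, WebSocketState.CONNECTING)
        self.assertIs(websocket.application_state, WebSocketState.CONNECTING)
        self.mock_receive.assert_not_called()
        self.mock_send.assert_not_called()
    def test_states_are_distinct(self) -> None:
        # Assert (failure guard): distinct states must never compare equal.
        self.assertNotEqual(WebSocketState.CONNECTING, WebSocketState.CONNECTED)
        self.assertNotEqual(WebSocketState.CONNECTED, WebSocketState.DISCONNECTED)
if __name__ == "__main__":
    unittest.main()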
|
WebSocketState
|
python
|
from __future__ import annotations
import hashlib
import http.cookies
import json
import os
import stat
import sys
import warnings
from collections.abc import AsyncIterable, Awaitable, Callable, Iterable, Mapping, Sequence
from datetime import datetime
from email.utils import format_datetime, formatdate
from functools import partial
from mimetypes import guess_type
from secrets import token_hex
from typing import Any, Literal
from urllib.parse import quote
import anyio
import anyio.to_thread
from starlette._utils import collapse_excgroups
from starlette.background import BackgroundTask
from starlette.concurrency import iterate_in_threadpool
from starlette.datastructures import URL, Headers, MutableHeaders
from starlette.requests import ClientDisconnect
from starlette.types import Receive, Scope, Send
class Response:
media_type = None
charset = "utf-8"
def __init__(
self,
content: Any = None,
status_code: int = 200,
headers: Mapping[str, str] | None = None,
media_type: str | None = None,
background: BackgroundTask | None = None,
) -> None:
self.status_code = status_code
if media_type is not None:
self.media_type = media_type
self.background = background
self.body = self.render(content)
self.init_headers(headers)
def render(self, content: Any) -> bytes | memoryview:
if content is None:
return b""
if isinstance(content, bytes | memoryview):
return content
return content.encode(self.charset) # type: ignore
def init_headers(self, headers: Mapping[str, str] | None = None) -> None:
if headers is None:
raw_headers: list[tuple[bytes, bytes]] = []
populate_content_length = True
populate_content_type = True
else:
raw_headers = [(k.lower().encode("latin-1"), v.encode("latin-1")) for k, v in headers.items()]
keys = [h[0] for h in raw_headers]
populate_content_length = b"content-length" not in keys
populate_content_type = b"content-type" not in keys
body = getattr(self, "body", None)
if (
body is not None
and populate_content_length
and not (self.status_code < 200 or self.status_code in (204, 304))
):
content_length = str(len(body))
raw_headers.append((b"content-length", content_length.encode("latin-1")))
content_type = self.media_type
if content_type is not None and populate_content_type:
if content_type.startswith("text/") and "charset=" not in content_type.lower():
content_type += "; charset=" + self.charset
raw_headers.append((b"content-type", content_type.encode("latin-1")))
self.raw_headers = raw_headers
@property
def headers(self) -> MutableHeaders:
if not hasattr(self, "_headers"):
self._headers = MutableHeaders(raw=self.raw_headers)
return self._headers
def set_cookie(
self,
key: str,
value: str = "",
max_age: int | None = None,
expires: datetime | str | int | None = None,
path: str | None = "/",
domain: str | None = None,
secure: bool = False,
httponly: bool = False,
samesite: Literal["lax", "strict", "none"] | None = "lax",
partitioned: bool = False,
) -> None:
cookie: http.cookies.BaseCookie[str] = http.cookies.SimpleCookie()
cookie[key] = value
if max_age is not None:
cookie[key]["max-age"] = max_age
if expires is not None:
if isinstance(expires, datetime):
cookie[key]["expires"] = format_datetime(expires, usegmt=True)
else:
cookie[key]["expires"] = expires
if path is not None:
cookie[key]["path"] = path
if domain is not None:
cookie[key]["domain"] = domain
if secure:
cookie[key]["secure"] = True
if httponly:
cookie[key]["httponly"] = True
if samesite is not None:
assert samesite.lower() in [
"strict",
"lax",
"none",
], "samesite must be either 'strict', 'lax' or 'none'"
cookie[key]["samesite"] = samesite
if partitioned:
if sys.version_info < (3, 14):
raise ValueError("Partitioned cookies are only supported in Python 3.14 and above.") # pragma: no cover
cookie[key]["partitioned"] = True # pragma: no cover
cookie_val = cookie.output(header="").strip()
self.raw_headers.append((b"set-cookie", cookie_val.encode("latin-1")))
def delete_cookie(
self,
key: str,
path: str = "/",
domain: str | None = None,
secure: bool = False,
httponly: bool = False,
samesite: Literal["lax", "strict", "none"] | None = "lax",
) -> None:
self.set_cookie(
key,
max_age=0,
expires=0,
path=path,
domain=domain,
secure=secure,
httponly=httponly,
samesite=samesite,
)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
prefix = "websocket." if scope["type"] == "websocket" else ""
await send(
{
"type": prefix + "http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
}
)
await send({"type": prefix + "http.response.body", "body": self.body})
if self.background is not None:
await self.background()
class HTMLResponse(Response):
media_type = "text/html"
class PlainTextResponse(Response):
media_type = "text/plain"
class JSONResponse(Response):
media_type = "application/json"
def __init__(
self,
content: Any,
status_code: int = 200,
headers: Mapping[str, str] | None = None,
media_type: str | None = None,
background: BackgroundTask | None = None,
) -> None:
super().__init__(content, status_code, headers, media_type, background)
def render(self, content: Any) -> bytes:
return json.dumps(
content,
ensure_ascii=False,
allow_nan=False,
indent=None,
separators=(",", ":"),
).encode("utf-8")
class RedirectResponse(Response):
def __init__(
self,
url: str | URL,
status_code: int = 307,
headers: Mapping[str, str] | None = None,
background: BackgroundTask | None = None,
) -> None:
super().__init__(content=b"", status_code=status_code, headers=headers, background=background)
self.headers["location"] = quote(str(url), safe=":/%#?=@[]!$&'()*+,;")
Content = str | bytes | memoryview
SyncContentStream = Iterable[Content]
AsyncContentStream = AsyncIterable[Content]
ContentStream = AsyncContentStream | SyncContentStream
class StreamingResponse(Response):
body_iterator: AsyncContentStream
def __init__(
self,
content: ContentStream,
status_code: int = 200,
headers: Mapping[str, str] | None = None,
media_type: str | None = None,
background: BackgroundTask | None = None,
) -> None:
if isinstance(content, AsyncIterable):
self.body_iterator = content
else:
self.body_iterator = iterate_in_threadpool(content)
self.status_code = status_code
self.media_type = self.media_type if media_type is None else media_type
self.background = background
self.init_headers(headers)
async def listen_for_disconnect(self, receive: Receive) -> None:
while True:
message = await receive()
if message["type"] == "http.disconnect":
break
async def stream_response(self, send: Send) -> None:
await send(
{
"type": "http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
}
)
async for chunk in self.body_iterator:
if not isinstance(chunk, bytes | memoryview):
chunk = chunk.encode(self.charset)
await send({"type": "http.response.body", "body": chunk, "more_body": True})
await send({"type": "http.response.body", "body": b"", "more_body": False})
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
spec_version = tuple(map(int, scope.get("asgi", {}).get("spec_version", "2.0").split(".")))
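        # Illustrative note (an assumption, not in the original source): ASGI HTTP
        # spec 2.4+ lets the server raise OSError from send() when the client
        # disconnects, so no separate disconnect-listener task is needed there.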
if spec_version >= (2, 4):
try:
await self.stream_response(send)
except OSError:
raise ClientDisconnect()
else:
with collapse_excgroups():
async with anyio.create_task_group() as task_group:
async def wrap(func: Callable[[], Awaitable[None]]) -> None:
await func()
task_group.cancel_scope.cancel()
task_group.start_soon(wrap, partial(self.stream_response, send))
await wrap(partial(self.listen_for_disconnect, receive))
if self.background is not None:
await self.background()
class MalformedRangeHeader(Exception):
def __init__(self, content: str = "Malformed range header.") -> None:
self.content = content
class RangeNotSatisfiable(Exception):
def __init__(self, max_size: int) -> None:
self.max_size = max_size
class FileResponse(Response):
chunk_size = 64 * 1024
def __init__(
self,
path: str | os.PathLike[str],
status_code: int = 200,
headers: Mapping[str, str] | None = None,
media_type: str | None = None,
background: BackgroundTask | None = None,
filename: str | None = None,
stat_result: os.stat_result | None = None,
method: str | None = None,
content_disposition_type: str = "attachment",
) -> None:
self.path = path
self.status_code = status_code
self.filename = filename
if method is not None:
warnings.warn(
"The 'method' parameter is not used, and it will be removed.",
DeprecationWarning,
)
if media_type is None:
media_type = guess_type(filename or path)[0] or "text/plain"
self.media_type = media_type
self.background = background
self.init_headers(headers)
self.headers.setdefault("accept-ranges", "bytes")
if self.filename is not None:
content_disposition_filename = quote(self.filename)
if content_disposition_filename != self.filename:
content_disposition = f"{content_disposition_type}; filename*=utf-8''{content_disposition_filename}"
else:
content_disposition = f'{content_disposition_type}; filename="{self.filename}"'
self.headers.setdefault("content-disposition", content_disposition)
self.stat_result = stat_result
if stat_result is not None:
self.set_stat_headers(stat_result)
def set_stat_headers(self, stat_result: os.stat_result) -> None:
content_length = str(stat_result.st_size)
last_modified = formatdate(stat_result.st_mtime, usegmt=True)
etag_base = str(stat_result.st_mtime) + "-" + str(stat_result.st_size)
etag = f'"{hashlib.md5(etag_base.encode(), usedforsecurity=False).hexdigest()}"'
self.headers.setdefault("content-length", content_length)
self.headers.setdefault("last-modified", last_modified)
self.headers.setdefault("etag", etag)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
send_header_only: bool = scope["method"].upper() == "HEAD"
send_pathsend: bool = "http.response.pathsend" in scope.get("extensions", {})
if self.stat_result is None:
try:
stat_result = await anyio.to_thread.run_sync(os.stat, self.path)
self.set_stat_headers(stat_result)
except FileNotFoundError:
raise RuntimeError(f"File at path {self.path} does not exist.")
else:
mode = stat_result.st_mode
if not stat.S_ISREG(mode):
raise RuntimeError(f"File at path {self.path} is not a file.")
else:
stat_result = self.stat_result
headers = Headers(scope=scope)
http_range = headers.get("range")
http_if_range = headers.get("if-range")
if http_range is None or (http_if_range is not None and not self._should_use_range(http_if_range)):
await self._handle_simple(send, send_header_only, send_pathsend)
else:
try:
ranges = self._parse_range_header(http_range, stat_result.st_size)
except MalformedRangeHeader as exc:
return await PlainTextResponse(exc.content, status_code=400)(scope, receive, send)
except RangeNotSatisfiable as exc:
response = PlainTextResponse(status_code=416, headers={"Content-Range": f"*/{exc.max_size}"})
return await response(scope, receive, send)
if len(ranges) == 1:
start, end = ranges[0]
await self._handle_single_range(send, start, end, stat_result.st_size, send_header_only)
else:
await self._handle_multiple_ranges(send, ranges, stat_result.st_size, send_header_only)
if self.background is not None:
await self.background()
async def _handle_simple(self, send: Send, send_header_only: bool, send_pathsend: bool) -> None:
await send({"type": "http.response.start", "status": self.status_code, "headers": self.raw_headers})
if send_header_only:
await send({"type": "http.response.body", "body": b"", "more_body": False})
elif send_pathsend:
await send({"type": "http.response.pathsend", "path": str(self.path)})
else:
async with await anyio.open_file(self.path, mode="rb") as file:
more_body = True
while more_body:
chunk = await file.read(self.chunk_size)
more_body = len(chunk) == self.chunk_size
await send({"type": "http.response.body", "body": chunk, "more_body": more_body})
async def _handle_single_range(
self, send: Send, start: int, end: int, file_size: int, send_header_only: bool
) -> None:
self.headers["content-range"] = f"bytes {start}-{end - 1}/{file_size}"
self.headers["content-length"] = str(end - start)
await send({"type": "http.response.start", "status": 206, "headers": self.raw_headers})
if send_header_only:
await send({"type": "http.response.body", "body": b"", "more_body": False})
else:
async with await anyio.open_file(self.path, mode="rb") as file:
await file.seek(start)
more_body = True
while more_body:
chunk = await file.read(min(self.chunk_size, end - start))
start += len(chunk)
more_body = len(chunk) == self.chunk_size and start < end
await send({"type": "http.response.body", "body": chunk, "more_body": more_body})
async def _handle_multiple_ranges(
self,
send: Send,
ranges: list[tuple[int, int]],
file_size: int,
send_header_only: bool,
) -> None:
# In firefox and chrome, they use boundary with 95-96 bits entropy (that's roughly 13 bytes).
boundary = token_hex(13)
content_length, header_generator = self.generate_multipart(
ranges, boundary, file_size, self.headers["content-type"]
)
self.headers["content-range"] = f"multipart/byteranges; boundary={boundary}"
self.headers["content-length"] = str(content_length)
await send({"type": "http.response.start", "status": 206, "headers": self.raw_headers})
if send_header_only:
await send({"type": "http.response.body", "body": b"", "more_body": False})
else:
async with await anyio.open_file(self.path, mode="rb") as file:
for start, end in ranges:
await send({"type": "http.response.body", "body": header_generator(start, end), "more_body": True})
await file.seek(start)
while start < end:
chunk = await file.read(min(self.chunk_size, end - start))
start += len(chunk)
await send({"type": "http.response.body", "body": chunk, "more_body": True})
await send({"type": "http.response.body", "body": b"\n", "more_body": True})
await send(
{
"type": "http.response.body",
"body": f"\n--{boundary}--\n".encode("latin-1"),
"more_body": False,
}
)
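    # Illustrative note (not in the original source): If-Range gates the Range
    # header; a partial response is served only while the validator still
    # matches the file's current Last-Modified value or ETag.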
def _should_use_range(self, http_if_range: str) -> bool:
return http_if_range == self.headers["last-modified"] or http_if_range == self.headers["etag"]
@classmethod
def _parse_range_header(cls, http_range: str, file_size: int) -> list[tuple[int, int]]:
ranges: list[tuple[int, int]] = []
try:
units, range_ = http_range.split("=", 1)
except ValueError:
raise MalformedRangeHeader()
units = units.strip().lower()
if units != "bytes":
raise MalformedRangeHeader("Only support bytes range")
ranges = cls._parse_ranges(range_, file_size)
if len(ranges) == 0:
raise MalformedRangeHeader("Range header: range must be requested")
if any(not (0 <= start < file_size) for start, _ in ranges):
raise RangeNotSatisfiable(file_size)
if any(start > end for start, end in ranges):
raise MalformedRangeHeader("Range header: start must be less than end")
if len(ranges) == 1:
return ranges
# Merge ranges
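        # Illustrative note (not in the original source): ranges are half-open
        # [start, end) pairs, so e.g. [(0, 5), (3, 8), (10, 12)] merges below to
        # [(0, 8), (10, 12)].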
result: list[tuple[int, int]] = []
for start, end in ranges:
for p in range(len(result)):
p_start, p_end = result[p]
if start > p_end:
continue
elif end < p_start:
result.insert(p, (start, end)) # THIS IS NOT REACHED!
break
else:
result[p] = (min(start, p_start), max(end, p_end))
break
else:
result.append((start, end))
return result
@classmethod
def _parse_ranges(cls, range_: str, file_size: int) -> list[tuple[int, int]]:
ranges: list[tuple[int, int]] = []
for part in range_.split(","):
part = part.strip()
# If the range is empty or a single dash, we ignore it.
if not part or part == "-":
continue
# If the range is not in the format "start-end", we ignore it.
if "-" not in part:
continue
start_str, end_str = part.split("-", 1)
start_str = start_str.strip()
end_str = end_str.strip()
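            # Illustrative note (not in the original source): for file_size=100,
            # "0-49" -> (0, 50), "-20" -> (80, 100) and "10-" -> (10, 100).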
try:
start = int(start_str) if start_str else file_size - int(end_str)
end = int(end_str) + 1 if start_str and end_str and int(end_str) < file_size else file_size
ranges.append((start, end))
except ValueError:
# If the range is not numeric, we ignore it.
continue
return ranges
def generate_multipart(
self,
ranges: Sequence[tuple[int, int]],
boundary: str,
max_size: int,
content_type: str,
) -> tuple[int, Callable[[int, int], bytes]]:
r"""
Multipart response headers generator.
```
--{boundary}\n
Content-Type: {content_type}\n
Content-Range: bytes {start}-{end-1}/{max_size}\n
\n
..........content...........\n
--{boundary}\n
Content-Type: {content_type}\n
Content-Range: bytes {start}-{end-1}/{max_size}\n
\n
..........content...........\n
--{boundary}--\n
```
"""
boundary_len = len(boundary)
static_header_part_len = 44 + boundary_len + len(content_type) + len(str(max_size))
content_length = sum(
(len(str(start)) + len(str(end - 1)) + static_header_part_len) # Headers
+ (end - start) # Content
for start, end in ranges
) + (
5 + boundary_len # --boundary--\n
)
return (
content_length,
lambda start, end: (
f"--{boundary}\nContent-Type: {content_type}\nContent-Range: bytes {start}-{end - 1}/{max_size}\n\n"
).encode("latin-1"),
)
|
from __future__ import annotations
import datetime as dt
import sys
import time
from collections.abc import AsyncGenerator, AsyncIterator, Iterator
from http.cookies import SimpleCookie
from pathlib import Path
from typing import Any
import anyio
import pytest
from starlette import status
from starlette.background import BackgroundTask
from starlette.datastructures import Headers
from starlette.requests import ClientDisconnect, Request
from starlette.responses import FileResponse, JSONResponse, RedirectResponse, Response, StreamingResponse
from starlette.testclient import TestClient
from starlette.types import Message, Receive, Scope, Send
from tests.types import TestClientFactory
def test_text_response(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = Response("hello, world", media_type="text/plain")
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.text == "hello, world"
def test_bytes_response(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = Response(b"xxxxx", media_type="image/png")
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.content == b"xxxxx"
def test_json_none_response(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = JSONResponse(None)
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.json() is None
assert response.content == b"null"
def test_redirect_response(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
if scope["path"] == "/":
response = Response("hello, world", media_type="text/plain")
else:
response = RedirectResponse("/")
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/redirect")
assert response.text == "hello, world"
assert response.url == "http://testserver/"
def test_quoting_redirect_response(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
if scope["path"] == "/I ♥ Starlette/":
response = Response("hello, world", media_type="text/plain")
else:
response = RedirectResponse("/I ♥ Starlette/")
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/redirect")
assert response.text == "hello, world"
assert response.url == "http://testserver/I%20%E2%99%A5%20Starlette/"
def test_redirect_response_content_length_header(
test_client_factory: TestClientFactory,
) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
if scope["path"] == "/":
response = Response("hello", media_type="text/plain") # pragma: no cover
else:
response = RedirectResponse("/")
await response(scope, receive, send)
client: TestClient = test_client_factory(app)
response = client.request("GET", "/redirect", follow_redirects=False)
assert response.url == "http://testserver/redirect"
assert response.headers["content-length"] == "0"
def test_streaming_response(test_client_factory: TestClientFactory) -> None:
filled_by_bg_task = ""
async def app(scope: Scope, receive: Receive, send: Send) -> None:
async def numbers(minimum: int, maximum: int) -> AsyncIterator[str]:
for i in range(minimum, maximum + 1):
yield str(i)
if i != maximum:
yield ", "
await anyio.sleep(0)
async def numbers_for_cleanup(start: int = 1, stop: int = 5) -> None:
nonlocal filled_by_bg_task
async for thing in numbers(start, stop):
filled_by_bg_task = filled_by_bg_task + thing
cleanup_task = BackgroundTask(numbers_for_cleanup, start=6, stop=9)
generator = numbers(1, 5)
response = StreamingResponse(generator, media_type="text/plain", background=cleanup_task)
await response(scope, receive, send)
assert filled_by_bg_task == ""
client = test_client_factory(app)
response = client.get("/")
assert response.text == "1, 2, 3, 4, 5"
assert filled_by_bg_task == "6, 7, 8, 9"
def test_streaming_response_custom_iterator(
test_client_factory: TestClientFactory,
) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
class CustomAsyncIterator:
def __init__(self) -> None:
self._called = 0
def __aiter__(self) -> AsyncIterator[str]:
return self
async def __anext__(self) -> str:
if self._called == 5:
raise StopAsyncIteration()
self._called += 1
return str(self._called)
response = StreamingResponse(CustomAsyncIterator(), media_type="text/plain")
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.text == "12345"
def test_streaming_response_custom_iterable(
test_client_factory: TestClientFactory,
) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
class CustomAsyncIterable:
async def __aiter__(self) -> AsyncIterator[str | bytes]:
for i in range(5):
yield str(i + 1)
response = StreamingResponse(CustomAsyncIterable(), media_type="text/plain")
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.text == "12345"
def test_sync_streaming_response(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
def numbers(minimum: int, maximum: int) -> Iterator[str]:
for i in range(minimum, maximum + 1):
yield str(i)
if i != maximum:
yield ", "
generator = numbers(1, 5)
response = StreamingResponse(generator, media_type="text/plain")
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.text == "1, 2, 3, 4, 5"
def test_response_headers(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
headers = {"x-header-1": "123", "x-header-2": "456"}
response = Response("hello, world", media_type="text/plain", headers=headers)
response.headers["x-header-2"] = "789"
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.headers["x-header-1"] == "123"
assert response.headers["x-header-2"] == "789"
def test_response_phrase(test_client_factory: TestClientFactory) -> None:
app = Response(status_code=204)
client = test_client_factory(app)
response = client.get("/")
assert response.reason_phrase == "No Content"
app = Response(b"", status_code=123)
client = test_client_factory(app)
response = client.get("/")
assert response.reason_phrase == ""
def test_file_response(tmp_path: Path, test_client_factory: TestClientFactory) -> None:
path = tmp_path / "xyz"
content = b"<file content>" * 1000
path.write_bytes(content)
filled_by_bg_task = ""
async def numbers(minimum: int, maximum: int) -> AsyncIterator[str]:
for i in range(minimum, maximum + 1):
yield str(i)
if i != maximum:
yield ", "
await anyio.sleep(0)
async def numbers_for_cleanup(start: int = 1, stop: int = 5) -> None:
nonlocal filled_by_bg_task
async for thing in numbers(start, stop):
filled_by_bg_task = filled_by_bg_task + thing
cleanup_task = BackgroundTask(numbers_for_cleanup, start=6, stop=9)
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = FileResponse(path=path, filename="example.png", background=cleanup_task)
await response(scope, receive, send)
assert filled_by_bg_task == ""
client = test_client_factory(app)
response = client.get("/")
expected_disposition = 'attachment; filename="example.png"'
assert response.status_code == status.HTTP_200_OK
assert response.content == content
assert response.headers["content-type"] == "image/png"
assert response.headers["content-disposition"] == expected_disposition
assert "content-length" in response.headers
assert "last-modified" in response.headers
assert "etag" in response.headers
assert filled_by_bg_task == "6, 7, 8, 9"
@pytest.mark.anyio
async def test_file_response_on_head_method(tmp_path: Path) -> None:
path = tmp_path / "xyz"
content = b"<file content>" * 1000
path.write_bytes(content)
app = FileResponse(path=path, filename="example.png")
async def receive() -> Message: # type: ignore[empty-body]
... # pragma: no cover
async def send(message: Message) -> None:
if message["type"] == "http.response.start":
assert message["status"] == status.HTTP_200_OK
headers = Headers(raw=message["headers"])
assert headers["content-type"] == "image/png"
assert "content-length" in headers
assert "content-disposition" in headers
assert "last-modified" in headers
assert "etag" in headers
elif message["type"] == "http.response.body": # pragma: no branch
assert message["body"] == b""
assert message["more_body"] is False
# Since the TestClient drops the response body on HEAD requests, we need to test
# this directly.
await app({"type": "http", "method": "head", "headers": [(b"key", b"value")]}, receive, send)
def test_file_response_set_media_type(tmp_path: Path, test_client_factory: TestClientFactory) -> None:
path = tmp_path / "xyz"
path.write_bytes(b"<file content>")
# By default, FileResponse will determine the `content-type` based on
# the filename or path, unless a specific `media_type` is provided.
app = FileResponse(path=path, filename="example.png", media_type="image/jpeg")
client: TestClient = test_client_factory(app)
response = client.get("/")
assert response.headers["content-type"] == "image/jpeg"
def test_file_response_with_directory_raises_error(tmp_path: Path, test_client_factory: TestClientFactory) -> None:
app = FileResponse(path=tmp_path, filename="example.png")
client = test_client_factory(app)
with pytest.raises(RuntimeError) as exc_info:
client.get("/")
assert "is not a file" in str(exc_info.value)
def test_file_response_with_missing_file_raises_error(tmp_path: Path, test_client_factory: TestClientFactory) -> None:
path = tmp_path / "404.txt"
app = FileResponse(path=path, filename="404.txt")
client = test_client_factory(app)
with pytest.raises(RuntimeError) as exc_info:
client.get("/")
assert "does not exist" in str(exc_info.value)
def test_file_response_with_chinese_filename(tmp_path: Path, test_client_factory: TestClientFactory) -> None:
content = b"file content"
filename = "你好.txt" # probably "Hello.txt" in Chinese
path = tmp_path / filename
path.write_bytes(content)
app = FileResponse(path=path, filename=filename)
client = test_client_factory(app)
response = client.get("/")
expected_disposition = "attachment; filename*=utf-8''%E4%BD%A0%E5%A5%BD.txt"
assert response.status_code == status.HTTP_200_OK
assert response.content == content
assert response.headers["content-disposition"] == expected_disposition
def test_file_response_with_inline_disposition(tmp_path: Path, test_client_factory: TestClientFactory) -> None:
content = b"file content"
filename = "hello.txt"
path = tmp_path / filename
path.write_bytes(content)
app = FileResponse(path=path, filename=filename, content_disposition_type="inline")
client = test_client_factory(app)
response = client.get("/")
expected_disposition = 'inline; filename="hello.txt"'
assert response.status_code == status.HTTP_200_OK
assert response.content == content
assert response.headers["content-disposition"] == expected_disposition
def test_file_response_with_method_warns(tmp_path: Path) -> None:
with pytest.warns(DeprecationWarning):
FileResponse(path=tmp_path, filename="example.png", method="GET")
def test_file_response_with_range_header(tmp_path: Path, test_client_factory: TestClientFactory) -> None:
content = b"file content"
filename = "hello.txt"
path = tmp_path / filename
path.write_bytes(content)
etag = '"a_non_autogenerated_etag"'
app = FileResponse(path=path, filename=filename, headers={"etag": etag})
client = test_client_factory(app)
response = client.get("/", headers={"range": "bytes=0-4", "if-range": etag})
assert response.status_code == status.HTTP_206_PARTIAL_CONTENT
assert response.content == content[:5]
assert response.headers["etag"] == etag
assert response.headers["content-length"] == "5"
assert response.headers["content-range"] == f"bytes 0-4/{len(content)}"
@pytest.mark.anyio
async def test_file_response_with_pathsend(tmpdir: Path) -> None:
path = tmpdir / "xyz"
content = b"<file content>" * 1000
with open(path, "wb") as file:
file.write(content)
app = FileResponse(path=path, filename="example.png")
async def receive() -> Message: # type: ignore[empty-body]
... # pragma: no cover
async def send(message: Message) -> None:
if message["type"] == "http.response.start":
assert message["status"] == status.HTTP_200_OK
headers = Headers(raw=message["headers"])
assert headers["content-type"] == "image/png"
assert "content-length" in headers
assert "content-disposition" in headers
assert "last-modified" in headers
assert "etag" in headers
elif message["type"] == "http.response.pathsend": # pragma: no branch
assert message["path"] == str(path)
# Since the TestClient doesn't support `pathsend`, we need to test this directly.
await app(
{"type": "http", "method": "get", "headers": [], "extensions": {"http.response.pathsend": {}}},
receive,
send,
)
def test_set_cookie(test_client_factory: TestClientFactory, monkeypatch: pytest.MonkeyPatch) -> None:
# Mock time used as a reference for `Expires` by stdlib `SimpleCookie`.
mocked_now = dt.datetime(2037, 1, 22, 12, 0, 0, tzinfo=dt.timezone.utc)
monkeypatch.setattr(time, "time", lambda: mocked_now.timestamp())
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = Response("Hello, world!", media_type="text/plain")
response.set_cookie(
"mycookie",
"myvalue",
max_age=10,
expires=10,
path="/",
domain="localhost",
secure=True,
httponly=True,
samesite="none",
partitioned=True if sys.version_info >= (3, 14) else False,
)
await response(scope, receive, send)
partitioned_text = "Partitioned; " if sys.version_info >= (3, 14) else ""
client = test_client_factory(app)
response = client.get("/")
assert response.text == "Hello, world!"
assert (
response.headers["set-cookie"] == "mycookie=myvalue; Domain=localhost; expires=Thu, 22 Jan 2037 12:00:10 GMT; "
f"HttpOnly; Max-Age=10; {partitioned_text}Path=/; SameSite=none; Secure"
)
@pytest.mark.skipif(sys.version_info >= (3, 14), reason="Only relevant for <3.14")
def test_set_cookie_raises_for_invalid_python_version(
test_client_factory: TestClientFactory,
) -> None: # pragma: no cover
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = Response("Hello, world!", media_type="text/plain")
with pytest.raises(ValueError):
response.set_cookie("mycookie", "myvalue", partitioned=True)
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.text == "Hello, world!"
assert response.headers.get("set-cookie") is None
def test_set_cookie_path_none(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = Response("Hello, world!", media_type="text/plain")
response.set_cookie("mycookie", "myvalue", path=None)
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.text == "Hello, world!"
assert response.headers["set-cookie"] == "mycookie=myvalue; SameSite=lax"
def test_set_cookie_samesite_none(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = Response("Hello, world!", media_type="text/plain")
response.set_cookie("mycookie", "myvalue", samesite=None)
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.text == "Hello, world!"
assert response.headers["set-cookie"] == "mycookie=myvalue; Path=/"
@pytest.mark.parametrize(
"expires",
[
pytest.param(dt.datetime(2037, 1, 22, 12, 0, 10, tzinfo=dt.timezone.utc), id="datetime"),
pytest.param("Thu, 22 Jan 2037 12:00:10 GMT", id="str"),
pytest.param(10, id="int"),
],
)
def test_expires_on_set_cookie(
test_client_factory: TestClientFactory,
monkeypatch: pytest.MonkeyPatch,
expires: str,
) -> None:
# Mock time used as a reference for `Expires` by stdlib `SimpleCookie`.
mocked_now = dt.datetime(2037, 1, 22, 12, 0, 0, tzinfo=dt.timezone.utc)
monkeypatch.setattr(time, "time", lambda: mocked_now.timestamp())
async def app(scope: Scope, receive: Receive, send: Send) -> None:
response = Response("Hello, world!", media_type="text/plain")
response.set_cookie("mycookie", "myvalue", expires=expires)
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
cookie = SimpleCookie(response.headers.get("set-cookie"))
assert cookie["mycookie"]["expires"] == "Thu, 22 Jan 2037 12:00:10 GMT"
def test_delete_cookie(test_client_factory: TestClientFactory) -> None:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
request = Request(scope, receive)
response = Response("Hello, world!", media_type="text/plain")
if request.cookies.get("mycookie"):
response.delete_cookie("mycookie")
else:
response.set_cookie("mycookie", "myvalue")
await response(scope, receive, send)
client = test_client_factory(app)
response = client.get("/")
assert response.cookies["mycookie"]
response = client.get("/")
assert not response.cookies.get("mycookie")
def test_populate_headers(test_client_factory: TestClientFactory) -> None:
app = Response(content="hi", headers={}, media_type="text/html")
client = test_client_factory(app)
response = client.get("/")
assert response.text == "hi"
assert response.headers["content-length"] == "2"
assert response.headers["content-type"] == "text/html; charset=utf-8"
def test_head_method(test_client_factory: TestClientFactory) -> None:
app = Response("hello, world", media_type="text/plain")
client = test_client_factory(app)
response = client.head("/")
assert response.text == ""
def test_empty_response(test_client_factory: TestClientFactory) -> None:
app = Response()
client: TestClient = test_client_factory(app)
response = client.get("/")
assert response.content == b""
assert response.headers["content-length"] == "0"
assert "content-type" not in response.headers
def test_empty_204_response(test_client_factory: TestClientFactory) -> None:
app = Response(status_code=204)
client: TestClient = test_client_factory(app)
response = client.get("/")
assert "content-length" not in response.headers
def test_non_empty_response(test_client_factory: TestClientFactory) -> None:
app = Response(content="hi")
client: TestClient = test_client_factory(app)
response = client.get("/")
assert response.headers["content-length"] == "2"
def test_response_do_not_add_redundant_charset(
test_client_factory: TestClientFactory,
) -> None:
app = Response(media_type="text/plain; charset=utf-8")
client = test_client_factory(app)
response = client.get("/")
assert response.headers["content-type"] == "text/plain; charset=utf-8"
def test_file_response_known_size(tmp_path: Path, test_client_factory: TestClientFactory) -> None:
path = tmp_path / "xyz"
content = b"<file content>" * 1000
path.write_bytes(content)
app = FileResponse(path=path, filename="example.png")
client: TestClient = test_client_factory(app)
response = client.get("/")
assert response.headers["content-length"] == str(len(content))
def test_streaming_response_unknown_size(
test_client_factory: TestClientFactory,
) -> None:
app = StreamingResponse(content=iter(["hello", "world"]))
client: TestClient = test_client_factory(app)
response = client.get("/")
assert "content-length" not in response.headers
def test_streaming_response_known_size(test_client_factory: TestClientFactory) -> None:
app = StreamingResponse(content=iter(["hello", "world"]), headers={"content-length": "10"})
client: TestClient = test_client_factory(app)
response = client.get("/")
assert response.headers["content-length"] == "10"
def test_response_memoryview(test_client_factory: TestClientFactory) -> None:
app = Response(content=memoryview(b"\xc0"))
client: TestClient = test_client_factory(app)
response = client.get("/")
assert response.content == b"\xc0"
def test_streaming_response_memoryview(test_client_factory: TestClientFactory) -> None:
app = StreamingResponse(content=iter([memoryview(b"\xc0"), memoryview(b"\xf5")]))
client: TestClient = test_client_factory(app)
response = client.get("/")
assert response.content == b"\xc0\xf5"
@pytest.mark.anyio
async def test_streaming_response_stops_if_receiving_http_disconnect() -> None:
streamed = 0
disconnected = anyio.Event()
async def receive_disconnect() -> Message:
await disconnected.wait()
return {"type": "http.disconnect"}
async def send(message: Message) -> None:
nonlocal streamed
if message["type"] == "http.response.body":
streamed += len(message.get("body", b""))
# Simulate disconnection after download has started
if streamed >= 16:
disconnected.set()
async def stream_indefinitely() -> AsyncIterator[bytes]:
while True:
# Need a sleep for the event loop to switch to another task
await anyio.sleep(0)
yield b"chunk "
response = StreamingResponse(content=stream_indefinitely())
with anyio.move_on_after(1) as cancel_scope:
await response({}, receive_disconnect, send)
assert not cancel_scope.cancel_called, "Content streaming should stop itself."
@pytest.mark.anyio
async def test_streaming_response_on_client_disconnects() -> None:
chunks = bytearray()
streamed = False
async def receive_disconnect() -> Message:
raise NotImplementedError
async def send(message: Message) -> None:
nonlocal streamed
if message["type"] == "http.response.body":
if not streamed:
chunks.extend(message.get("body", b""))
streamed = True
else:
raise OSError
async def stream_indefinitely() -> AsyncGenerator[bytes, None]:
while True:
await anyio.sleep(0)
yield b"chunk"
stream = stream_indefinitely()
response = StreamingResponse(content=stream)
with anyio.move_on_after(1) as cancel_scope:
with pytest.raises(ClientDisconnect):
await response({"asgi": {"spec_version": "2.4"}}, receive_disconnect, send)
assert not cancel_scope.cancel_called, "Content streaming should stop itself."
assert chunks == b"chunk"
await stream.aclose()
README = """\
# BáiZé
Powerful and exquisite WSGI/ASGI framework/toolkit.
The minimize implementation of methods required in the Web framework. No redundant implementation means that you can freely customize functions without considering the conflict with baize's own implementation.
Under the ASGI/WSGI protocol, the interface of the request object and the response object is almost the same, only need to add or delete `await` in the appropriate place. In addition, it should be noted that ASGI supports WebSocket but WSGI does not.
""" # noqa: E501
@pytest.fixture
def readme_file(tmp_path: Path) -> Path:
filepath = tmp_path / "README.txt"
filepath.write_bytes(README.encode("utf8"))
return filepath
@pytest.fixture
def file_response_client(readme_file: Path, test_client_factory: TestClientFactory) -> TestClient:
return test_client_factory(app=FileResponse(str(readme_file)))
def test_file_response_without_range(file_response_client: TestClient) -> None:
response = file_response_client.get("/")
assert response.status_code == 200
assert response.headers["content-length"] == str(len(README.encode("utf8")))
assert response.text == README
def test_file_response_head(file_response_client: TestClient) -> None:
response = file_response_client.head("/")
assert response.status_code == 200
assert response.headers["content-length"] == str(len(README.encode("utf8")))
assert response.content == b""
def test_file_response_range(file_response_client: TestClient) -> None:
response = file_response_client.get("/", headers={"Range": "bytes=0-100"})
assert response.status_code == 206
assert response.headers["content-range"] == f"bytes 0-100/{len(README.encode('utf8'))}"
assert response.headers["content-length"] == "101"
assert response.content == README.encode("utf8")[:101]
def test_file_response_range_head(file_response_client: TestClient) -> None:
response = file_response_client.head("/", headers={"Range": "bytes=0-100"})
assert response.status_code == 206
assert response.headers["content-length"] == str(101)
assert response.content == b""
def test_file_response_range_multi(file_response_client: TestClient) -> None:
response = file_response_client.get("/", headers={"Range": "bytes=0-100, 200-300"})
assert response.status_code == 206
assert response.headers["content-range"].startswith("multipart/byteranges; boundary=")
assert response.headers["content-length"] == "439"
def test_file_response_range_multi_head(file_response_client: TestClient) -> None:
response = file_response_client.head("/", headers={"Range": "bytes=0-100, 200-300"})
assert response.status_code == 206
assert response.headers["content-length"] == "439"
assert response.content == b""
response = file_response_client.head(
"/",
headers={"Range": "bytes=200-300", "if-range": response.headers["etag"][:-1]},
)
assert response.status_code == 200
response = file_response_client.head(
"/",
headers={"Range": "bytes=200-300", "if-range": response.headers["etag"]},
)
assert response.status_code == 206
def test_file_response_range_invalid(file_response_client: TestClient) -> None:
response = file_response_client.head("/", headers={"Range": "bytes: 0-1000"})
assert response.status_code == 400
def test_file_response_range_head_max(file_response_client: TestClient) -> None:
response = file_response_client.head("/", headers={"Range": f"bytes=0-{len(README.encode('utf8')) + 1}"})
assert response.status_code == 206
def test_file_response_range_416(file_response_client: TestClient) -> None:
response = file_response_client.head("/", headers={"Range": f"bytes={len(README.encode('utf8')) + 1}-"})
assert response.status_code == 416
assert response.headers["Content-Range"] == f"*/{len(README.encode('utf8'))}"
def test_file_response_only_support_bytes_range(file_response_client: TestClient) -> None:
response = file_response_client.get("/", headers={"Range": "items=0-100"})
assert response.status_code == 400
assert response.text == "Only support bytes range"
def test_file_response_range_must_be_requested(file_response_client: TestClient) -> None:
response = file_response_client.get("/", headers={"Range": "bytes="})
assert response.status_code == 400
assert response.text == "Range header: range must be requested"
def test_file_response_start_must_be_less_than_end(file_response_client: TestClient) -> None:
response = file_response_client.get("/", headers={"Range": "bytes=100-0"})
assert response.status_code == 400
assert response.text == "Range header: start must be less than end"
def test_file_response_merge_ranges(file_response_client: TestClient) -> None:
response = file_response_client.get("/", headers={"Range": "bytes=0-100, 50-200"})
assert response.status_code == 206
assert response.headers["content-length"] == "201"
assert response.headers["content-range"] == f"bytes 0-200/{len(README.encode('utf8'))}"
def test_file_response_insert_ranges(file_response_client: TestClient) -> None:
response = file_response_client.get("/", headers={"Range": "bytes=100-200, 0-50"})
assert response.status_code == 206
assert response.headers["content-range"].startswith("multipart/byteranges; boundary=")
boundary = response.headers["content-range"].split("boundary=")[1]
assert response.text.splitlines() == [
f"--{boundary}",
"Content-Type: text/plain; charset=utf-8",
"Content-Range: bytes 0-50/526",
"",
"# BáiZé",
"",
"Powerful and exquisite WSGI/ASGI framewo",
f"--{boundary}",
"Content-Type: text/plain; charset=utf-8",
"Content-Range: bytes 100-200/526",
"",
"ds required in the Web framework. No redundant implementation means that you can freely customize fun",
"",
f"--{boundary}--",
]
def test_file_response_range_without_dash(file_response_client: TestClient) -> None:
response = file_response_client.get("/", headers={"Range": "bytes=100, 0-50"})
assert response.status_code == 206
assert response.headers["content-range"] == f"bytes 0-50/{len(README.encode('utf8'))}"
def test_file_response_range_empty_start_and_end(file_response_client: TestClient) -> None:
response = file_response_client.get("/", headers={"Range": "bytes= - , 0-50"})
assert response.status_code == 206
assert response.headers["content-range"] == f"bytes 0-50/{len(README.encode('utf8'))}"
def test_file_response_range_ignore_non_numeric(file_response_client: TestClient) -> None:
response = file_response_client.get("/", headers={"Range": "bytes=abc-def, 0-50"})
assert response.status_code == 206
assert response.headers["content-range"] == f"bytes 0-50/{len(README.encode('utf8'))}"
def test_file_response_suffix_range(file_response_client: TestClient) -> None:
    # Test suffix range (last N bytes): exercises the branch where start_str is empty
response = file_response_client.get("/", headers={"Range": "bytes=-100"})
assert response.status_code == 206
file_size = len(README.encode("utf8"))
assert response.headers["content-range"] == f"bytes {file_size - 100}-{file_size - 1}/{file_size}"
assert response.headers["content-length"] == "100"
assert response.content == README.encode("utf8")[-100:]
@pytest.mark.anyio
async def test_file_response_multi_small_chunk_size(readme_file: Path) -> None:
class SmallChunkSizeFileResponse(FileResponse):
chunk_size = 10
app = SmallChunkSizeFileResponse(path=str(readme_file))
received_chunks: list[bytes] = []
start_message: dict[str, Any] = {}
async def receive() -> Message:
raise NotImplementedError("Should not be called!")
async def send(message: Message) -> None:
if message["type"] == "http.response.start":
start_message.update(message)
elif message["type"] == "http.response.body": # pragma: no branch
received_chunks.append(message["body"])
await app({"type": "http", "method": "get", "headers": [(b"range", b"bytes=0-15,20-35,35-50")]}, receive, send)
assert start_message["status"] == 206
headers = Headers(raw=start_message["headers"])
assert headers.get("content-type") == "text/plain; charset=utf-8"
assert headers.get("accept-ranges") == "bytes"
assert "content-length" in headers
assert "last-modified" in headers
assert "etag" in headers
assert headers["content-range"].startswith("multipart/byteranges; boundary=")
boundary = headers["content-range"].split("boundary=")[1]
assert received_chunks == [
# Send the part headers.
f"--{boundary}\nContent-Type: text/plain; charset=utf-8\nContent-Range: bytes 0-15/526\n\n".encode(),
# Send the first chunk (10 bytes).
b"# B\xc3\xa1iZ\xc3\xa9\n",
# Send the second chunk (6 bytes).
b"\nPower",
# Send the new line to separate the parts.
b"\n",
# Send the part headers. We merge the ranges 20-35 and 35-50 into a single part.
f"--{boundary}\nContent-Type: text/plain; charset=utf-8\nContent-Range: bytes 20-50/526\n\n".encode(),
# Send the first chunk (10 bytes).
b"and exquis",
# Send the second chunk (10 bytes).
b"ite WSGI/A",
# Send the third chunk (10 bytes).
b"SGI framew",
# Send the last chunk (1 byte).
b"o",
b"\n",
f"\n--{boundary}--\n".encode(),
]
|
./temp_repos/starlette/starlette/responses.py
|
./temp_repos/starlette/tests/test_responses.py
|
starlette
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Response'.
Context:
- Class Name: Response
- Dependencies to Mock: status_code, path, headers, content_disposition_type, background, stat_result, url, filename, max_size, content, method, media_type
- Key Imports: secrets, anyio.to_thread, mimetypes, anyio, hashlib, os, __future__, starlette.datastructures, stat, typing
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
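A minimal sketch of the expected shape — a hedged example, not the canonical answer — assuming `starlette.responses.Response` is importable and that invalid `samesite` values trip an assert (true for current Starlette):

import unittest
from unittest.mock import MagicMock

from starlette.responses import Response


class TestResponse(unittest.TestCase):
    def test_render_and_headers_success(self) -> None:
        # Arrange: a mock stands in for the BackgroundTask dependency
        background = MagicMock()
        # Act
        response = Response("hi", status_code=200, media_type="text/plain", background=background)
        # Assert: str content is encoded, and text/* media types gain a charset
        self.assertEqual(response.body, b"hi")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.headers["content-type"], "text/plain; charset=utf-8")

    def test_set_cookie_rejects_unknown_samesite(self) -> None:
        # Arrange
        response = Response("hi")
        # Act / Assert: failure scenario — Starlette asserts samesite is strict/lax/none
        with self.assertRaises(AssertionError):
            response.set_cookie("key", "value", samesite="bogus")  # type: ignore[arg-type]


if __name__ == "__main__":
    unittest.main()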
|
Response
|
python
|
from __future__ import annotations
import os
import warnings
from collections.abc import Callable, Iterator, Mapping, MutableMapping
from pathlib import Path
from typing import Any, TypeVar, overload
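# Sentinel class: lets Config distinguish "no default was given" from an explicit default of None.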
class undefined:
pass
class EnvironError(Exception):
pass
class Environ(MutableMapping[str, str]):
def __init__(self, environ: MutableMapping[str, str] = os.environ):
self._environ = environ
self._has_been_read: set[str] = set()
def __getitem__(self, key: str) -> str:
self._has_been_read.add(key)
return self._environ.__getitem__(key)
def __setitem__(self, key: str, value: str) -> None:
if key in self._has_been_read:
raise EnvironError(f"Attempting to set environ['{key}'], but the value has already been read.")
self._environ.__setitem__(key, value)
def __delitem__(self, key: str) -> None:
if key in self._has_been_read:
raise EnvironError(f"Attempting to delete environ['{key}'], but the value has already been read.")
self._environ.__delitem__(key)
def __iter__(self) -> Iterator[str]:
return iter(self._environ)
def __len__(self) -> int:
return len(self._environ)
environ = Environ()
T = TypeVar("T")
class Config:
def __init__(
self,
env_file: str | Path | None = None,
environ: Mapping[str, str] = environ,
env_prefix: str = "",
encoding: str = "utf-8",
) -> None:
self.environ = environ
self.env_prefix = env_prefix
self.file_values: dict[str, str] = {}
if env_file is not None:
if not os.path.isfile(env_file):
warnings.warn(f"Config file '{env_file}' not found.")
else:
self.file_values = self._read_file(env_file, encoding)
@overload
def __call__(self, key: str, *, default: None) -> str | None: ...
@overload
def __call__(self, key: str, cast: type[T], default: T = ...) -> T: ...
@overload
def __call__(self, key: str, cast: type[str] = ..., default: str = ...) -> str: ...
@overload
def __call__(
self,
key: str,
cast: Callable[[Any], T] = ...,
default: Any = ...,
) -> T: ...
@overload
def __call__(self, key: str, cast: type[str] = ..., default: T = ...) -> T | str: ...
def __call__(
self,
key: str,
cast: Callable[[Any], Any] | None = None,
default: Any = undefined,
) -> Any:
return self.get(key, cast, default)
def get(
self,
key: str,
cast: Callable[[Any], Any] | None = None,
default: Any = undefined,
) -> Any:
key = self.env_prefix + key
if key in self.environ:
value = self.environ[key]
return self._perform_cast(key, value, cast)
if key in self.file_values:
value = self.file_values[key]
return self._perform_cast(key, value, cast)
if default is not undefined:
return self._perform_cast(key, default, cast)
raise KeyError(f"Config '{key}' is missing, and has no default.")
def _read_file(self, file_name: str | Path, encoding: str) -> dict[str, str]:
file_values: dict[str, str] = {}
with open(file_name, encoding=encoding) as input_file:
for line in input_file.readlines():
line = line.strip()
if "=" in line and not line.startswith("#"):
key, value = line.split("=", 1)
key = key.strip()
value = value.strip().strip("\"'")
file_values[key] = value
return file_values
def _perform_cast(
self,
key: str,
value: Any,
cast: Callable[[Any], Any] | None = None,
) -> Any:
if cast is None or value is None:
return value
elif cast is bool and isinstance(value, str):
mapping = {"true": True, "1": True, "false": False, "0": False}
value = value.lower()
if value not in mapping:
raise ValueError(f"Config '{key}' has value '{value}'. Not a valid bool.")
return mapping[value]
try:
return cast(value)
except (TypeError, ValueError):
raise ValueError(f"Config '{key}' has value '{value}'. Not a valid {cast.__name__}.")
|
import os
from pathlib import Path
from typing import Any
import pytest
from typing_extensions import assert_type
from starlette.config import Config, Environ, EnvironError
from starlette.datastructures import URL, Secret
def test_config_types() -> None:
"""
We use `assert_type` to test the types returned by Config via mypy.
"""
config = Config(environ={"STR": "some_str_value", "STR_CAST": "some_str_value", "BOOL": "true"})
assert_type(config("STR"), str)
assert_type(config("STR_DEFAULT", default=""), str)
assert_type(config("STR_CAST", cast=str), str)
assert_type(config("STR_NONE", default=None), str | None)
assert_type(config("STR_CAST_NONE", cast=str, default=None), str | None)
assert_type(config("STR_CAST_STR", cast=str, default=""), str)
assert_type(config("BOOL", cast=bool), bool)
assert_type(config("BOOL_DEFAULT", cast=bool, default=False), bool)
assert_type(config("BOOL_NONE", cast=bool, default=None), bool | None)
def cast_to_int(v: Any) -> int:
return int(v)
# our type annotations allow these `cast` and `default` configurations, but
# the code will error at runtime.
with pytest.raises(ValueError):
config("INT_CAST_DEFAULT_STR", cast=cast_to_int, default="true")
with pytest.raises(ValueError):
config("INT_DEFAULT_STR", cast=int, default="true")
def test_config(tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None:
path = os.path.join(tmpdir, ".env")
with open(path, "w") as file:
file.write("# Do not commit to source control\n")
file.write("DATABASE_URL=postgres://user:pass@localhost/dbname\n")
file.write("REQUEST_HOSTNAME=example.com\n")
file.write("SECRET_KEY=12345\n")
file.write("BOOL_AS_INT=0\n")
file.write("\n")
file.write("\n")
config = Config(path, environ={"DEBUG": "true"})
def cast_to_int(v: Any) -> int:
return int(v)
DEBUG = config("DEBUG", cast=bool)
DATABASE_URL = config("DATABASE_URL", cast=URL)
REQUEST_TIMEOUT = config("REQUEST_TIMEOUT", cast=int, default=10)
REQUEST_HOSTNAME = config("REQUEST_HOSTNAME")
MAIL_HOSTNAME = config("MAIL_HOSTNAME", default=None)
SECRET_KEY = config("SECRET_KEY", cast=Secret)
UNSET_SECRET = config("UNSET_SECRET", cast=Secret, default=None)
EMPTY_SECRET = config("EMPTY_SECRET", cast=Secret, default="")
assert config("BOOL_AS_INT", cast=bool) is False
assert config("BOOL_AS_INT", cast=cast_to_int) == 0
assert config("DEFAULTED_BOOL", cast=cast_to_int, default=True) == 1
assert DEBUG is True
assert DATABASE_URL.path == "/dbname"
assert DATABASE_URL.password == "pass"
assert DATABASE_URL.username == "user"
assert REQUEST_TIMEOUT == 10
assert REQUEST_HOSTNAME == "example.com"
assert MAIL_HOSTNAME is None
assert repr(SECRET_KEY) == "Secret('**********')"
assert str(SECRET_KEY) == "12345"
assert bool(SECRET_KEY)
assert not bool(EMPTY_SECRET)
assert not bool(UNSET_SECRET)
with pytest.raises(KeyError):
config.get("MISSING")
with pytest.raises(ValueError):
config.get("DEBUG", cast=int)
with pytest.raises(ValueError):
config.get("REQUEST_HOSTNAME", cast=bool)
config = Config(Path(path))
REQUEST_HOSTNAME = config("REQUEST_HOSTNAME")
assert REQUEST_HOSTNAME == "example.com"
config = Config()
monkeypatch.setenv("STARLETTE_EXAMPLE_TEST", "123")
monkeypatch.setenv("BOOL_AS_INT", "1")
assert config.get("STARLETTE_EXAMPLE_TEST", cast=int) == 123
assert config.get("BOOL_AS_INT", cast=bool) is True
monkeypatch.setenv("BOOL_AS_INT", "2")
with pytest.raises(ValueError):
config.get("BOOL_AS_INT", cast=bool)
def test_missing_env_file_raises(tmpdir: Path) -> None:
path = os.path.join(tmpdir, ".env")
with pytest.warns(UserWarning, match=f"Config file '{path}' not found."):
Config(path)
def test_environ() -> None:
environ = Environ()
# We can mutate the environ at this point.
environ["TESTING"] = "True"
environ["GONE"] = "123"
del environ["GONE"]
# We can read the environ.
assert environ["TESTING"] == "True"
assert "GONE" not in environ
# We cannot mutate these keys now that we've read them.
with pytest.raises(EnvironError):
environ["TESTING"] = "False"
with pytest.raises(EnvironError):
del environ["GONE"]
# Test coverage of abstract methods for MutableMapping.
environ = Environ()
assert list(iter(environ)) == list(iter(os.environ))
assert len(environ) == len(os.environ)
def test_config_with_env_prefix(tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None:
config = Config(environ={"APP_DEBUG": "value", "ENVIRONMENT": "dev"}, env_prefix="APP_")
assert config.get("DEBUG") == "value"
with pytest.raises(KeyError):
config.get("ENVIRONMENT")
def test_config_with_encoding(tmpdir: Path) -> None:
path = tmpdir / ".env"
path.write_text("MESSAGE=Hello 世界\n", encoding="utf-8")
config = Config(path, encoding="utf-8")
assert config.get("MESSAGE") == "Hello 世界"
|
./temp_repos/starlette/starlette/config.py
|
./temp_repos/starlette/tests/test_config.py
|
starlette
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'undefined'.
Context:
- Class Name: undefined
- Dependencies to Mock: encoding, env_prefix, env_file, environ
- Key Imports: warnings, pathlib, typing, collections.abc, os, __future__
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
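A minimal sketch — hedged, since `undefined` is only a sentinel marker — assuming it is importable from `starlette.config` as defined above:

import unittest
from unittest.mock import MagicMock

from starlette.config import Config, undefined


class TestUndefinedSentinel(unittest.TestCase):
    def test_missing_key_without_default_raises(self) -> None:
        # Arrange: a mapping double that reports every key as absent
        environ = MagicMock()
        environ.__contains__.return_value = False
        config = Config(environ=environ)
        # Act / Assert: `default` stays as the `undefined` sentinel, so lookup fails
        with self.assertRaises(KeyError):
            config.get("MISSING")

    def test_explicit_default_bypasses_sentinel(self) -> None:
        # Arrange
        config = Config(environ={})
        # Act: an explicit default (even None would count) replaces the sentinel
        value = config.get("MISSING", default="fallback")
        # Assert
        self.assertEqual(value, "fallback")


if __name__ == "__main__":
    unittest.main()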
|
undefined
|
python
|
from __future__ import annotations
import http
from collections.abc import Mapping
class HTTPException(Exception):
def __init__(self, status_code: int, detail: str | None = None, headers: Mapping[str, str] | None = None) -> None:
if detail is None:
detail = http.HTTPStatus(status_code).phrase
self.status_code = status_code
self.detail = detail
self.headers = headers
def __str__(self) -> str:
return f"{self.status_code}: {self.detail}"
def __repr__(self) -> str:
class_name = self.__class__.__name__
return f"{class_name}(status_code={self.status_code!r}, detail={self.detail!r})"
class WebSocketException(Exception):
def __init__(self, code: int, reason: str | None = None) -> None:
self.code = code
self.reason = reason or ""
def __str__(self) -> str:
return f"{self.code}: {self.reason}"
def __repr__(self) -> str:
class_name = self.__class__.__name__
return f"{class_name}(code={self.code!r}, reason={self.reason!r})"
|
from collections.abc import Generator
from typing import Any
import pytest
from pytest import MonkeyPatch
from starlette.exceptions import HTTPException, WebSocketException
from starlette.middleware.exceptions import ExceptionMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse, PlainTextResponse
from starlette.routing import Route, Router, WebSocketRoute
from starlette.testclient import TestClient
from starlette.types import Receive, Scope, Send
from tests.types import TestClientFactory
def raise_runtime_error(request: Request) -> None:
raise RuntimeError("Yikes")
def not_acceptable(request: Request) -> None:
raise HTTPException(status_code=406)
def no_content(request: Request) -> None:
raise HTTPException(status_code=204)
def not_modified(request: Request) -> None:
raise HTTPException(status_code=304)
def with_headers(request: Request) -> None:
raise HTTPException(status_code=200, headers={"x-potato": "always"})
class BadBodyException(HTTPException):
pass
async def read_body_and_raise_exc(request: Request) -> None:
await request.body()
raise BadBodyException(422)
async def handler_that_reads_body(request: Request, exc: BadBodyException) -> JSONResponse:
body = await request.body()
return JSONResponse(status_code=422, content={"body": body.decode()})
class HandledExcAfterResponse:
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
response = PlainTextResponse("OK", status_code=200)
await response(scope, receive, send)
raise HTTPException(status_code=406)
router = Router(
routes=[
Route("/runtime_error", endpoint=raise_runtime_error),
Route("/not_acceptable", endpoint=not_acceptable),
Route("/no_content", endpoint=no_content),
Route("/not_modified", endpoint=not_modified),
Route("/with_headers", endpoint=with_headers),
Route("/handled_exc_after_response", endpoint=HandledExcAfterResponse()),
WebSocketRoute("/runtime_error", endpoint=raise_runtime_error),
Route("/consume_body_in_endpoint_and_handler", endpoint=read_body_and_raise_exc, methods=["POST"]),
]
)
app = ExceptionMiddleware(
router,
handlers={BadBodyException: handler_that_reads_body}, # type: ignore[dict-item]
)
@pytest.fixture
def client(test_client_factory: TestClientFactory) -> Generator[TestClient, None, None]:
with test_client_factory(app) as client:
yield client
def test_not_acceptable(client: TestClient) -> None:
response = client.get("/not_acceptable")
assert response.status_code == 406
assert response.text == "Not Acceptable"
def test_no_content(client: TestClient) -> None:
response = client.get("/no_content")
assert response.status_code == 204
assert "content-length" not in response.headers
def test_not_modified(client: TestClient) -> None:
response = client.get("/not_modified")
assert response.status_code == 304
assert response.text == ""
def test_with_headers(client: TestClient) -> None:
response = client.get("/with_headers")
assert response.status_code == 200
assert response.headers["x-potato"] == "always"
def test_websockets_should_raise(client: TestClient) -> None:
with pytest.raises(RuntimeError):
with client.websocket_connect("/runtime_error"):
pass # pragma: no cover
def test_handled_exc_after_response(test_client_factory: TestClientFactory, client: TestClient) -> None:
    # A 406 HTTPException is raised *after* the response has already been sent.
# The exception middleware should raise a RuntimeError.
with pytest.raises(RuntimeError, match="Caught handled exception, but response already started."):
client.get("/handled_exc_after_response")
# If `raise_server_exceptions=False` then the test client will still allow
# us to see the response as it will have been seen by the client.
allow_200_client = test_client_factory(app, raise_server_exceptions=False)
response = allow_200_client.get("/handled_exc_after_response")
assert response.status_code == 200
assert response.text == "OK"
def test_force_500_response(test_client_factory: TestClientFactory) -> None:
# use a sentinel variable to make sure we actually
# make it into the endpoint and don't get a 500
# from an incorrect ASGI app signature or something
called = False
async def app(scope: Scope, receive: Receive, send: Send) -> None:
nonlocal called
called = True
raise RuntimeError()
force_500_client = test_client_factory(app, raise_server_exceptions=False)
response = force_500_client.get("/")
assert called
assert response.status_code == 500
assert response.text == ""
def test_http_str() -> None:
assert str(HTTPException(status_code=404)) == "404: Not Found"
assert str(HTTPException(404, "Not Found: foo")) == "404: Not Found: foo"
assert str(HTTPException(404, headers={"key": "value"})) == "404: Not Found"
def test_http_repr() -> None:
assert repr(HTTPException(404)) == ("HTTPException(status_code=404, detail='Not Found')")
assert repr(HTTPException(404, detail="Not Found: foo")) == (
"HTTPException(status_code=404, detail='Not Found: foo')"
)
class CustomHTTPException(HTTPException):
pass
assert repr(CustomHTTPException(500, detail="Something custom")) == (
"CustomHTTPException(status_code=500, detail='Something custom')"
)
def test_websocket_str() -> None:
assert str(WebSocketException(1008)) == "1008: "
assert str(WebSocketException(1008, "Policy Violation")) == "1008: Policy Violation"
def test_websocket_repr() -> None:
assert repr(WebSocketException(1008, reason="Policy Violation")) == (
"WebSocketException(code=1008, reason='Policy Violation')"
)
class CustomWebSocketException(WebSocketException):
pass
assert (
repr(CustomWebSocketException(1013, reason="Something custom"))
== "CustomWebSocketException(code=1013, reason='Something custom')"
)
def test_request_in_app_and_handler_is_the_same_object(client: TestClient) -> None:
response = client.post("/consume_body_in_endpoint_and_handler", content=b"Hello!")
assert response.status_code == 422
assert response.json() == {"body": "Hello!"}
def test_http_exception_does_not_use_threadpool(client: TestClient, monkeypatch: MonkeyPatch) -> None:
"""
Verify that handling HTTPException does not invoke run_in_threadpool,
confirming the handler correctly runs in the main async context.
"""
from starlette import _exception_handler
# Replace run_in_threadpool with a function that raises an error
def mock_run_in_threadpool(*args: Any, **kwargs: Any) -> None:
pytest.fail("run_in_threadpool should not be called for HTTP exceptions") # pragma: no cover
# Apply the monkeypatch only during this test
monkeypatch.setattr(_exception_handler, "run_in_threadpool", mock_run_in_threadpool)
# This should succeed because http_exception is async and won't use run_in_threadpool
response = client.get("/not_acceptable")
assert response.status_code == 406
def test_handlers_annotations() -> None:
"""Check that async exception handlers are accepted by type checkers.
We annotate the handlers' exceptions with plain `Exception` to avoid variance issues
when using other exception types.
"""
async def async_catch_all_handler(request: Request, exc: Exception) -> JSONResponse:
raise NotImplementedError
def sync_catch_all_handler(request: Request, exc: Exception) -> JSONResponse:
raise NotImplementedError
ExceptionMiddleware(router, handlers={Exception: sync_catch_all_handler})
ExceptionMiddleware(router, handlers={Exception: async_catch_all_handler})
|
./temp_repos/starlette/starlette/exceptions.py
|
./temp_repos/starlette/tests/test_exceptions.py
|
starlette
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'HTTPException'.
Context:
- Class Name: HTTPException
- Dependencies to Mock: status_code, headers, code, detail, reason
- Key Imports: http, __future__, collections.abc
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
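A minimal sketch — a hedged example, not the canonical answer — covering the default-detail lookup, a mocked phrase table, and a failure path:

import unittest
from unittest.mock import patch

from starlette.exceptions import HTTPException


class TestHTTPException(unittest.TestCase):
    def test_detail_defaults_to_status_phrase(self) -> None:
        # Arrange / Act
        exc = HTTPException(status_code=404)
        # Assert: detail falls back to the http.HTTPStatus phrase
        self.assertEqual(exc.detail, "Not Found")
        self.assertEqual(str(exc), "404: Not Found")

    def test_phrase_lookup_can_be_mocked(self) -> None:
        # Arrange: isolate HTTPException from the real http.HTTPStatus table
        with patch("starlette.exceptions.http.HTTPStatus") as status_cls:
            status_cls.return_value.phrase = "Teapot"
            # Act
            exc = HTTPException(status_code=418)
        # Assert
        self.assertEqual(exc.detail, "Teapot")

    def test_unknown_status_code_fails(self) -> None:
        # Act / Assert: http.HTTPStatus(999) raises, so construction fails
        with self.assertRaises(ValueError):
            HTTPException(status_code=999)


if __name__ == "__main__":
    unittest.main()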
|
HTTPException
|
python
|
from __future__ import annotations
from collections.abc import AsyncGenerator
from dataclasses import dataclass, field
from enum import Enum
from tempfile import SpooledTemporaryFile
from typing import TYPE_CHECKING
from urllib.parse import unquote_plus
from starlette.datastructures import FormData, Headers, UploadFile
if TYPE_CHECKING:
import python_multipart as multipart
from python_multipart.multipart import MultipartCallbacks, QuerystringCallbacks, parse_options_header
else:
try:
try:
import python_multipart as multipart
from python_multipart.multipart import parse_options_header
except ModuleNotFoundError: # pragma: no cover
import multipart
from multipart.multipart import parse_options_header
except ModuleNotFoundError: # pragma: no cover
multipart = None
parse_options_header = None
class FormMessage(Enum):
FIELD_START = 1
FIELD_NAME = 2
FIELD_DATA = 3
FIELD_END = 4
END = 5
@dataclass
class MultipartPart:
content_disposition: bytes | None = None
field_name: str = ""
data: bytearray = field(default_factory=bytearray)
file: UploadFile | None = None
item_headers: list[tuple[bytes, bytes]] = field(default_factory=list)
def _user_safe_decode(src: bytes | bytearray, codec: str) -> str:
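    # Try the requested codec first; latin-1 is the fallback because it can decode any byte sequence.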
try:
return src.decode(codec)
except (UnicodeDecodeError, LookupError):
return src.decode("latin-1")
class MultiPartException(Exception):
def __init__(self, message: str) -> None:
self.message = message
class FormParser:
def __init__(self, headers: Headers, stream: AsyncGenerator[bytes, None]) -> None:
assert multipart is not None, "The `python-multipart` library must be installed to use form parsing."
self.headers = headers
self.stream = stream
self.messages: list[tuple[FormMessage, bytes]] = []
def on_field_start(self) -> None:
message = (FormMessage.FIELD_START, b"")
self.messages.append(message)
def on_field_name(self, data: bytes, start: int, end: int) -> None:
message = (FormMessage.FIELD_NAME, data[start:end])
self.messages.append(message)
def on_field_data(self, data: bytes, start: int, end: int) -> None:
message = (FormMessage.FIELD_DATA, data[start:end])
self.messages.append(message)
def on_field_end(self) -> None:
message = (FormMessage.FIELD_END, b"")
self.messages.append(message)
def on_end(self) -> None:
message = (FormMessage.END, b"")
self.messages.append(message)
async def parse(self) -> FormData:
# Callbacks dictionary.
callbacks: QuerystringCallbacks = {
"on_field_start": self.on_field_start,
"on_field_name": self.on_field_name,
"on_field_data": self.on_field_data,
"on_field_end": self.on_field_end,
"on_end": self.on_end,
}
# Create the parser.
parser = multipart.QuerystringParser(callbacks)
field_name = b""
field_value = b""
items: list[tuple[str, str | UploadFile]] = []
# Feed the parser with data from the request.
async for chunk in self.stream:
if chunk:
parser.write(chunk)
else:
parser.finalize()
messages = list(self.messages)
self.messages.clear()
for message_type, message_bytes in messages:
if message_type == FormMessage.FIELD_START:
field_name = b""
field_value = b""
elif message_type == FormMessage.FIELD_NAME:
field_name += message_bytes
elif message_type == FormMessage.FIELD_DATA:
field_value += message_bytes
elif message_type == FormMessage.FIELD_END:
name = unquote_plus(field_name.decode("latin-1"))
value = unquote_plus(field_value.decode("latin-1"))
items.append((name, value))
return FormData(items)
class MultiPartParser:
spool_max_size = 1024 * 1024 # 1MB
"""The maximum size of the spooled temporary file used to store file data."""
max_part_size = 1024 * 1024 # 1MB
"""The maximum size of a part in the multipart request."""
def __init__(
self,
headers: Headers,
stream: AsyncGenerator[bytes, None],
*,
max_files: int | float = 1000,
max_fields: int | float = 1000,
max_part_size: int = 1024 * 1024, # 1MB
) -> None:
assert multipart is not None, "The `python-multipart` library must be installed to use form parsing."
self.headers = headers
self.stream = stream
self.max_files = max_files
self.max_fields = max_fields
self.items: list[tuple[str, str | UploadFile]] = []
self._current_files = 0
self._current_fields = 0
self._current_partial_header_name: bytes = b""
self._current_partial_header_value: bytes = b""
self._current_part = MultipartPart()
self._charset = ""
self._file_parts_to_write: list[tuple[MultipartPart, bytes]] = []
self._file_parts_to_finish: list[MultipartPart] = []
self._files_to_close_on_error: list[SpooledTemporaryFile[bytes]] = []
self.max_part_size = max_part_size
def on_part_begin(self) -> None:
self._current_part = MultipartPart()
def on_part_data(self, data: bytes, start: int, end: int) -> None:
message_bytes = data[start:end]
if self._current_part.file is None:
if len(self._current_part.data) + len(message_bytes) > self.max_part_size:
raise MultiPartException(f"Part exceeded maximum size of {int(self.max_part_size / 1024)}KB.")
self._current_part.data.extend(message_bytes)
else:
self._file_parts_to_write.append((self._current_part, message_bytes))
def on_part_end(self) -> None:
if self._current_part.file is None:
self.items.append(
(
self._current_part.field_name,
_user_safe_decode(self._current_part.data, self._charset),
)
)
else:
self._file_parts_to_finish.append(self._current_part)
# The file can be added to the items right now even though it's not
# finished yet, because it will be finished in the `parse()` method, before
# self.items is used in the return value.
self.items.append((self._current_part.field_name, self._current_part.file))
def on_header_field(self, data: bytes, start: int, end: int) -> None:
self._current_partial_header_name += data[start:end]
def on_header_value(self, data: bytes, start: int, end: int) -> None:
self._current_partial_header_value += data[start:end]
def on_header_end(self) -> None:
field = self._current_partial_header_name.lower()
if field == b"content-disposition":
self._current_part.content_disposition = self._current_partial_header_value
self._current_part.item_headers.append((field, self._current_partial_header_value))
self._current_partial_header_name = b""
self._current_partial_header_value = b""
def on_headers_finished(self) -> None:
disposition, options = parse_options_header(self._current_part.content_disposition)
try:
self._current_part.field_name = _user_safe_decode(options[b"name"], self._charset)
except KeyError:
raise MultiPartException('The Content-Disposition header field "name" must be provided.')
if b"filename" in options:
self._current_files += 1
if self._current_files > self.max_files:
raise MultiPartException(f"Too many files. Maximum number of files is {self.max_files}.")
filename = _user_safe_decode(options[b"filename"], self._charset)
tempfile = SpooledTemporaryFile(max_size=self.spool_max_size)
self._files_to_close_on_error.append(tempfile)
self._current_part.file = UploadFile(
file=tempfile, # type: ignore[arg-type]
size=0,
filename=filename,
headers=Headers(raw=self._current_part.item_headers),
)
else:
self._current_fields += 1
if self._current_fields > self.max_fields:
raise MultiPartException(f"Too many fields. Maximum number of fields is {self.max_fields}.")
self._current_part.file = None
def on_end(self) -> None:
pass
async def parse(self) -> FormData:
# Parse the Content-Type header to get the multipart boundary.
_, params = parse_options_header(self.headers["Content-Type"])
charset = params.get(b"charset", "utf-8")
if isinstance(charset, bytes):
charset = charset.decode("latin-1")
self._charset = charset
try:
boundary = params[b"boundary"]
except KeyError:
raise MultiPartException("Missing boundary in multipart.")
# Callbacks dictionary.
callbacks: MultipartCallbacks = {
"on_part_begin": self.on_part_begin,
"on_part_data": self.on_part_data,
"on_part_end": self.on_part_end,
"on_header_field": self.on_header_field,
"on_header_value": self.on_header_value,
"on_header_end": self.on_header_end,
"on_headers_finished": self.on_headers_finished,
"on_end": self.on_end,
}
# Create the parser.
parser = multipart.MultipartParser(boundary, callbacks)
try:
# Feed the parser with data from the request.
async for chunk in self.stream:
parser.write(chunk)
# Write file data, it needs to use await with the UploadFile methods
# that call the corresponding file methods *in a threadpool*,
# otherwise, if they were called directly in the callback methods above
# (regular, non-async functions), that would block the event loop in
# the main thread.
for part, data in self._file_parts_to_write:
assert part.file # for type checkers
await part.file.write(data)
for part in self._file_parts_to_finish:
assert part.file # for type checkers
await part.file.seek(0)
self._file_parts_to_write.clear()
self._file_parts_to_finish.clear()
except MultiPartException as exc:
# Close all the files if there was an error.
for file in self._files_to_close_on_error:
file.close()
raise exc
parser.finalize()
return FormData(self.items)
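A minimal sketch of driving FormParser by hand, outside a request cycle — hedged: it assumes `python-multipart` is installed (the constructor asserts this), and relies on the loop above calling finalize() when it sees an empty chunk:

import anyio

from starlette.datastructures import Headers
from starlette.formparsers import FormParser


async def main() -> None:
    async def stream():
        yield b"a=1&b=two"  # urlencoded body
        yield b""  # empty chunk -> parser.finalize()

    headers = Headers({"content-type": "application/x-www-form-urlencoded"})
    form = await FormParser(headers, stream()).parse()
    assert form["a"] == "1" and form["b"] == "two"


anyio.run(main)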
|
from __future__ import annotations
import os
import threading
from collections.abc import Generator
from contextlib import AbstractContextManager, nullcontext as does_not_raise
from io import BytesIO
from pathlib import Path
from tempfile import SpooledTemporaryFile
from typing import Any, ClassVar
from unittest import mock
import pytest
from starlette.applications import Starlette
from starlette.datastructures import UploadFile
from starlette.formparsers import MultiPartException, MultiPartParser, _user_safe_decode
from starlette.requests import Request
from starlette.responses import JSONResponse
from starlette.routing import Mount
from starlette.types import ASGIApp, Receive, Scope, Send
from tests.types import TestClientFactory
class ForceMultipartDict(dict[Any, Any]):
def __bool__(self) -> bool:
return True
# FORCE_MULTIPART is an empty dict that boolean-evaluates as `True`.
FORCE_MULTIPART = ForceMultipartDict()
async def app(scope: Scope, receive: Receive, send: Send) -> None:
request = Request(scope, receive)
data = await request.form()
output: dict[str, Any] = {}
for key, value in data.items():
if isinstance(value, UploadFile):
content = await value.read()
output[key] = {
"filename": value.filename,
"size": value.size,
"content": content.decode(),
"content_type": value.content_type,
}
else:
output[key] = value
await request.close()
response = JSONResponse(output)
await response(scope, receive, send)
async def multi_items_app(scope: Scope, receive: Receive, send: Send) -> None:
request = Request(scope, receive)
data = await request.form()
output: dict[str, list[Any]] = {}
for key, value in data.multi_items():
if key not in output:
output[key] = []
if isinstance(value, UploadFile):
content = await value.read()
output[key].append(
{
"filename": value.filename,
"size": value.size,
"content": content.decode(),
"content_type": value.content_type,
}
)
else:
output[key].append(value)
await request.close()
response = JSONResponse(output)
await response(scope, receive, send)
async def app_with_headers(scope: Scope, receive: Receive, send: Send) -> None:
request = Request(scope, receive)
data = await request.form()
output: dict[str, Any] = {}
for key, value in data.items():
if isinstance(value, UploadFile):
content = await value.read()
output[key] = {
"filename": value.filename,
"size": value.size,
"content": content.decode(),
"content_type": value.content_type,
"headers": list(value.headers.items()),
}
else:
output[key] = value
await request.close()
response = JSONResponse(output)
await response(scope, receive, send)
async def app_read_body(scope: Scope, receive: Receive, send: Send) -> None:
request = Request(scope, receive)
# Read bytes, to force request.stream() to return the already parsed body
await request.body()
data = await request.form()
output = {}
for key, value in data.items():
output[key] = value
await request.close()
response = JSONResponse(output)
await response(scope, receive, send)
async def app_monitor_thread(scope: Scope, receive: Receive, send: Send) -> None:
"""Helper app to monitor what thread the app was called on.
This can later be used to validate thread/event loop operations.
"""
request = Request(scope, receive)
# Make sure we parse the form
await request.form()
await request.close()
# Send back the current thread id
response = JSONResponse({"thread_ident": threading.current_thread().ident})
await response(scope, receive, send)
def make_app_max_parts(max_files: int = 1000, max_fields: int = 1000, max_part_size: int = 1024 * 1024) -> ASGIApp:
async def app(scope: Scope, receive: Receive, send: Send) -> None:
request = Request(scope, receive)
data = await request.form(max_files=max_files, max_fields=max_fields, max_part_size=max_part_size)
output: dict[str, Any] = {}
for key, value in data.items():
if isinstance(value, UploadFile):
content = await value.read()
output[key] = {
"filename": value.filename,
"size": value.size,
"content": content.decode(),
"content_type": value.content_type,
}
else:
output[key] = value
await request.close()
response = JSONResponse(output)
await response(scope, receive, send)
return app
def test_multipart_request_data(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app)
response = client.post("/", data={"some": "data"}, files=FORCE_MULTIPART)
assert response.json() == {"some": "data"}
def test_multipart_request_files(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "test.txt")
with open(path, "wb") as file:
file.write(b"<file content>")
client = test_client_factory(app)
with open(path, "rb") as f:
response = client.post("/", files={"test": f})
assert response.json() == {
"test": {
"filename": "test.txt",
"size": 14,
"content": "<file content>",
"content_type": "text/plain",
}
}
def test_multipart_request_files_with_content_type(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path = os.path.join(tmpdir, "test.txt")
with open(path, "wb") as file:
file.write(b"<file content>")
client = test_client_factory(app)
with open(path, "rb") as f:
response = client.post("/", files={"test": ("test.txt", f, "text/plain")})
assert response.json() == {
"test": {
"filename": "test.txt",
"size": 14,
"content": "<file content>",
"content_type": "text/plain",
}
}
def test_multipart_request_multiple_files(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path1 = os.path.join(tmpdir, "test1.txt")
with open(path1, "wb") as file:
file.write(b"<file1 content>")
path2 = os.path.join(tmpdir, "test2.txt")
with open(path2, "wb") as file:
file.write(b"<file2 content>")
client = test_client_factory(app)
with open(path1, "rb") as f1, open(path2, "rb") as f2:
response = client.post("/", files={"test1": f1, "test2": ("test2.txt", f2, "text/plain")})
assert response.json() == {
"test1": {
"filename": "test1.txt",
"size": 15,
"content": "<file1 content>",
"content_type": "text/plain",
},
"test2": {
"filename": "test2.txt",
"size": 15,
"content": "<file2 content>",
"content_type": "text/plain",
},
}
def test_multipart_request_multiple_files_with_headers(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path1 = os.path.join(tmpdir, "test1.txt")
with open(path1, "wb") as file:
file.write(b"<file1 content>")
path2 = os.path.join(tmpdir, "test2.txt")
with open(path2, "wb") as file:
file.write(b"<file2 content>")
client = test_client_factory(app_with_headers)
with open(path1, "rb") as f1, open(path2, "rb") as f2:
response = client.post(
"/",
files=[
("test1", (None, f1)),
("test2", ("test2.txt", f2, "text/plain", {"x-custom": "f2"})),
],
)
assert response.json() == {
"test1": "<file1 content>",
"test2": {
"filename": "test2.txt",
"size": 15,
"content": "<file2 content>",
"content_type": "text/plain",
"headers": [
[
"content-disposition",
'form-data; name="test2"; filename="test2.txt"',
],
["x-custom", "f2"],
["content-type", "text/plain"],
],
},
}
def test_multi_items(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
path1 = os.path.join(tmpdir, "test1.txt")
with open(path1, "wb") as file:
file.write(b"<file1 content>")
path2 = os.path.join(tmpdir, "test2.txt")
with open(path2, "wb") as file:
file.write(b"<file2 content>")
client = test_client_factory(multi_items_app)
with open(path1, "rb") as f1, open(path2, "rb") as f2:
response = client.post(
"/",
data={"test1": "abc"},
files=[("test1", f1), ("test1", ("test2.txt", f2, "text/plain"))],
)
assert response.json() == {
"test1": [
"abc",
{
"filename": "test1.txt",
"size": 15,
"content": "<file1 content>",
"content_type": "text/plain",
},
{
"filename": "test2.txt",
"size": 15,
"content": "<file2 content>",
"content_type": "text/plain",
},
]
}
def test_multipart_request_mixed_files_and_data(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app)
response = client.post(
"/",
data=(
# data
b"--a7f7ac8d4e2e437c877bb7b8d7cc549c\r\n" # type: ignore
b'Content-Disposition: form-data; name="field0"\r\n\r\n'
b"value0\r\n"
# file
b"--a7f7ac8d4e2e437c877bb7b8d7cc549c\r\n"
b'Content-Disposition: form-data; name="file"; filename="file.txt"\r\n'
b"Content-Type: text/plain\r\n\r\n"
b"<file content>\r\n"
# data
b"--a7f7ac8d4e2e437c877bb7b8d7cc549c\r\n"
b'Content-Disposition: form-data; name="field1"\r\n\r\n'
b"value1\r\n"
b"--a7f7ac8d4e2e437c877bb7b8d7cc549c--\r\n"
),
headers={"Content-Type": ("multipart/form-data; boundary=a7f7ac8d4e2e437c877bb7b8d7cc549c")},
)
assert response.json() == {
"file": {
"filename": "file.txt",
"size": 14,
"content": "<file content>",
"content_type": "text/plain",
},
"field0": "value0",
"field1": "value1",
}
class ThreadTrackingSpooledTemporaryFile(SpooledTemporaryFile[bytes]):
"""Helper class to track which threads performed the rollover operation.
This is not threadsafe/multi-test safe.
"""
rollover_threads: ClassVar[set[int | None]] = set()
def rollover(self) -> None:
ThreadTrackingSpooledTemporaryFile.rollover_threads.add(threading.current_thread().ident)
super().rollover()
@pytest.fixture
def mock_spooled_temporary_file() -> Generator[None]:
try:
with mock.patch("starlette.formparsers.SpooledTemporaryFile", ThreadTrackingSpooledTemporaryFile):
yield
finally:
ThreadTrackingSpooledTemporaryFile.rollover_threads.clear()
def test_multipart_request_large_file_rollover_in_background_thread(
mock_spooled_temporary_file: None, test_client_factory: TestClientFactory
) -> None:
"""Test that Spooled file rollovers happen in background threads."""
data = BytesIO(b" " * (MultiPartParser.spool_max_size + 1))
client = test_client_factory(app_monitor_thread)
response = client.post("/", files=[("test_large", data)])
assert response.status_code == 200
# Parse the event thread id from the API response and ensure we have one
app_thread_ident = response.json().get("thread_ident")
assert app_thread_ident is not None
# Ensure the app thread was not the same as the rollover one and that a rollover thread exists
assert app_thread_ident not in ThreadTrackingSpooledTemporaryFile.rollover_threads
assert len(ThreadTrackingSpooledTemporaryFile.rollover_threads) == 1
def test_multipart_request_with_charset_for_filename(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app)
response = client.post(
"/",
data=(
# file
b"--a7f7ac8d4e2e437c877bb7b8d7cc549c\r\n" # type: ignore
b'Content-Disposition: form-data; name="file"; filename="\xe6\x96\x87\xe6\x9b\xb8.txt"\r\n'
b"Content-Type: text/plain\r\n\r\n"
b"<file content>\r\n"
b"--a7f7ac8d4e2e437c877bb7b8d7cc549c--\r\n"
),
headers={"Content-Type": ("multipart/form-data; charset=utf-8; boundary=a7f7ac8d4e2e437c877bb7b8d7cc549c")},
)
assert response.json() == {
"file": {
"filename": "文書.txt",
"size": 14,
"content": "<file content>",
"content_type": "text/plain",
}
}
def test_multipart_request_without_charset_for_filename(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app)
response = client.post(
"/",
data=(
# file
b"--a7f7ac8d4e2e437c877bb7b8d7cc549c\r\n" # type: ignore
b'Content-Disposition: form-data; name="file"; filename="\xe7\x94\xbb\xe5\x83\x8f.jpg"\r\n'
b"Content-Type: image/jpeg\r\n\r\n"
b"<file content>\r\n"
b"--a7f7ac8d4e2e437c877bb7b8d7cc549c--\r\n"
),
headers={"Content-Type": ("multipart/form-data; boundary=a7f7ac8d4e2e437c877bb7b8d7cc549c")},
)
assert response.json() == {
"file": {
"filename": "画像.jpg",
"size": 14,
"content": "<file content>",
"content_type": "image/jpeg",
}
}
def test_multipart_request_with_encoded_value(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app)
response = client.post(
"/",
data=(
b"--20b303e711c4ab8c443184ac833ab00f\r\n" # type: ignore
b"Content-Disposition: form-data; "
b'name="value"\r\n\r\n'
b"Transf\xc3\xa9rer\r\n"
b"--20b303e711c4ab8c443184ac833ab00f--\r\n"
),
headers={"Content-Type": ("multipart/form-data; charset=utf-8; boundary=20b303e711c4ab8c443184ac833ab00f")},
)
assert response.json() == {"value": "Transférer"}
def test_urlencoded_request_data(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app)
response = client.post("/", data={"some": "data"})
assert response.json() == {"some": "data"}
def test_no_request_data(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app)
response = client.post("/")
assert response.json() == {}
def test_urlencoded_percent_encoding(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app)
response = client.post("/", data={"some": "da ta"})
assert response.json() == {"some": "da ta"}
def test_urlencoded_percent_encoding_keys(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app)
response = client.post("/", data={"so me": "data"})
assert response.json() == {"so me": "data"}
def test_urlencoded_multi_field_app_reads_body(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app_read_body)
response = client.post("/", data={"some": "data", "second": "key pair"})
assert response.json() == {"some": "data", "second": "key pair"}
def test_multipart_multi_field_app_reads_body(tmpdir: Path, test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app_read_body)
response = client.post("/", data={"some": "data", "second": "key pair"}, files=FORCE_MULTIPART)
assert response.json() == {"some": "data", "second": "key pair"}
def test_user_safe_decode_helper() -> None:
result = _user_safe_decode(b"\xc4\x99\xc5\xbc\xc4\x87", "utf-8")
assert result == "ężć"
def test_user_safe_decode_ignores_wrong_charset() -> None:
result = _user_safe_decode(b"abc", "latin-8")
assert result == "abc"
@pytest.mark.parametrize(
"app,expectation",
[
(app, pytest.raises(MultiPartException)),
(Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
],
)
def test_missing_boundary_parameter(
app: ASGIApp,
expectation: AbstractContextManager[Exception],
test_client_factory: TestClientFactory,
) -> None:
client = test_client_factory(app)
with expectation:
res = client.post(
"/",
data=(
# file
b'Content-Disposition: form-data; name="file"; filename="\xe6\x96\x87\xe6\x9b\xb8.txt"\r\n' # type: ignore
b"Content-Type: text/plain\r\n\r\n"
b"<file content>\r\n"
),
headers={"Content-Type": "multipart/form-data; charset=utf-8"},
)
assert res.status_code == 400
assert res.text == "Missing boundary in multipart."
@pytest.mark.parametrize(
"app,expectation",
[
(app, pytest.raises(MultiPartException)),
(Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
],
)
def test_missing_name_parameter_on_content_disposition(
app: ASGIApp,
expectation: AbstractContextManager[Exception],
test_client_factory: TestClientFactory,
) -> None:
client = test_client_factory(app)
with expectation:
res = client.post(
"/",
data=(
# data
b"--a7f7ac8d4e2e437c877bb7b8d7cc549c\r\n" # type: ignore
b'Content-Disposition: form-data; ="field0"\r\n\r\n'
b"value0\r\n"
),
headers={"Content-Type": ("multipart/form-data; boundary=a7f7ac8d4e2e437c877bb7b8d7cc549c")},
)
assert res.status_code == 400
assert res.text == 'The Content-Disposition header field "name" must be provided.'
@pytest.mark.parametrize(
"app,expectation",
[
(app, pytest.raises(MultiPartException)),
(Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
],
)
def test_too_many_fields_raise(
app: ASGIApp,
expectation: AbstractContextManager[Exception],
test_client_factory: TestClientFactory,
) -> None:
client = test_client_factory(app)
fields = []
for i in range(1001):
fields.append(f'--B\r\nContent-Disposition: form-data; name="N{i}";\r\n\r\n\r\n')
data = "".join(fields).encode("utf-8")
with expectation:
res = client.post(
"/",
data=data, # type: ignore
headers={"Content-Type": ("multipart/form-data; boundary=B")},
)
assert res.status_code == 400
assert res.text == "Too many fields. Maximum number of fields is 1000."
@pytest.mark.parametrize(
"app,expectation",
[
(app, pytest.raises(MultiPartException)),
(Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
],
)
def test_too_many_files_raise(
app: ASGIApp,
expectation: AbstractContextManager[Exception],
test_client_factory: TestClientFactory,
) -> None:
client = test_client_factory(app)
fields = []
for i in range(1001):
fields.append(f'--B\r\nContent-Disposition: form-data; name="N{i}"; filename="F{i}";\r\n\r\n\r\n')
data = "".join(fields).encode("utf-8")
with expectation:
res = client.post(
"/",
data=data, # type: ignore
headers={"Content-Type": ("multipart/form-data; boundary=B")},
)
assert res.status_code == 400
assert res.text == "Too many files. Maximum number of files is 1000."
@pytest.mark.parametrize(
"app,expectation",
[
(app, pytest.raises(MultiPartException)),
(Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
],
)
def test_too_many_files_single_field_raise(
app: ASGIApp,
expectation: AbstractContextManager[Exception],
test_client_factory: TestClientFactory,
) -> None:
client = test_client_factory(app)
fields = []
for i in range(1001):
# This uses the same field name "N" for all files, equivalent to a
# multifile upload form field
fields.append(f'--B\r\nContent-Disposition: form-data; name="N"; filename="F{i}";\r\n\r\n\r\n')
data = "".join(fields).encode("utf-8")
with expectation:
res = client.post(
"/",
data=data, # type: ignore
headers={"Content-Type": ("multipart/form-data; boundary=B")},
)
assert res.status_code == 400
assert res.text == "Too many files. Maximum number of files is 1000."
@pytest.mark.parametrize(
"app,expectation",
[
(app, pytest.raises(MultiPartException)),
(Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
],
)
def test_too_many_files_and_fields_raise(
app: ASGIApp,
expectation: AbstractContextManager[Exception],
test_client_factory: TestClientFactory,
) -> None:
client = test_client_factory(app)
fields = []
for i in range(1001):
fields.append(f'--B\r\nContent-Disposition: form-data; name="F{i}"; filename="F{i}";\r\n\r\n\r\n')
fields.append(f'--B\r\nContent-Disposition: form-data; name="N{i}";\r\n\r\n\r\n')
data = "".join(fields).encode("utf-8")
with expectation:
res = client.post(
"/",
data=data, # type: ignore
headers={"Content-Type": ("multipart/form-data; boundary=B")},
)
assert res.status_code == 400
assert res.text == "Too many files. Maximum number of files is 1000."
@pytest.mark.parametrize(
"app,expectation",
[
(make_app_max_parts(max_fields=1), pytest.raises(MultiPartException)),
(
Starlette(routes=[Mount("/", app=make_app_max_parts(max_fields=1))]),
does_not_raise(),
),
],
)
def test_max_fields_is_customizable_low_raises(
app: ASGIApp,
expectation: AbstractContextManager[Exception],
test_client_factory: TestClientFactory,
) -> None:
client = test_client_factory(app)
fields = []
for i in range(2):
fields.append(f'--B\r\nContent-Disposition: form-data; name="N{i}";\r\n\r\n\r\n')
data = "".join(fields).encode("utf-8")
with expectation:
res = client.post(
"/",
data=data, # type: ignore
headers={"Content-Type": ("multipart/form-data; boundary=B")},
)
assert res.status_code == 400
assert res.text == "Too many fields. Maximum number of fields is 1."
@pytest.mark.parametrize(
"app,expectation",
[
(make_app_max_parts(max_files=1), pytest.raises(MultiPartException)),
(
Starlette(routes=[Mount("/", app=make_app_max_parts(max_files=1))]),
does_not_raise(),
),
],
)
def test_max_files_is_customizable_low_raises(
app: ASGIApp,
expectation: AbstractContextManager[Exception],
test_client_factory: TestClientFactory,
) -> None:
client = test_client_factory(app)
fields = []
for i in range(2):
fields.append(f'--B\r\nContent-Disposition: form-data; name="F{i}"; filename="F{i}";\r\n\r\n\r\n')
data = "".join(fields).encode("utf-8")
with expectation:
res = client.post(
"/",
data=data, # type: ignore
headers={"Content-Type": ("multipart/form-data; boundary=B")},
)
assert res.status_code == 400
assert res.text == "Too many files. Maximum number of files is 1."
def test_max_fields_is_customizable_high(test_client_factory: TestClientFactory) -> None:
client = test_client_factory(make_app_max_parts(max_fields=2000, max_files=2000))
fields = []
for i in range(2000):
fields.append(f'--B\r\nContent-Disposition: form-data; name="N{i}";\r\n\r\n\r\n')
fields.append(f'--B\r\nContent-Disposition: form-data; name="F{i}"; filename="F{i}";\r\n\r\n\r\n')
data = "".join(fields).encode("utf-8")
data += b"--B--\r\n"
res = client.post(
"/",
data=data, # type: ignore
headers={"Content-Type": ("multipart/form-data; boundary=B")},
)
assert res.status_code == 200
res_data = res.json()
assert res_data["N1999"] == ""
assert res_data["F1999"] == {
"filename": "F1999",
"size": 0,
"content": "",
"content_type": None,
}
@pytest.mark.parametrize(
"app,expectation",
[
(app, pytest.raises(MultiPartException)),
(Starlette(routes=[Mount("/", app=app)]), does_not_raise()),
],
)
def test_max_part_size_exceeds_limit(
app: ASGIApp,
expectation: AbstractContextManager[Exception],
test_client_factory: TestClientFactory,
) -> None:
client = test_client_factory(app)
boundary = "------------------------4K1ON9fZkj9uCUmqLHRbbR"
multipart_data = (
f"--{boundary}\r\n"
f'Content-Disposition: form-data; name="small"\r\n\r\n'
"small content\r\n"
f"--{boundary}\r\n"
f'Content-Disposition: form-data; name="large"\r\n\r\n'
+ ("x" * 1024 * 1024 + "x") # 1MB + 1 byte of data
+ "\r\n"
f"--{boundary}--\r\n"
).encode("utf-8")
headers = {
"Content-Type": f"multipart/form-data; boundary={boundary}",
"Transfer-Encoding": "chunked",
}
with expectation:
response = client.post("/", data=multipart_data, headers=headers) # type: ignore
assert response.status_code == 400
assert response.text == "Part exceeded maximum size of 1024KB."
@pytest.mark.parametrize(
"app,expectation",
[
(make_app_max_parts(max_part_size=1024 * 10), pytest.raises(MultiPartException)),
(
Starlette(routes=[Mount("/", app=make_app_max_parts(max_part_size=1024 * 10))]),
does_not_raise(),
),
],
)
def test_max_part_size_exceeds_custom_limit(
app: ASGIApp,
expectation: AbstractContextManager[Exception],
test_client_factory: TestClientFactory,
) -> None:
client = test_client_factory(app)
boundary = "------------------------4K1ON9fZkj9uCUmqLHRbbR"
multipart_data = (
f"--{boundary}\r\n"
f'Content-Disposition: form-data; name="small"\r\n\r\n'
"small content\r\n"
f"--{boundary}\r\n"
f'Content-Disposition: form-data; name="large"\r\n\r\n'
+ ("x" * 1024 * 10 + "x") # 1MB + 1 byte of data
+ "\r\n"
f"--{boundary}--\r\n"
).encode("utf-8")
headers = {
"Content-Type": f"multipart/form-data; boundary={boundary}",
"Transfer-Encoding": "chunked",
}
with expectation:
response = client.post("/", content=multipart_data, headers=headers)
assert response.status_code == 400
assert response.text == "Part exceeded maximum size of 10KB."
|
./temp_repos/starlette/starlette/formparsers.py
|
./temp_repos/starlette/tests/test_formparsers.py
|
starlette
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'FormMessage'.
Context:
- Class Name: FormMessage
- Dependencies to Mock: stream, message, headers
- Key Imports: multipart, starlette.datastructures, enum, typing, multipart.multipart, collections.abc, python_multipart.multipart, dataclasses, python_multipart, urllib.parse
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern (see the sketch below).
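One possible answer, as a minimal sketch: FormMessage in starlette.formparsers is a plain Enum of parser events (FIELD_START, FIELD_NAME, FIELD_DATA, FIELD_END, END), so the mock targets listed above (stream, message, headers) belong to the surrounding parser classes rather than to the enum itself; the test below exercises membership, a failing lookup, and a mocked payload pairing.
import unittest
from unittest.mock import MagicMock
from starlette.formparsers import FormMessage
class TestFormMessage(unittest.TestCase):
    def test_members_have_distinct_values(self) -> None:
        # Arrange
        members = list(FormMessage)
        # Act
        values = {member.value for member in members}
        # Assert: every parser event maps to a unique value
        self.assertEqual(len(values), len(members))
    def test_unknown_value_lookup_raises(self) -> None:
        # Arrange / Act / Assert: looking up a value outside the enum fails
        with self.assertRaises(ValueError):
            FormMessage(999)
    def test_event_pairs_with_mocked_payload(self) -> None:
        # Arrange: pair an event with a mocked chunk, as the parser callbacks do
        payload = MagicMock(name="chunk")
        message = (FormMessage.FIELD_DATA, payload)
        # Act / Assert
        self.assertIs(message[0], FormMessage.FIELD_DATA)
        self.assertIs(message[1], payload)
if __name__ == "__main__":
    unittest.main()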
|
FormMessage
|
python
|
from __future__ import annotations
import functools
import sys
from collections.abc import Awaitable, Callable, Generator
from contextlib import AbstractAsyncContextManager, contextmanager
from typing import Any, Generic, Protocol, TypeVar, overload
from starlette.types import Scope
if sys.version_info >= (3, 13): # pragma: no cover
from inspect import iscoroutinefunction
from typing import TypeIs
else: # pragma: no cover
from asyncio import iscoroutinefunction
from typing_extensions import TypeIs
has_exceptiongroups = True
if sys.version_info < (3, 11): # pragma: no cover
try:
from exceptiongroup import BaseExceptionGroup # type: ignore[unused-ignore,import-not-found]
except ImportError:
has_exceptiongroups = False
T = TypeVar("T")
AwaitableCallable = Callable[..., Awaitable[T]]
@overload
def is_async_callable(obj: AwaitableCallable[T]) -> TypeIs[AwaitableCallable[T]]: ...
@overload
def is_async_callable(obj: Any) -> TypeIs[AwaitableCallable[Any]]: ...
def is_async_callable(obj: Any) -> Any:
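    # Unwrap functools.partial layers, then treat coroutine functions and objects with an async __call__ as async.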
while isinstance(obj, functools.partial):
obj = obj.func
return iscoroutinefunction(obj) or (callable(obj) and iscoroutinefunction(obj.__call__))
T_co = TypeVar("T_co", covariant=True)
class AwaitableOrContextManager(Awaitable[T_co], AbstractAsyncContextManager[T_co], Protocol[T_co]): ...
class SupportsAsyncClose(Protocol):
async def close(self) -> None: ... # pragma: no cover
SupportsAsyncCloseType = TypeVar("SupportsAsyncCloseType", bound=SupportsAsyncClose, covariant=False)
class AwaitableOrContextManagerWrapper(Generic[SupportsAsyncCloseType]):
__slots__ = ("aw", "entered")
def __init__(self, aw: Awaitable[SupportsAsyncCloseType]) -> None:
self.aw = aw
def __await__(self) -> Generator[Any, None, SupportsAsyncCloseType]:
return self.aw.__await__()
async def __aenter__(self) -> SupportsAsyncCloseType:
self.entered = await self.aw
return self.entered
async def __aexit__(self, *args: Any) -> None | bool:
await self.entered.close()
return None
@contextmanager
def collapse_excgroups() -> Generator[None, None, None]:
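    # Unwrap ExceptionGroups that contain a single exception so the original error is re-raised directly.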
try:
yield
except BaseException as exc:
if has_exceptiongroups: # pragma: no cover
while isinstance(exc, BaseExceptionGroup) and len(exc.exceptions) == 1:
exc = exc.exceptions[0]
raise exc
def get_route_path(scope: Scope) -> str:
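    # Strip the mounted root_path prefix from the request path, when present, to get the app-relative route path.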
path: str = scope["path"]
root_path = scope.get("root_path", "")
if not root_path:
return path
if not path.startswith(root_path):
return path
if path == root_path:
return ""
if path[len(root_path)] == "/":
return path[len(root_path) :]
return path
|
import functools
from typing import Any
from unittest.mock import create_autospec
import pytest
from starlette._utils import get_route_path, is_async_callable
from starlette.types import Scope
def test_async_func() -> None:
async def async_func() -> None: ... # pragma: no cover
def func() -> None: ... # pragma: no cover
assert is_async_callable(async_func)
assert not is_async_callable(func)
def test_async_partial() -> None:
async def async_func(a: Any, b: Any) -> None: ... # pragma: no cover
def func(a: Any, b: Any) -> None: ... # pragma: no cover
partial = functools.partial(async_func, 1)
assert is_async_callable(partial)
partial = functools.partial(func, 1) # type: ignore
assert not is_async_callable(partial)
def test_async_method() -> None:
class Async:
async def method(self) -> None: ... # pragma: no cover
class Sync:
def method(self) -> None: ... # pragma: no cover
assert is_async_callable(Async().method)
assert not is_async_callable(Sync().method)
def test_async_object_call() -> None:
class Async:
async def __call__(self) -> None: ... # pragma: no cover
class Sync:
def __call__(self) -> None: ... # pragma: no cover
assert is_async_callable(Async())
assert not is_async_callable(Sync())
def test_async_partial_object_call() -> None:
class Async:
async def __call__(
self,
a: Any,
b: Any,
) -> None: ... # pragma: no cover
class Sync:
def __call__(
self,
a: Any,
b: Any,
) -> None: ... # pragma: no cover
partial = functools.partial(Async(), 1)
assert is_async_callable(partial)
partial = functools.partial(Sync(), 1) # type: ignore
assert not is_async_callable(partial)
def test_async_nested_partial() -> None:
async def async_func(
a: Any,
b: Any,
) -> None: ... # pragma: no cover
partial = functools.partial(async_func, b=2)
nested_partial = functools.partial(partial, a=1)
assert is_async_callable(nested_partial)
def test_async_mocked_async_function() -> None:
async def async_func() -> None: ... # pragma: no cover
mock = create_autospec(async_func)
assert is_async_callable(mock)
@pytest.mark.parametrize(
"scope, expected_result",
[
({"path": "/foo-123/bar", "root_path": "/foo"}, "/foo-123/bar"),
({"path": "/foo/bar", "root_path": "/foo"}, "/bar"),
({"path": "/foo", "root_path": "/foo"}, ""),
({"path": "/foo/bar", "root_path": "/bar"}, "/foo/bar"),
],
)
def test_get_route_path(scope: Scope, expected_result: str) -> None:
assert get_route_path(scope) == expected_result
|
./temp_repos/starlette/starlette/_utils.py
|
./temp_repos/starlette/tests/test__utils.py
|
starlette
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'AwaitableOrContextManager'.
Context:
- Class Name: AwaitableOrContextManager
- Dependencies to Mock: aw
- Key Imports: contextlib, inspect, exceptiongroup, functools, typing_extensions, typing, collections.abc, starlette.types, asyncio, __future__
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern (see the sketch below).
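One possible answer, as a minimal sketch: AwaitableOrContextManager is a typing Protocol and cannot be instantiated, so the observable behaviour lives in AwaitableOrContextManagerWrapper (shown in the source above), whose single dependency is the wrapped awaitable "aw"; that is what gets mocked below with unittest.mock's AsyncMock.
import unittest
from unittest.mock import AsyncMock
from starlette._utils import AwaitableOrContextManagerWrapper
class TestAwaitableOrContextManagerWrapper(unittest.IsolatedAsyncioTestCase):
    async def test_aenter_yields_awaited_value_and_aexit_closes(self) -> None:
        # Arrange: the wrapped awaitable resolves to a mocked resource
        resource = AsyncMock()
        async def aw() -> AsyncMock:
            return resource
        wrapper = AwaitableOrContextManagerWrapper(aw())
        # Act
        async with wrapper as entered:
            # Assert: the awaited value is bound to the "as" target
            self.assertIs(entered, resource)
        # Assert: close() runs exactly once on exit
        resource.close.assert_awaited_once()
    async def test_plain_await_does_not_close(self) -> None:
        # Arrange
        resource = AsyncMock()
        async def aw() -> AsyncMock:
            return resource
        # Act
        result = await AwaitableOrContextManagerWrapper(aw())
        # Assert: awaiting the wrapper never enters the context manager path
        self.assertIs(result, resource)
        resource.close.assert_not_awaited()
    async def test_body_failure_still_closes(self) -> None:
        # Arrange
        resource = AsyncMock()
        async def aw() -> AsyncMock:
            return resource
        # Act / Assert: the exception propagates, but close() still runs
        with self.assertRaises(RuntimeError):
            async with AwaitableOrContextManagerWrapper(aw()):
                raise RuntimeError("boom")
        resource.close.assert_awaited_once()
if __name__ == "__main__":
    unittest.main()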
|
AwaitableOrContextManager
|
python
|
from __future__ import annotations
import functools
import re
from collections.abc import Sequence
from starlette.datastructures import Headers, MutableHeaders
from starlette.responses import PlainTextResponse, Response
from starlette.types import ASGIApp, Message, Receive, Scope, Send
ALL_METHODS = ("DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT")
SAFELISTED_HEADERS = {"Accept", "Accept-Language", "Content-Language", "Content-Type"}
class CORSMiddleware:
def __init__(
self,
app: ASGIApp,
allow_origins: Sequence[str] = (),
allow_methods: Sequence[str] = ("GET",),
allow_headers: Sequence[str] = (),
allow_credentials: bool = False,
allow_origin_regex: str | None = None,
allow_private_network: bool = False,
expose_headers: Sequence[str] = (),
max_age: int = 600,
) -> None:
if "*" in allow_methods:
allow_methods = ALL_METHODS
compiled_allow_origin_regex = None
if allow_origin_regex is not None:
compiled_allow_origin_regex = re.compile(allow_origin_regex)
allow_all_origins = "*" in allow_origins
allow_all_headers = "*" in allow_headers
preflight_explicit_allow_origin = not allow_all_origins or allow_credentials
simple_headers: dict[str, str] = {}
if allow_all_origins:
simple_headers["Access-Control-Allow-Origin"] = "*"
if allow_credentials:
simple_headers["Access-Control-Allow-Credentials"] = "true"
if expose_headers:
simple_headers["Access-Control-Expose-Headers"] = ", ".join(expose_headers)
preflight_headers: dict[str, str] = {}
if preflight_explicit_allow_origin:
# The origin value will be set in preflight_response() if it is allowed.
preflight_headers["Vary"] = "Origin"
else:
preflight_headers["Access-Control-Allow-Origin"] = "*"
preflight_headers.update(
{
"Access-Control-Allow-Methods": ", ".join(allow_methods),
"Access-Control-Max-Age": str(max_age),
}
)
allow_headers = sorted(SAFELISTED_HEADERS | set(allow_headers))
if allow_headers and not allow_all_headers:
preflight_headers["Access-Control-Allow-Headers"] = ", ".join(allow_headers)
if allow_credentials:
preflight_headers["Access-Control-Allow-Credentials"] = "true"
self.app = app
self.allow_origins = allow_origins
self.allow_methods = allow_methods
self.allow_headers = [h.lower() for h in allow_headers]
self.allow_all_origins = allow_all_origins
self.allow_all_headers = allow_all_headers
self.preflight_explicit_allow_origin = preflight_explicit_allow_origin
self.allow_origin_regex = compiled_allow_origin_regex
self.allow_private_network = allow_private_network
self.simple_headers = simple_headers
self.preflight_headers = preflight_headers
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] != "http": # pragma: no cover
await self.app(scope, receive, send)
return
method = scope["method"]
headers = Headers(scope=scope)
origin = headers.get("origin")
if origin is None:
await self.app(scope, receive, send)
return
if method == "OPTIONS" and "access-control-request-method" in headers:
response = self.preflight_response(request_headers=headers)
await response(scope, receive, send)
return
await self.simple_response(scope, receive, send, request_headers=headers)
def is_allowed_origin(self, origin: str) -> bool:
if self.allow_all_origins:
return True
if self.allow_origin_regex is not None and self.allow_origin_regex.fullmatch(origin):
return True
return origin in self.allow_origins
def preflight_response(self, request_headers: Headers) -> Response:
requested_origin = request_headers["origin"]
requested_method = request_headers["access-control-request-method"]
requested_headers = request_headers.get("access-control-request-headers")
requested_private_network = request_headers.get("access-control-request-private-network")
headers = dict(self.preflight_headers)
failures: list[str] = []
if self.is_allowed_origin(origin=requested_origin):
if self.preflight_explicit_allow_origin:
# The "else" case is already accounted for in self.preflight_headers
# and the value would be "*".
headers["Access-Control-Allow-Origin"] = requested_origin
else:
failures.append("origin")
if requested_method not in self.allow_methods:
failures.append("method")
# If we allow all headers, then we have to mirror back any requested
# headers in the response.
if self.allow_all_headers and requested_headers is not None:
headers["Access-Control-Allow-Headers"] = requested_headers
elif requested_headers is not None:
for header in [h.lower() for h in requested_headers.split(",")]:
if header.strip() not in self.allow_headers:
failures.append("headers")
break
if requested_private_network is not None:
if self.allow_private_network:
headers["Access-Control-Allow-Private-Network"] = "true"
else:
failures.append("private-network")
        # We don't strictly need to use 400 responses here, since it's up to
        # the browser to enforce the CORS policy, but it's more informative
        # if we do.
if failures:
failure_text = "Disallowed CORS " + ", ".join(failures)
return PlainTextResponse(failure_text, status_code=400, headers=headers)
return PlainTextResponse("OK", status_code=200, headers=headers)
async def simple_response(self, scope: Scope, receive: Receive, send: Send, request_headers: Headers) -> None:
send = functools.partial(self.send, send=send, request_headers=request_headers)
await self.app(scope, receive, send)
async def send(self, message: Message, send: Send, request_headers: Headers) -> None:
if message["type"] != "http.response.start":
await send(message)
return
message.setdefault("headers", [])
headers = MutableHeaders(scope=message)
headers.update(self.simple_headers)
origin = request_headers["Origin"]
has_cookie = "cookie" in request_headers
# If request includes any cookie headers, then we must respond
# with the specific origin instead of '*'.
if self.allow_all_origins and has_cookie:
self.allow_explicit_origin(headers, origin)
# If we only allow specific origins, then we have to mirror back
# the Origin header in the response.
elif not self.allow_all_origins and self.is_allowed_origin(origin=origin):
self.allow_explicit_origin(headers, origin)
await send(message)
@staticmethod
def allow_explicit_origin(headers: MutableHeaders, origin: str) -> None:
headers["Access-Control-Allow-Origin"] = origin
headers.add_vary_header("Origin")
|
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import PlainTextResponse
from starlette.routing import Route
from tests.types import TestClientFactory
def test_cors_allow_all(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200)
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[
Middleware(
CORSMiddleware,
allow_origins=["*"],
allow_headers=["*"],
allow_methods=["*"],
expose_headers=["X-Status"],
allow_credentials=True,
)
],
)
client = test_client_factory(app)
# Test pre-flight response
headers = {
"Origin": "https://example.org",
"Access-Control-Request-Method": "GET",
"Access-Control-Request-Headers": "X-Example",
}
response = client.options("/", headers=headers)
assert response.status_code == 200
assert response.text == "OK"
assert response.headers["access-control-allow-origin"] == "https://example.org"
assert response.headers["access-control-allow-headers"] == "X-Example"
assert response.headers["access-control-allow-credentials"] == "true"
assert response.headers["vary"] == "Origin"
# Test standard response
headers = {"Origin": "https://example.org"}
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.text == "Homepage"
assert response.headers["access-control-allow-origin"] == "*"
assert response.headers["access-control-expose-headers"] == "X-Status"
assert response.headers["access-control-allow-credentials"] == "true"
# Test standard credentialed response
headers = {"Origin": "https://example.org", "Cookie": "star_cookie=sugar"}
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.text == "Homepage"
assert response.headers["access-control-allow-origin"] == "https://example.org"
assert response.headers["access-control-expose-headers"] == "X-Status"
assert response.headers["access-control-allow-credentials"] == "true"
# Test non-CORS response
response = client.get("/")
assert response.status_code == 200
assert response.text == "Homepage"
assert "access-control-allow-origin" not in response.headers
def test_cors_allow_all_except_credentials(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200)
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[
Middleware(
CORSMiddleware,
allow_origins=["*"],
allow_headers=["*"],
allow_methods=["*"],
expose_headers=["X-Status"],
)
],
)
client = test_client_factory(app)
# Test pre-flight response
headers = {
"Origin": "https://example.org",
"Access-Control-Request-Method": "GET",
"Access-Control-Request-Headers": "X-Example",
}
response = client.options("/", headers=headers)
assert response.status_code == 200
assert response.text == "OK"
assert response.headers["access-control-allow-origin"] == "*"
assert response.headers["access-control-allow-headers"] == "X-Example"
assert "access-control-allow-credentials" not in response.headers
assert "vary" not in response.headers
# Test standard response
headers = {"Origin": "https://example.org"}
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.text == "Homepage"
assert response.headers["access-control-allow-origin"] == "*"
assert response.headers["access-control-expose-headers"] == "X-Status"
assert "access-control-allow-credentials" not in response.headers
# Test non-CORS response
response = client.get("/")
assert response.status_code == 200
assert response.text == "Homepage"
assert "access-control-allow-origin" not in response.headers
def test_cors_allow_specific_origin(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200)
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[
Middleware(
CORSMiddleware,
allow_origins=["https://example.org"],
allow_headers=["X-Example", "Content-Type"],
)
],
)
client = test_client_factory(app)
# Test pre-flight response
headers = {
"Origin": "https://example.org",
"Access-Control-Request-Method": "GET",
"Access-Control-Request-Headers": "X-Example, Content-Type",
}
response = client.options("/", headers=headers)
assert response.status_code == 200
assert response.text == "OK"
assert response.headers["access-control-allow-origin"] == "https://example.org"
assert response.headers["access-control-allow-headers"] == (
"Accept, Accept-Language, Content-Language, Content-Type, X-Example"
)
assert "access-control-allow-credentials" not in response.headers
# Test standard response
headers = {"Origin": "https://example.org"}
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.text == "Homepage"
assert response.headers["access-control-allow-origin"] == "https://example.org"
assert "access-control-allow-credentials" not in response.headers
# Test non-CORS response
response = client.get("/")
assert response.status_code == 200
assert response.text == "Homepage"
assert "access-control-allow-origin" not in response.headers
def test_cors_disallowed_preflight(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> None:
pass # pragma: no cover
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[
Middleware(
CORSMiddleware,
allow_origins=["https://example.org"],
allow_headers=["X-Example"],
)
],
)
client = test_client_factory(app)
# Test pre-flight response
headers = {
"Origin": "https://another.org",
"Access-Control-Request-Method": "POST",
"Access-Control-Request-Headers": "X-Nope",
}
response = client.options("/", headers=headers)
assert response.status_code == 400
assert response.text == "Disallowed CORS origin, method, headers"
assert "access-control-allow-origin" not in response.headers
    # Bug-specific test, see https://github.com/Kludex/starlette/pull/1199
    # Test preflight response text with multiple disallowed headers
headers = {
"Origin": "https://example.org",
"Access-Control-Request-Method": "GET",
"Access-Control-Request-Headers": "X-Nope-1, X-Nope-2",
}
response = client.options("/", headers=headers)
assert response.text == "Disallowed CORS headers"
def test_preflight_allows_request_origin_if_origins_wildcard_and_credentials_allowed(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> None:
return # pragma: no cover
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[
Middleware(
CORSMiddleware,
allow_origins=["*"],
allow_methods=["POST"],
allow_credentials=True,
)
],
)
client = test_client_factory(app)
# Test pre-flight response
headers = {
"Origin": "https://example.org",
"Access-Control-Request-Method": "POST",
}
response = client.options(
"/",
headers=headers,
)
assert response.status_code == 200
assert response.headers["access-control-allow-origin"] == "https://example.org"
assert response.headers["access-control-allow-credentials"] == "true"
assert response.headers["vary"] == "Origin"
def test_cors_preflight_allow_all_methods(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> None:
pass # pragma: no cover
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[Middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["*"])],
)
client = test_client_factory(app)
headers = {
"Origin": "https://example.org",
"Access-Control-Request-Method": "POST",
}
for method in ("DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"):
response = client.options("/", headers=headers)
assert response.status_code == 200
assert method in response.headers["access-control-allow-methods"]
def test_cors_allow_all_methods(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200)
app = Starlette(
routes=[
Route(
"/",
endpoint=homepage,
methods=["delete", "get", "head", "options", "patch", "post", "put"],
)
],
middleware=[Middleware(CORSMiddleware, allow_origins=["*"], allow_methods=["*"])],
)
client = test_client_factory(app)
headers = {"Origin": "https://example.org"}
for method in ("patch", "post", "put"):
response = getattr(client, method)("/", headers=headers, json={})
assert response.status_code == 200
for method in ("delete", "get", "head", "options"):
response = getattr(client, method)("/", headers=headers)
assert response.status_code == 200
def test_cors_allow_origin_regex(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200)
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[
Middleware(
CORSMiddleware,
allow_headers=["X-Example", "Content-Type"],
allow_origin_regex="https://.*",
allow_credentials=True,
)
],
)
client = test_client_factory(app)
# Test standard response
headers = {"Origin": "https://example.org"}
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.text == "Homepage"
assert response.headers["access-control-allow-origin"] == "https://example.org"
assert response.headers["access-control-allow-credentials"] == "true"
# Test standard credentialed response
headers = {"Origin": "https://example.org", "Cookie": "star_cookie=sugar"}
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.text == "Homepage"
assert response.headers["access-control-allow-origin"] == "https://example.org"
assert response.headers["access-control-allow-credentials"] == "true"
# Test disallowed standard response
    # Note that enforcement is a browser concern: a disallowed origin shows up
    # as the absence of an "access-control-allow-origin" header in the response.
headers = {"Origin": "http://example.org"}
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.text == "Homepage"
assert "access-control-allow-origin" not in response.headers
# Test pre-flight response
headers = {
"Origin": "https://another.com",
"Access-Control-Request-Method": "GET",
"Access-Control-Request-Headers": "X-Example, content-type",
}
response = client.options("/", headers=headers)
assert response.status_code == 200
assert response.text == "OK"
assert response.headers["access-control-allow-origin"] == "https://another.com"
assert response.headers["access-control-allow-headers"] == (
"Accept, Accept-Language, Content-Language, Content-Type, X-Example"
)
assert response.headers["access-control-allow-credentials"] == "true"
# Test disallowed pre-flight response
headers = {
"Origin": "http://another.com",
"Access-Control-Request-Method": "GET",
"Access-Control-Request-Headers": "X-Example",
}
response = client.options("/", headers=headers)
assert response.status_code == 400
assert response.text == "Disallowed CORS origin"
assert "access-control-allow-origin" not in response.headers
def test_cors_allow_origin_regex_fullmatch(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200)
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[
Middleware(
CORSMiddleware,
allow_headers=["X-Example", "Content-Type"],
allow_origin_regex=r"https://.*\.example.org",
)
],
)
client = test_client_factory(app)
# Test standard response
headers = {"Origin": "https://subdomain.example.org"}
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.text == "Homepage"
assert response.headers["access-control-allow-origin"] == "https://subdomain.example.org"
assert "access-control-allow-credentials" not in response.headers
# Test disallowed standard response
headers = {"Origin": "https://subdomain.example.org.hacker.com"}
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.text == "Homepage"
assert "access-control-allow-origin" not in response.headers
def test_cors_credentialed_requests_return_specific_origin(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200)
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[Middleware(CORSMiddleware, allow_origins=["*"])],
)
client = test_client_factory(app)
# Test credentialed request
headers = {"Origin": "https://example.org", "Cookie": "star_cookie=sugar"}
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.text == "Homepage"
assert response.headers["access-control-allow-origin"] == "https://example.org"
assert "access-control-allow-credentials" not in response.headers
def test_cors_vary_header_defaults_to_origin(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200)
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[Middleware(CORSMiddleware, allow_origins=["https://example.org"])],
)
headers = {"Origin": "https://example.org"}
client = test_client_factory(app)
response = client.get("/", headers=headers)
assert response.status_code == 200
assert response.headers["vary"] == "Origin"
def test_cors_vary_header_is_not_set_for_non_credentialed_request(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200, headers={"Vary": "Accept-Encoding"})
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[Middleware(CORSMiddleware, allow_origins=["*"])],
)
client = test_client_factory(app)
response = client.get("/", headers={"Origin": "https://someplace.org"})
assert response.status_code == 200
assert response.headers["vary"] == "Accept-Encoding"
def test_cors_vary_header_is_properly_set_for_credentialed_request(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200, headers={"Vary": "Accept-Encoding"})
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[Middleware(CORSMiddleware, allow_origins=["*"])],
)
client = test_client_factory(app)
response = client.get("/", headers={"Cookie": "foo=bar", "Origin": "https://someplace.org"})
assert response.status_code == 200
assert response.headers["vary"] == "Accept-Encoding, Origin"
def test_cors_vary_header_is_properly_set_when_allow_origins_is_not_wildcard(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200, headers={"Vary": "Accept-Encoding"})
app = Starlette(
routes=[
Route("/", endpoint=homepage),
],
middleware=[Middleware(CORSMiddleware, allow_origins=["https://example.org"])],
)
client = test_client_factory(app)
response = client.get("/", headers={"Origin": "https://example.org"})
assert response.status_code == 200
assert response.headers["vary"] == "Accept-Encoding, Origin"
def test_cors_allowed_origin_does_not_leak_between_credentialed_requests(
test_client_factory: TestClientFactory,
) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200)
app = Starlette(
routes=[
Route("/", endpoint=homepage),
],
middleware=[
Middleware(
CORSMiddleware,
allow_origins=["*"],
allow_headers=["*"],
allow_methods=["*"],
)
],
)
client = test_client_factory(app)
response = client.get("/", headers={"Origin": "https://someplace.org"})
assert response.headers["access-control-allow-origin"] == "*"
assert "access-control-allow-credentials" not in response.headers
response = client.get("/", headers={"Cookie": "foo=bar", "Origin": "https://someplace.org"})
assert response.headers["access-control-allow-origin"] == "https://someplace.org"
assert "access-control-allow-credentials" not in response.headers
response = client.get("/", headers={"Origin": "https://someplace.org"})
assert response.headers["access-control-allow-origin"] == "*"
assert "access-control-allow-credentials" not in response.headers
def test_cors_private_network_access_allowed(test_client_factory: TestClientFactory) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage", status_code=200)
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[
Middleware(
CORSMiddleware,
allow_origins=["*"],
allow_methods=["*"],
allow_private_network=True,
)
],
)
client = test_client_factory(app)
headers_without_pna = {"Origin": "https://example.org", "Access-Control-Request-Method": "GET"}
headers_with_pna = {**headers_without_pna, "Access-Control-Request-Private-Network": "true"}
# Test preflight with Private Network Access request
response = client.options("/", headers=headers_with_pna)
assert response.status_code == 200
assert response.text == "OK"
assert response.headers["access-control-allow-private-network"] == "true"
# Test preflight without Private Network Access request
response = client.options("/", headers=headers_without_pna)
assert response.status_code == 200
assert response.text == "OK"
assert "access-control-allow-private-network" not in response.headers
# The access-control-allow-private-network header is not set for non-preflight requests
response = client.get("/", headers=headers_with_pna)
assert response.status_code == 200
assert response.text == "Homepage"
assert "access-control-allow-private-network" not in response.headers
assert "access-control-allow-origin" in response.headers
def test_cors_private_network_access_disallowed(test_client_factory: TestClientFactory) -> None:
def homepage(request: Request) -> None: ... # pragma: no cover
app = Starlette(
routes=[Route("/", endpoint=homepage)],
middleware=[
Middleware(
CORSMiddleware,
allow_origins=["*"],
allow_methods=["*"],
allow_private_network=False,
)
],
)
client = test_client_factory(app)
# Test preflight with Private Network Access request when not allowed
headers_without_pna = {"Origin": "https://example.org", "Access-Control-Request-Method": "GET"}
headers_with_pna = {**headers_without_pna, "Access-Control-Request-Private-Network": "true"}
response = client.options("/", headers=headers_without_pna)
assert response.status_code == 200
assert response.text == "OK"
assert "access-control-allow-private-network" not in response.headers
# If the request includes a Private Network Access header, but the middleware is configured to disallow it, the
# request should be denied with a 400 response.
response = client.options("/", headers=headers_with_pna)
assert response.status_code == 400
assert response.text == "Disallowed CORS private-network"
assert "access-control-allow-private-network" not in response.headers
|
./temp_repos/starlette/starlette/middleware/cors.py
|
./temp_repos/starlette/tests/middleware/test_cors.py
|
starlette
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'CORSMiddleware'.
Context:
- Class Name: CORSMiddleware
- Dependencies to Mock: app, allow_headers, max_age, allow_methods, allow_origin_regex, allow_private_network, expose_headers, allow_origins, allow_credentials
- Key Imports: starlette.datastructures, starlette.responses, functools, collections.abc, starlette.types, __future__, re
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern (see the sketch below).
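One possible answer, as a minimal sketch against the CORSMiddleware source above: the wrapped "app", "receive", and "send" are AsyncMocks, while the remaining listed dependencies (allow_origins, allow_methods, and so on) are plain constructor arguments, which is how the middleware consumes them.
import unittest
from unittest.mock import AsyncMock
from starlette.datastructures import Headers
from starlette.middleware.cors import CORSMiddleware
class TestCORSMiddleware(unittest.IsolatedAsyncioTestCase):
    def test_preflight_success(self) -> None:
        # Arrange
        middleware = CORSMiddleware(app=AsyncMock(), allow_origins=["https://example.org"], allow_methods=["GET"])
        request_headers = Headers({"origin": "https://example.org", "access-control-request-method": "GET"})
        # Act
        response = middleware.preflight_response(request_headers=request_headers)
        # Assert: the allowed origin is mirrored back
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.headers["access-control-allow-origin"], "https://example.org")
    def test_preflight_failure(self) -> None:
        # Arrange
        middleware = CORSMiddleware(app=AsyncMock(), allow_origins=["https://example.org"])
        request_headers = Headers({"origin": "https://evil.example", "access-control-request-method": "DELETE"})
        # Act
        response = middleware.preflight_response(request_headers=request_headers)
        # Assert: the disallowed origin and method yield a 400 response
        self.assertEqual(response.status_code, 400)
    async def test_non_http_scope_passes_through(self) -> None:
        # Arrange
        app = AsyncMock()
        middleware = CORSMiddleware(app=app, allow_origins=["*"])
        scope = {"type": "websocket"}
        receive, send = AsyncMock(), AsyncMock()
        # Act
        await middleware(scope, receive, send)
        # Assert: the middleware defers untouched to the wrapped app
        app.assert_awaited_once_with(scope, receive, send)
if __name__ == "__main__":
    unittest.main()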
|
CORSMiddleware
|
python
|
from __future__ import annotations
from collections.abc import AsyncGenerator, AsyncIterable, Awaitable, Callable, Mapping, MutableMapping
from typing import Any, TypeVar
import anyio
from starlette._utils import collapse_excgroups
from starlette.requests import ClientDisconnect, Request
from starlette.responses import Response
from starlette.types import ASGIApp, Message, Receive, Scope, Send
RequestResponseEndpoint = Callable[[Request], Awaitable[Response]]
DispatchFunction = Callable[[Request, RequestResponseEndpoint], Awaitable[Response]]
BodyStreamGenerator = AsyncGenerator[bytes | MutableMapping[str, Any], None]
AsyncContentStream = AsyncIterable[str | bytes | memoryview | MutableMapping[str, Any]]
T = TypeVar("T")
class _CachedRequest(Request):
"""
If the user calls Request.body() from their dispatch function
we cache the entire request body in memory and pass that to downstream middlewares,
but if they call Request.stream() then all we do is send an
empty body so that downstream things don't hang forever.
"""
def __init__(self, scope: Scope, receive: Receive):
super().__init__(scope, receive)
self._wrapped_rcv_disconnected = False
self._wrapped_rcv_consumed = False
self._wrapped_rc_stream = self.stream()
async def wrapped_receive(self) -> Message:
# wrapped_rcv state 1: disconnected
if self._wrapped_rcv_disconnected:
# we've already sent a disconnect to the downstream app
# we don't need to wait to get another one
# (although most ASGI servers will just keep sending it)
return {"type": "http.disconnect"}
        # wrapped_rcv state 2: consumed but not yet disconnected
if self._wrapped_rcv_consumed:
# since the downstream app has consumed us all that is left
# is to send it a disconnect
if self._is_disconnected:
# the middleware has already seen the disconnect
# since we know the client is disconnected no need to wait
# for the message
self._wrapped_rcv_disconnected = True
return {"type": "http.disconnect"}
# we don't know yet if the client is disconnected or not
# so we'll wait until we get that message
msg = await self.receive()
if msg["type"] != "http.disconnect": # pragma: no cover
# at this point a disconnect is all that we should be receiving
# if we get something else, things went wrong somewhere
raise RuntimeError(f"Unexpected message received: {msg['type']}")
self._wrapped_rcv_disconnected = True
return msg
# wrapped_rcv state 3: not yet consumed
if getattr(self, "_body", None) is not None:
# body() was called, we return it even if the client disconnected
self._wrapped_rcv_consumed = True
return {
"type": "http.request",
"body": self._body,
"more_body": False,
}
elif self._stream_consumed:
# stream() was called to completion
# return an empty body so that downstream apps don't hang
# waiting for a disconnect
self._wrapped_rcv_consumed = True
return {
"type": "http.request",
"body": b"",
"more_body": False,
}
else:
# body() was never called and stream() wasn't consumed
try:
stream = self.stream()
chunk = await stream.__anext__()
self._wrapped_rcv_consumed = self._stream_consumed
return {
"type": "http.request",
"body": chunk,
"more_body": not self._stream_consumed,
}
except ClientDisconnect:
self._wrapped_rcv_disconnected = True
return {"type": "http.disconnect"}
class BaseHTTPMiddleware:
def __init__(self, app: ASGIApp, dispatch: DispatchFunction | None = None) -> None:
self.app = app
self.dispatch_func = self.dispatch if dispatch is None else dispatch
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] != "http":
await self.app(scope, receive, send)
return
request = _CachedRequest(scope, receive)
wrapped_receive = request.wrapped_receive
response_sent = anyio.Event()
app_exc: Exception | None = None
exception_already_raised = False
async def call_next(request: Request) -> Response:
async def receive_or_disconnect() -> Message:
if response_sent.is_set():
return {"type": "http.disconnect"}
async with anyio.create_task_group() as task_group:
async def wrap(func: Callable[[], Awaitable[T]]) -> T:
result = await func()
task_group.cancel_scope.cancel()
return result
task_group.start_soon(wrap, response_sent.wait)
message = await wrap(wrapped_receive)
if response_sent.is_set():
return {"type": "http.disconnect"}
return message
async def send_no_error(message: Message) -> None:
try:
await send_stream.send(message)
except anyio.BrokenResourceError:
# recv_stream has been closed, i.e. response_sent has been set.
return
async def coro() -> None:
nonlocal app_exc
with send_stream:
try:
await self.app(scope, receive_or_disconnect, send_no_error)
except Exception as exc:
app_exc = exc
task_group.start_soon(coro)
try:
message = await recv_stream.receive()
info = message.get("info", None)
if message["type"] == "http.response.debug" and info is not None:
message = await recv_stream.receive()
except anyio.EndOfStream:
if app_exc is not None:
nonlocal exception_already_raised
exception_already_raised = True
# Prevent `anyio.EndOfStream` from polluting app exception context.
# If both cause and context are None then the context is suppressed
# and `anyio.EndOfStream` is not present in the exception traceback.
# If exception cause is not None then it is propagated with
# reraising here.
# If exception has no cause but has context set then the context is
# propagated as a cause with the reraise. This is necessary in order
# to prevent `anyio.EndOfStream` from polluting the exception
# context.
raise app_exc from app_exc.__cause__ or app_exc.__context__
raise RuntimeError("No response returned.")
assert message["type"] == "http.response.start"
async def body_stream() -> BodyStreamGenerator:
async for message in recv_stream:
if message["type"] == "http.response.pathsend":
yield message
break
assert message["type"] == "http.response.body", f"Unexpected message: {message}"
body = message.get("body", b"")
if body:
yield body
if not message.get("more_body", False):
break
response = _StreamingResponse(status_code=message["status"], content=body_stream(), info=info)
response.raw_headers = message["headers"]
return response
streams: anyio.create_memory_object_stream[Message] = anyio.create_memory_object_stream()
send_stream, recv_stream = streams
with recv_stream, send_stream, collapse_excgroups():
async with anyio.create_task_group() as task_group:
response = await self.dispatch_func(request, call_next)
await response(scope, wrapped_receive, send)
response_sent.set()
recv_stream.close()
if app_exc is not None and not exception_already_raised:
raise app_exc
async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
raise NotImplementedError() # pragma: no cover
class _StreamingResponse(Response):
def __init__(
self,
content: AsyncContentStream,
status_code: int = 200,
headers: Mapping[str, str] | None = None,
media_type: str | None = None,
info: Mapping[str, Any] | None = None,
) -> None:
self.info = info
self.body_iterator = content
self.status_code = status_code
self.media_type = media_type
self.init_headers(headers)
self.background = None
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if self.info is not None:
await send({"type": "http.response.debug", "info": self.info})
await send(
{
"type": "http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
}
)
should_close_body = True
async for chunk in self.body_iterator:
if isinstance(chunk, dict):
# We got an ASGI message which is not response body (eg: pathsend)
should_close_body = False
await send(chunk)
continue
await send({"type": "http.response.body", "body": chunk, "more_body": True})
if should_close_body:
await send({"type": "http.response.body", "body": b"", "more_body": False})
if self.background:
await self.background()
|
from __future__ import annotations
import contextvars
from collections.abc import AsyncGenerator, AsyncIterator, Generator
from contextlib import AsyncExitStack
from pathlib import Path
from typing import Any
import anyio
import pytest
from anyio.abc import TaskStatus
from starlette.applications import Starlette
from starlette.background import BackgroundTask
from starlette.middleware import Middleware, _MiddlewareFactory
from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint
from starlette.requests import ClientDisconnect, Request
from starlette.responses import FileResponse, PlainTextResponse, Response, StreamingResponse
from starlette.routing import Route, WebSocketRoute
from starlette.testclient import TestClient
from starlette.types import ASGIApp, Message, Receive, Scope, Send
from starlette.websockets import WebSocket
from tests.types import TestClientFactory
class CustomMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
response = await call_next(request)
response.headers["Custom-Header"] = "Example"
return response
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage")
def exc(request: Request) -> None:
raise Exception("Exc")
def exc_stream(request: Request) -> StreamingResponse:
return StreamingResponse(_generate_faulty_stream())
def _generate_faulty_stream() -> Generator[bytes, None, None]:
yield b"Ok"
raise Exception("Faulty Stream")
class NoResponse:
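    # An ASGI "app" that completes without sending any response, used to exercise the "No response returned." error path.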
def __init__(
self,
scope: Scope,
receive: Receive,
send: Send,
):
pass
def __await__(self) -> Generator[Any, None, None]:
return self.dispatch().__await__()
async def dispatch(self) -> None:
pass
async def websocket_endpoint(session: WebSocket) -> None:
await session.accept()
await session.send_text("Hello, world!")
await session.close()
app = Starlette(
routes=[
Route("/", endpoint=homepage),
Route("/exc", endpoint=exc),
Route("/exc-stream", endpoint=exc_stream),
Route("/no-response", endpoint=NoResponse),
WebSocketRoute("/ws", endpoint=websocket_endpoint),
],
middleware=[Middleware(CustomMiddleware)],
)
def test_custom_middleware(test_client_factory: TestClientFactory) -> None:
client = test_client_factory(app)
response = client.get("/")
assert response.headers["Custom-Header"] == "Example"
with pytest.raises(Exception) as ctx:
response = client.get("/exc")
assert str(ctx.value) == "Exc"
with pytest.raises(Exception) as ctx:
response = client.get("/exc-stream")
assert str(ctx.value) == "Faulty Stream"
with pytest.raises(RuntimeError):
response = client.get("/no-response")
with client.websocket_connect("/ws") as session:
text = session.receive_text()
assert text == "Hello, world!"
def test_state_data_across_multiple_middlewares(
test_client_factory: TestClientFactory,
) -> None:
expected_value1 = "foo"
expected_value2 = "bar"
class aMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
request.state.foo = expected_value1
response = await call_next(request)
return response
class bMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
request.state.bar = expected_value2
response = await call_next(request)
response.headers["X-State-Foo"] = request.state.foo
return response
class cMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
response = await call_next(request)
response.headers["X-State-Bar"] = request.state.bar
return response
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("OK")
app = Starlette(
routes=[Route("/", homepage)],
middleware=[
Middleware(aMiddleware),
Middleware(bMiddleware),
Middleware(cMiddleware),
],
)
client = test_client_factory(app)
response = client.get("/")
assert response.text == "OK"
assert response.headers["X-State-Foo"] == expected_value1
assert response.headers["X-State-Bar"] == expected_value2
def test_app_middleware_argument(test_client_factory: TestClientFactory) -> None:
def homepage(request: Request) -> PlainTextResponse:
return PlainTextResponse("Homepage")
app = Starlette(routes=[Route("/", homepage)], middleware=[Middleware(CustomMiddleware)])
client = test_client_factory(app)
response = client.get("/")
assert response.headers["Custom-Header"] == "Example"
def test_fully_evaluated_response(test_client_factory: TestClientFactory) -> None:
# Test for https://github.com/Kludex/starlette/issues/1022
class CustomMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> PlainTextResponse:
await call_next(request)
return PlainTextResponse("Custom")
app = Starlette(middleware=[Middleware(CustomMiddleware)])
client = test_client_factory(app)
response = client.get("/does_not_exist")
assert response.text == "Custom"
ctxvar: contextvars.ContextVar[str] = contextvars.ContextVar("ctxvar")
class CustomMiddlewareWithoutBaseHTTPMiddleware:
def __init__(self, app: ASGIApp) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
ctxvar.set("set by middleware")
await self.app(scope, receive, send)
assert ctxvar.get() == "set by endpoint"
class CustomMiddlewareUsingBaseHTTPMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
ctxvar.set("set by middleware")
resp = await call_next(request)
assert ctxvar.get() == "set by endpoint"
return resp # pragma: no cover
@pytest.mark.parametrize(
"middleware_cls",
[
CustomMiddlewareWithoutBaseHTTPMiddleware,
pytest.param(
CustomMiddlewareUsingBaseHTTPMiddleware,
marks=pytest.mark.xfail(
reason=(
"BaseHTTPMiddleware creates a TaskGroup which copies the context"
"and erases any changes to it made within the TaskGroup"
),
raises=AssertionError,
),
),
],
)
def test_contextvars(
test_client_factory: TestClientFactory,
middleware_cls: _MiddlewareFactory[Any],
) -> None:
    # this has to be an async endpoint because Starlette calls run_in_threadpool
    # on sync endpoints, which has its own set of peculiarities w.r.t. propagating
    # contextvars (it propagates them forwards but not backwards)
async def homepage(request: Request) -> PlainTextResponse:
assert ctxvar.get() == "set by middleware"
ctxvar.set("set by endpoint")
return PlainTextResponse("Homepage")
app = Starlette(middleware=[Middleware(middleware_cls)], routes=[Route("/", homepage)])
client = test_client_factory(app)
response = client.get("/")
assert response.status_code == 200, response.content
@pytest.mark.anyio
async def test_run_background_tasks_even_if_client_disconnects() -> None:
# test for https://github.com/Kludex/starlette/issues/1438
response_complete = anyio.Event()
background_task_run = anyio.Event()
async def sleep_and_set() -> None:
# small delay to give BaseHTTPMiddleware a chance to cancel us
# this is required to make the test fail prior to fixing the issue
# so do not be surprised if you remove it and the test still passes
await anyio.sleep(0.1)
background_task_run.set()
async def endpoint_with_background_task(_: Request) -> PlainTextResponse:
return PlainTextResponse(background=BackgroundTask(sleep_and_set))
async def passthrough(
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
return await call_next(request)
app = Starlette(
middleware=[Middleware(BaseHTTPMiddleware, dispatch=passthrough)],
routes=[Route("/", endpoint_with_background_task)],
)
scope = {
"type": "http",
"version": "3",
"method": "GET",
"path": "/",
}
async def receive() -> Message:
raise NotImplementedError("Should not be called!")
async def send(message: Message) -> None:
if message["type"] == "http.response.body":
if not message.get("more_body", False): # pragma: no branch
response_complete.set()
await app(scope, receive, send)
assert background_task_run.is_set()
def test_run_background_tasks_raise_exceptions(test_client_factory: TestClientFactory) -> None:
# test for https://github.com/Kludex/starlette/issues/2625
async def sleep_and_set() -> None:
await anyio.sleep(0.1)
raise ValueError("TEST")
async def endpoint_with_background_task(_: Request) -> PlainTextResponse:
return PlainTextResponse(background=BackgroundTask(sleep_and_set))
async def passthrough(request: Request, call_next: RequestResponseEndpoint) -> Response:
return await call_next(request)
app = Starlette(
middleware=[Middleware(BaseHTTPMiddleware, dispatch=passthrough)],
routes=[Route("/", endpoint_with_background_task)],
)
client = test_client_factory(app)
with pytest.raises(ValueError, match="TEST"):
client.get("/")
def test_exception_can_be_caught(test_client_factory: TestClientFactory) -> None:
async def error_endpoint(_: Request) -> None:
raise ValueError("TEST")
async def catches_error(request: Request, call_next: RequestResponseEndpoint) -> Response:
try:
return await call_next(request)
except ValueError as exc:
return PlainTextResponse(content=str(exc), status_code=400)
app = Starlette(
middleware=[Middleware(BaseHTTPMiddleware, dispatch=catches_error)],
routes=[Route("/", error_endpoint)],
)
client = test_client_factory(app)
response = client.get("/")
assert response.status_code == 400
assert response.text == "TEST"
@pytest.mark.anyio
async def test_do_not_block_on_background_tasks() -> None:
response_complete = anyio.Event()
events: list[str | Message] = []
async def sleep_and_set() -> None:
events.append("Background task started")
await anyio.sleep(0.1)
events.append("Background task finished")
async def endpoint_with_background_task(_: Request) -> PlainTextResponse:
return PlainTextResponse(content="Hello", background=BackgroundTask(sleep_and_set))
async def passthrough(request: Request, call_next: RequestResponseEndpoint) -> Response:
return await call_next(request)
app = Starlette(
middleware=[Middleware(BaseHTTPMiddleware, dispatch=passthrough)],
routes=[Route("/", endpoint_with_background_task)],
)
scope = {
"type": "http",
"version": "3",
"method": "GET",
"path": "/",
}
async def receive() -> Message:
raise NotImplementedError("Should not be called!")
async def send(message: Message) -> None:
if message["type"] == "http.response.body":
events.append(message)
if not message.get("more_body", False):
response_complete.set()
async with anyio.create_task_group() as tg:
tg.start_soon(app, scope, receive, send)
tg.start_soon(app, scope, receive, send)
# Without the fix, the background tasks would start and finish before the
# last http.response.body is sent.
assert events == [
{"body": b"Hello", "more_body": True, "type": "http.response.body"},
{"body": b"", "more_body": False, "type": "http.response.body"},
{"body": b"Hello", "more_body": True, "type": "http.response.body"},
{"body": b"", "more_body": False, "type": "http.response.body"},
"Background task started",
"Background task started",
"Background task finished",
"Background task finished",
]
@pytest.mark.anyio
async def test_run_context_manager_exit_even_if_client_disconnects() -> None:
# test for https://github.com/Kludex/starlette/issues/1678#issuecomment-1172916042
response_complete = anyio.Event()
context_manager_exited = anyio.Event()
async def sleep_and_set() -> None:
# small delay to give BaseHTTPMiddleware a chance to cancel us
# this is required to make the test fail prior to fixing the issue
# so do not be surprised if you remove it and the test still passes
await anyio.sleep(0.1)
context_manager_exited.set()
class ContextManagerMiddleware:
def __init__(self, app: ASGIApp):
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
async with AsyncExitStack() as stack:
stack.push_async_callback(sleep_and_set)
await self.app(scope, receive, send)
async def simple_endpoint(_: Request) -> PlainTextResponse:
return PlainTextResponse(background=BackgroundTask(sleep_and_set))
async def passthrough(
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
return await call_next(request)
app = Starlette(
middleware=[
Middleware(BaseHTTPMiddleware, dispatch=passthrough),
Middleware(ContextManagerMiddleware),
],
routes=[Route("/", simple_endpoint)],
)
scope = {
"type": "http",
"version": "3",
"method": "GET",
"path": "/",
}
async def receive() -> Message:
raise NotImplementedError("Should not be called!")
async def send(message: Message) -> None:
if message["type"] == "http.response.body":
if not message.get("more_body", False): # pragma: no branch
response_complete.set()
await app(scope, receive, send)
assert context_manager_exited.is_set()
def test_app_receives_http_disconnect_while_sending_if_discarded(
test_client_factory: TestClientFactory,
) -> None:
class DiscardingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: Any,
) -> PlainTextResponse:
# As a matter of ordering, this test targets the case where the downstream
# app response is discarded while it is sending a response body.
# We need to wait for the downstream app to begin sending a response body
# before sending the middleware response that will overwrite the downstream
# response.
downstream_app_response = await call_next(request)
body_generator = downstream_app_response.body_iterator
try:
await body_generator.__anext__()
finally:
await body_generator.aclose()
return PlainTextResponse("Custom")
async def downstream_app(
scope: Scope,
receive: Receive,
send: Send,
) -> None:
await send(
{
"type": "http.response.start",
"status": 200,
"headers": [
(b"content-type", b"text/plain"),
],
}
)
async with anyio.create_task_group() as task_group:
async def cancel_on_disconnect(
*,
task_status: TaskStatus[None] = anyio.TASK_STATUS_IGNORED,
) -> None:
task_status.started()
while True:
message = await receive()
if message["type"] == "http.disconnect": # pragma: no branch
task_group.cancel_scope.cancel()
break
# Using start instead of start_soon to ensure that
# cancel_on_disconnect is scheduled by the event loop
# before we start returning the body
await task_group.start(cancel_on_disconnect)
# A timeout is set for 0.1 second in order to ensure that
# we never deadlock the test run in an infinite loop
with anyio.move_on_after(0.1):
while True:
await send(
{
"type": "http.response.body",
"body": b"chunk ",
"more_body": True,
}
)
pytest.fail("http.disconnect should have been received and canceled the scope") # pragma: no cover
app = DiscardingMiddleware(downstream_app)
client = test_client_factory(app)
response = client.get("/does_not_exist")
assert response.text == "Custom"
def test_app_receives_http_disconnect_after_sending_if_discarded(
test_client_factory: TestClientFactory,
) -> None:
class DiscardingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> PlainTextResponse:
await call_next(request)
return PlainTextResponse("Custom")
async def downstream_app(
scope: Scope,
receive: Receive,
send: Send,
) -> None:
await send(
{
"type": "http.response.start",
"status": 200,
"headers": [
(b"content-type", b"text/plain"),
],
}
)
await send(
{
"type": "http.response.body",
"body": b"first chunk, ",
"more_body": True,
}
)
await send(
{
"type": "http.response.body",
"body": b"second chunk",
"more_body": True,
}
)
message = await receive()
assert message["type"] == "http.disconnect"
app = DiscardingMiddleware(downstream_app)
client = test_client_factory(app)
response = client.get("/does_not_exist")
assert response.text == "Custom"
def test_read_request_stream_in_app_after_middleware_calls_stream(
test_client_factory: TestClientFactory,
) -> None:
async def homepage(request: Request) -> PlainTextResponse:
expected = [b""]
async for chunk in request.stream():
assert chunk == expected.pop(0)
assert expected == []
return PlainTextResponse("Homepage")
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
expected = [b"a", b""]
async for chunk in request.stream():
assert chunk == expected.pop(0)
assert expected == []
return await call_next(request)
app = Starlette(
routes=[Route("/", homepage, methods=["POST"])],
middleware=[Middleware(ConsumingMiddleware)],
)
client: TestClient = test_client_factory(app)
response = client.post("/", content=b"a")
assert response.status_code == 200
def test_read_request_stream_in_app_after_middleware_calls_body(
test_client_factory: TestClientFactory,
) -> None:
async def homepage(request: Request) -> PlainTextResponse:
expected = [b"a", b""]
async for chunk in request.stream():
assert chunk == expected.pop(0)
assert expected == []
return PlainTextResponse("Homepage")
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
assert await request.body() == b"a"
return await call_next(request)
app = Starlette(
routes=[Route("/", homepage, methods=["POST"])],
middleware=[Middleware(ConsumingMiddleware)],
)
client: TestClient = test_client_factory(app)
response = client.post("/", content=b"a")
assert response.status_code == 200
def test_read_request_body_in_app_after_middleware_calls_stream(
test_client_factory: TestClientFactory,
) -> None:
async def homepage(request: Request) -> PlainTextResponse:
assert await request.body() == b""
return PlainTextResponse("Homepage")
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
expected = [b"a", b""]
async for chunk in request.stream():
assert chunk == expected.pop(0)
assert expected == []
return await call_next(request)
app = Starlette(
routes=[Route("/", homepage, methods=["POST"])],
middleware=[Middleware(ConsumingMiddleware)],
)
client: TestClient = test_client_factory(app)
response = client.post("/", content=b"a")
assert response.status_code == 200
def test_read_request_body_in_app_after_middleware_calls_body(
test_client_factory: TestClientFactory,
) -> None:
async def homepage(request: Request) -> PlainTextResponse:
assert await request.body() == b"a"
return PlainTextResponse("Homepage")
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
assert await request.body() == b"a"
return await call_next(request)
app = Starlette(
routes=[Route("/", homepage, methods=["POST"])],
middleware=[Middleware(ConsumingMiddleware)],
)
client: TestClient = test_client_factory(app)
response = client.post("/", content=b"a")
assert response.status_code == 200
def test_read_request_stream_in_dispatch_after_app_calls_stream(
test_client_factory: TestClientFactory,
) -> None:
async def homepage(request: Request) -> PlainTextResponse:
expected = [b"a", b""]
async for chunk in request.stream():
assert chunk == expected.pop(0)
assert expected == []
return PlainTextResponse("Homepage")
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
resp = await call_next(request)
with pytest.raises(RuntimeError, match="Stream consumed"):
async for _ in request.stream():
raise AssertionError("should not be called") # pragma: no cover
return resp
app = Starlette(
routes=[Route("/", homepage, methods=["POST"])],
middleware=[Middleware(ConsumingMiddleware)],
)
client: TestClient = test_client_factory(app)
response = client.post("/", content=b"a")
assert response.status_code == 200
def test_read_request_stream_in_dispatch_after_app_calls_body(
test_client_factory: TestClientFactory,
) -> None:
async def homepage(request: Request) -> PlainTextResponse:
assert await request.body() == b"a"
return PlainTextResponse("Homepage")
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
resp = await call_next(request)
with pytest.raises(RuntimeError, match="Stream consumed"):
async for _ in request.stream():
raise AssertionError("should not be called") # pragma: no cover
return resp
app = Starlette(
routes=[Route("/", homepage, methods=["POST"])],
middleware=[Middleware(ConsumingMiddleware)],
)
client: TestClient = test_client_factory(app)
response = client.post("/", content=b"a")
assert response.status_code == 200
@pytest.mark.anyio
async def test_read_request_stream_in_dispatch_wrapping_app_calls_body() -> None:
async def endpoint(scope: Scope, receive: Receive, send: Send) -> None:
request = Request(scope, receive)
async for chunk in request.stream(): # pragma: no branch
assert chunk == b"2"
break
await Response()(scope, receive, send)
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
expected = b"1"
response: Response | None = None
async for chunk in request.stream(): # pragma: no branch
assert chunk == expected
if expected == b"1":
response = await call_next(request)
expected = b"3"
else:
break
assert response is not None
return response
async def rcv() -> AsyncGenerator[Message, None]:
yield {"type": "http.request", "body": b"1", "more_body": True}
yield {"type": "http.request", "body": b"2", "more_body": True}
yield {"type": "http.request", "body": b"3"}
raise AssertionError( # pragma: no cover
"Should not be called, no need to poll for disconnect"
)
sent: list[Message] = []
async def send(msg: Message) -> None:
sent.append(msg)
app: ASGIApp = endpoint
app = ConsumingMiddleware(app)
rcv_stream = rcv()
await app({"type": "http"}, rcv_stream.__anext__, send)
assert sent == [
{
"type": "http.response.start",
"status": 200,
"headers": [(b"content-length", b"0")],
},
{"type": "http.response.body", "body": b"", "more_body": False},
]
await rcv_stream.aclose()
def test_read_request_stream_in_dispatch_after_app_calls_body_with_middleware_calling_body_before_call_next(
test_client_factory: TestClientFactory,
) -> None:
async def homepage(request: Request) -> PlainTextResponse:
assert await request.body() == b"a"
return PlainTextResponse("Homepage")
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
assert await request.body() == b"a" # this buffers the request body in memory
resp = await call_next(request)
async for chunk in request.stream():
if chunk:
assert chunk == b"a"
return resp
app = Starlette(
routes=[Route("/", homepage, methods=["POST"])],
middleware=[Middleware(ConsumingMiddleware)],
)
client: TestClient = test_client_factory(app)
response = client.post("/", content=b"a")
assert response.status_code == 200
def test_read_request_body_in_dispatch_after_app_calls_body_with_middleware_calling_body_before_call_next(
test_client_factory: TestClientFactory,
) -> None:
async def homepage(request: Request) -> PlainTextResponse:
assert await request.body() == b"a"
return PlainTextResponse("Homepage")
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
assert await request.body() == b"a" # this buffers the request body in memory
resp = await call_next(request)
assert await request.body() == b"a" # no problem here
return resp
app = Starlette(
routes=[Route("/", homepage, methods=["POST"])],
middleware=[Middleware(ConsumingMiddleware)],
)
client: TestClient = test_client_factory(app)
response = client.post("/", content=b"a")
assert response.status_code == 200
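# --- Illustrative sketch (not Starlette's internal implementation) ---
# The two tests above rely on the middleware being able to replay a body it
# has already consumed before call_next(). A minimal pure-ASGI version of
# that idea: drain the request body once, then hand the downstream app a
# receive() that replays it. The name CachedBodyMiddleware is hypothetical.
from starlette.types import ASGIApp, Message, Receive, Scope, Send


class CachedBodyMiddleware:
    def __init__(self, app: ASGIApp) -> None:
        self.app = app

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return
        # Drain the request body into memory.
        chunks: list[bytes] = []
        while True:
            message = await receive()
            if message["type"] != "http.request":
                break  # http.disconnect: stop draining
            chunks.append(message.get("body", b""))
            if not message.get("more_body", False):
                break
        body = b"".join(chunks)
        replayed = False

        async def replay_receive() -> Message:
            nonlocal replayed
            if not replayed:
                replayed = True
                return {"type": "http.request", "body": body, "more_body": False}
            # Once the cached body has been replayed, fall back to the real
            # channel (e.g. to deliver http.disconnect).
            return await receive()

        await self.app(scope, replay_receive, send)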
@pytest.mark.anyio
async def test_read_request_disconnected_client() -> None:
"""If we receive a disconnect message when the downstream ASGI
app calls receive() the Request instance passed into the dispatch function
should get marked as disconnected.
The downstream ASGI app should not get a ClientDisconnect raised,
instead if should just receive the disconnect message.
"""
async def endpoint(scope: Scope, receive: Receive, send: Send) -> None:
msg = await receive()
assert msg["type"] == "http.disconnect"
await Response()(scope, receive, send)
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
response = await call_next(request)
disconnected = await request.is_disconnected()
assert disconnected is True
return response
scope = {"type": "http", "method": "POST", "path": "/"}
async def receive() -> AsyncGenerator[Message, None]:
yield {"type": "http.disconnect"}
raise AssertionError("Should not be called, would hang") # pragma: no cover
async def send(msg: Message) -> None:
if msg["type"] == "http.response.start":
assert msg["status"] == 200
app: ASGIApp = ConsumingMiddleware(endpoint)
rcv = receive()
await app(scope, rcv.__anext__, send)
await rcv.aclose()
@pytest.mark.anyio
async def test_read_request_disconnected_after_consuming_stream() -> None:
async def endpoint(scope: Scope, receive: Receive, send: Send) -> None:
msg = await receive()
assert msg.pop("more_body", False) is False
assert msg == {"type": "http.request", "body": b"hi"}
msg = await receive()
assert msg == {"type": "http.disconnect"}
await Response()(scope, receive, send)
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
await request.body()
disconnected = await request.is_disconnected()
assert disconnected is True
response = await call_next(request)
return response
scope = {"type": "http", "method": "POST", "path": "/"}
async def receive() -> AsyncGenerator[Message, None]:
yield {"type": "http.request", "body": b"hi"}
yield {"type": "http.disconnect"}
raise AssertionError("Should not be called, would hang") # pragma: no cover
async def send(msg: Message) -> None:
if msg["type"] == "http.response.start":
assert msg["status"] == 200
app: ASGIApp = ConsumingMiddleware(endpoint)
rcv = receive()
await app(scope, rcv.__anext__, send)
await rcv.aclose()
def test_downstream_middleware_modifies_receive(
test_client_factory: TestClientFactory,
) -> None:
"""If a downstream middleware modifies receive() the final ASGI app
should see the modified version.
"""
async def endpoint(scope: Scope, receive: Receive, send: Send) -> None:
request = Request(scope, receive)
body = await request.body()
assert body == b"foo foo "
await Response()(scope, receive, send)
class ConsumingMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
body = await request.body()
assert body == b"foo "
return await call_next(request)
def modifying_middleware(app: ASGIApp) -> ASGIApp:
async def wrapped_app(scope: Scope, receive: Receive, send: Send) -> None:
async def wrapped_receive() -> Message:
msg = await receive()
if msg["type"] == "http.request": # pragma: no branch
msg["body"] = msg["body"] * 2
return msg
await app(scope, wrapped_receive, send)
return wrapped_app
client = test_client_factory(ConsumingMiddleware(modifying_middleware(endpoint)))
resp = client.post("/", content=b"foo ")
assert resp.status_code == 200
def test_pr_1519_comment_1236166180_example() -> None:
"""
https://github.com/Kludex/starlette/pull/1519#issuecomment-1236166180
"""
bodies: list[bytes] = []
class LogRequestBodySize(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
print(len(await request.body()))
return await call_next(request)
def replace_body_middleware(app: ASGIApp) -> ASGIApp:
async def wrapped_app(scope: Scope, receive: Receive, send: Send) -> None:
async def wrapped_rcv() -> Message:
msg = await receive()
msg["body"] += b"-foo"
return msg
await app(scope, wrapped_rcv, send)
return wrapped_app
async def endpoint(request: Request) -> Response:
body = await request.body()
bodies.append(body)
return Response()
app: ASGIApp = Starlette(routes=[Route("/", endpoint, methods=["POST"])])
app = replace_body_middleware(app)
app = LogRequestBodySize(app)
client = TestClient(app)
resp = client.post("/", content=b"Hello, World!")
resp.raise_for_status()
assert bodies == [b"Hello, World!-foo"]
@pytest.mark.anyio
async def test_multiple_middlewares_stacked_client_disconnected() -> None:
"""
Tests for:
- https://github.com/Kludex/starlette/issues/2516
- https://github.com/Kludex/starlette/pull/2687
"""
ordered_events: list[str] = []
unordered_events: list[str] = []
class MyMiddleware(BaseHTTPMiddleware):
def __init__(self, app: ASGIApp, version: int) -> None:
self.version = version
super().__init__(app)
async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
ordered_events.append(f"{self.version}:STARTED")
res = await call_next(request)
ordered_events.append(f"{self.version}:COMPLETED")
def background() -> None:
unordered_events.append(f"{self.version}:BACKGROUND")
assert res.background is None
res.background = BackgroundTask(background)
return res
async def sleepy(request: Request) -> Response:
try:
await request.body()
except ClientDisconnect:
pass
else: # pragma: no cover
raise AssertionError("Should have raised ClientDisconnect")
return Response(b"")
app = Starlette(
routes=[Route("/", sleepy)],
middleware=[Middleware(MyMiddleware, version=i + 1) for i in range(10)],
)
scope = {
"type": "http",
"version": "3",
"method": "GET",
"path": "/",
}
async def receive() -> AsyncIterator[Message]:
yield {"type": "http.disconnect"}
sent: list[Message] = []
async def send(message: Message) -> None:
sent.append(message)
await app(scope, receive().__anext__, send)
assert ordered_events == [
"1:STARTED",
"2:STARTED",
"3:STARTED",
"4:STARTED",
"5:STARTED",
"6:STARTED",
"7:STARTED",
"8:STARTED",
"9:STARTED",
"10:STARTED",
"10:COMPLETED",
"9:COMPLETED",
"8:COMPLETED",
"7:COMPLETED",
"6:COMPLETED",
"5:COMPLETED",
"4:COMPLETED",
"3:COMPLETED",
"2:COMPLETED",
"1:COMPLETED",
]
assert sorted(unordered_events) == sorted(
[
"1:BACKGROUND",
"2:BACKGROUND",
"3:BACKGROUND",
"4:BACKGROUND",
"5:BACKGROUND",
"6:BACKGROUND",
"7:BACKGROUND",
"8:BACKGROUND",
"9:BACKGROUND",
"10:BACKGROUND",
]
)
assert sent == [
{
"type": "http.response.start",
"status": 200,
"headers": [(b"content-length", b"0")],
},
{"type": "http.response.body", "body": b"", "more_body": False},
]
@pytest.mark.anyio
@pytest.mark.parametrize("send_body", [True, False])
async def test_poll_for_disconnect_repeated(send_body: bool) -> None:
async def app_poll_disconnect(scope: Scope, receive: Receive, send: Send) -> None:
for _ in range(2):
msg = await receive()
while msg["type"] == "http.request":
msg = await receive()
assert msg["type"] == "http.disconnect"
await Response(b"good!")(scope, receive, send)
class MyMiddleware(BaseHTTPMiddleware):
async def dispatch(self, request: Request, call_next: RequestResponseEndpoint) -> Response:
return await call_next(request)
app = MyMiddleware(app_poll_disconnect)
scope = {
"type": "http",
"version": "3",
"method": "GET",
"path": "/",
}
async def receive() -> AsyncIterator[Message]:
# the key here is that we only ever send one http.disconnect message
if send_body:
yield {"type": "http.request", "body": b"hello", "more_body": True}
yield {"type": "http.request", "body": b"", "more_body": False}
yield {"type": "http.disconnect"}
raise AssertionError("Should not be called, would hang") # pragma: no cover
sent: list[Message] = []
async def send(message: Message) -> None:
sent.append(message)
await app(scope, receive().__anext__, send)
assert sent == [
{
"type": "http.response.start",
"status": 200,
"headers": [(b"content-length", b"5")],
},
{"type": "http.response.body", "body": b"good!", "more_body": True},
{"type": "http.response.body", "body": b"", "more_body": False},
]
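# --- Illustrative sketch (not Starlette's internal implementation) ---
# The test above depends on the middleware remembering that http.disconnect
# was already seen: a real server sends it only once, yet the wrapped app may
# poll for it repeatedly. A minimal receive-wrapper expressing that idea
# (the helper name is hypothetical):
from starlette.types import Message, Receive


def remember_disconnect(receive: Receive) -> Receive:
    disconnected = False

    async def wrapped() -> Message:
        nonlocal disconnected
        if disconnected:
            # Replay the event for every later poll instead of awaiting a
            # second message that will never arrive.
            return {"type": "http.disconnect"}
        message = await receive()
        if message["type"] == "http.disconnect":
            disconnected = True
        return message

    return wrapped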
@pytest.mark.anyio
async def test_asgi_pathsend_events(tmpdir: Path) -> None:
path = tmpdir / "example.txt"
with path.open("w") as file:
file.write("<file content>")
response_complete = anyio.Event()
events: list[Message] = []
async def endpoint_with_pathsend(_: Request) -> FileResponse:
return FileResponse(path)
async def passthrough(request: Request, call_next: RequestResponseEndpoint) -> Response:
return await call_next(request)
app = Starlette(
middleware=[Middleware(BaseHTTPMiddleware, dispatch=passthrough)],
routes=[Route("/", endpoint_with_pathsend)],
)
scope = {
"type": "http",
"version": "3",
"method": "GET",
"path": "/",
"headers": [],
"extensions": {"http.response.pathsend": {}},
}
async def receive() -> Message:
raise NotImplementedError("Should not be called!") # pragma: no cover
async def send(message: Message) -> None:
events.append(message)
if message["type"] == "http.response.pathsend":
response_complete.set()
await app(scope, receive, send)
assert len(events) == 2
assert events[0]["type"] == "http.response.start"
assert events[1]["type"] == "http.response.pathsend"
def test_error_context_propagation(test_client_factory: TestClientFactory) -> None:
class PassthroughMiddleware(BaseHTTPMiddleware):
async def dispatch(
self,
request: Request,
call_next: RequestResponseEndpoint,
) -> Response:
return await call_next(request)
def exception_without_context(request: Request) -> None:
raise Exception("Exception")
def exception_with_context(request: Request) -> None:
try:
raise Exception("Inner exception")
except Exception:
raise Exception("Outer exception")
def exception_with_cause(request: Request) -> None:
try:
raise Exception("Inner exception")
except Exception as e:
raise Exception("Outer exception") from e
app = Starlette(
routes=[
Route("/exception-without-context", endpoint=exception_without_context),
Route("/exception-with-context", endpoint=exception_with_context),
Route("/exception-with-cause", endpoint=exception_with_cause),
],
middleware=[Middleware(PassthroughMiddleware)],
)
client = test_client_factory(app)
# For exceptions without context the context is filled with the `anyio.EndOfStream`
# but it is suppressed therefore not propagated to traceback.
with pytest.raises(Exception) as ctx:
client.get("/exception-without-context")
assert str(ctx.value) == "Exception"
assert ctx.value.__cause__ is None
assert ctx.value.__context__ is not None
assert ctx.value.__suppress_context__ is True
# For exceptions with context the context is propagated as a cause to avoid
# `anyio.EndOfStream` error from overwriting it.
with pytest.raises(Exception) as ctx:
client.get("/exception-with-context")
assert str(ctx.value) == "Outer exception"
assert ctx.value.__cause__ is not None
assert str(ctx.value.__cause__) == "Inner exception"
# For exceptions with cause check that it gets correctly propagated.
with pytest.raises(Exception) as ctx:
client.get("/exception-with-cause")
assert str(ctx.value) == "Outer exception"
assert ctx.value.__cause__ is not None
assert str(ctx.value.__cause__) == "Inner exception"
|
./temp_repos/starlette/starlette/middleware/base.py
|
./temp_repos/starlette/tests/middleware/test_base.py
|
starlette
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class '_CachedRequest'.
Context:
- Class Name: _CachedRequest
- Dependencies to Mock: status_code, app, headers, receive, dispatch, scope, content, info, media_type
- Key Imports: starlette._utils, starlette.responses, typing, anyio, collections.abc, starlette.types, __future__, starlette.requests
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
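A minimal sketch of a test satisfying these requirements is shown below; note that _CachedRequest is a private Starlette class, so its import location and (scope, receive) constructor are assumptions that may vary between versions.
import unittest
from unittest.mock import AsyncMock

from starlette.middleware.base import _CachedRequest  # assumed import location
from starlette.requests import ClientDisconnect


class TestCachedRequest(unittest.IsolatedAsyncioTestCase):
    async def test_body_is_cached_across_reads(self) -> None:
        # Arrange
        scope = {"type": "http", "method": "POST", "path": "/", "headers": []}
        receive = AsyncMock(
            side_effect=[{"type": "http.request", "body": b"hi", "more_body": False}]
        )
        request = _CachedRequest(scope, receive)
        # Act
        first = await request.body()
        second = await request.body()
        # Assert: the underlying receive channel is consumed only once.
        self.assertEqual(first, b"hi")
        self.assertEqual(second, b"hi")
        self.assertEqual(receive.await_count, 1)

    async def test_disconnect_raises_client_disconnect(self) -> None:
        # Arrange
        scope = {"type": "http", "method": "POST", "path": "/", "headers": []}
        receive = AsyncMock(side_effect=[{"type": "http.disconnect"}])
        request = _CachedRequest(scope, receive)
        # Act / Assert
        with self.assertRaises(ClientDisconnect):
            await request.body()


if __name__ == "__main__":
    unittest.main()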
|
_CachedRequest
|
python
|
import abc
import asyncio
import base64
import functools
import hashlib
import html
import inspect
import keyword
import os
import re
import sys
from collections.abc import (
Awaitable,
Callable,
Container,
Generator,
Iterable,
Iterator,
Mapping,
Sized,
)
from pathlib import Path
from re import Pattern
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, Final, NoReturn, Optional, TypedDict, cast
from yarl import URL
from . import hdrs
from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
from .helpers import DEBUG
from .http import HttpVersion11
from .typedefs import Handler, PathLike
from .web_exceptions import (
HTTPException,
HTTPExpectationFailed,
HTTPForbidden,
HTTPMethodNotAllowed,
HTTPNotFound,
)
from .web_fileresponse import FileResponse
from .web_request import Request
from .web_response import Response, StreamResponse
from .web_routedef import AbstractRouteDef
__all__ = (
"UrlDispatcher",
"UrlMappingMatchInfo",
"AbstractResource",
"Resource",
"PlainResource",
"DynamicResource",
"AbstractRoute",
"ResourceRoute",
"StaticResource",
"View",
)
if TYPE_CHECKING:
from .web_app import Application
CIRCULAR_SYMLINK_ERROR = (RuntimeError,) if sys.version_info < (3, 13) else ()
HTTP_METHOD_RE: Final[Pattern[str]] = re.compile(
r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$"
)
ROUTE_RE: Final[Pattern[str]] = re.compile(
r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})"
)
PATH_SEP: Final[str] = re.escape("/")
_ExpectHandler = Callable[[Request], Awaitable[StreamResponse | None]]
_Resolve = tuple[Optional["UrlMappingMatchInfo"], set[str]]
html_escape = functools.partial(html.escape, quote=True)
class _InfoDict(TypedDict, total=False):
path: str
formatter: str
pattern: Pattern[str]
directory: Path
prefix: str
routes: Mapping[str, "AbstractRoute"]
app: "Application"
domain: str
rule: "AbstractRuleMatching"
http_exception: HTTPException
class AbstractResource(Sized, Iterable["AbstractRoute"]):
def __init__(self, *, name: str | None = None) -> None:
self._name = name
@property
def name(self) -> str | None:
return self._name
@property
@abc.abstractmethod
def canonical(self) -> str:
"""Exposes the resource's canonical path.
For example '/foo/bar/{name}'
"""
@abc.abstractmethod # pragma: no branch
def url_for(self, **kwargs: str) -> URL:
"""Construct url for resource with additional params."""
@abc.abstractmethod # pragma: no branch
async def resolve(self, request: Request) -> _Resolve:
"""Resolve resource.
Return (UrlMappingMatchInfo, allowed_methods) pair.
"""
@abc.abstractmethod
def add_prefix(self, prefix: str) -> None:
"""Add a prefix to processed URLs.
Required for subapplications support.
"""
@abc.abstractmethod
def get_info(self) -> _InfoDict:
"""Return a dict with additional info useful for introspection"""
def freeze(self) -> None:
pass
@abc.abstractmethod
def raw_match(self, path: str) -> bool:
"""Perform a raw match against path"""
class AbstractRoute(abc.ABC):
def __init__(
self,
method: str,
handler: Handler | type[AbstractView],
*,
expect_handler: _ExpectHandler | None = None,
resource: AbstractResource | None = None,
) -> None:
if expect_handler is None:
expect_handler = _default_expect_handler
assert inspect.iscoroutinefunction(expect_handler) or (
sys.version_info < (3, 14) and asyncio.iscoroutinefunction(expect_handler)
), f"Coroutine is expected, got {expect_handler!r}"
method = method.upper()
if not HTTP_METHOD_RE.match(method):
raise ValueError(f"{method} is not allowed HTTP method")
if inspect.iscoroutinefunction(handler) or (
sys.version_info < (3, 14) and asyncio.iscoroutinefunction(handler)
):
pass
elif isinstance(handler, type) and issubclass(handler, AbstractView):
pass
else:
raise TypeError(
f"Only async functions are allowed as web-handlers, got {handler!r}"
)
self._method = method
self._handler = handler
self._expect_handler = expect_handler
self._resource = resource
@property
def method(self) -> str:
return self._method
@property
def handler(self) -> Handler:
return self._handler
@property
@abc.abstractmethod
def name(self) -> str | None:
"""Optional route's name, always equals to resource's name."""
@property
def resource(self) -> AbstractResource | None:
return self._resource
@abc.abstractmethod
def get_info(self) -> _InfoDict:
"""Return a dict with additional info useful for introspection"""
@abc.abstractmethod # pragma: no branch
def url_for(self, *args: str, **kwargs: str) -> URL:
"""Construct url for route with additional params."""
async def handle_expect_header(self, request: Request) -> StreamResponse | None:
return await self._expect_handler(request)
class UrlMappingMatchInfo(dict[str, str], AbstractMatchInfo):
__slots__ = ("_route", "_apps", "_current_app", "_frozen")
def __init__(self, match_dict: dict[str, str], route: AbstractRoute) -> None:
super().__init__(match_dict)
self._route = route
self._apps: list[Application] = []
self._current_app: Application | None = None
self._frozen = False
@property
def handler(self) -> Handler:
return self._route.handler
@property
def route(self) -> AbstractRoute:
return self._route
@property
def expect_handler(self) -> _ExpectHandler:
return self._route.handle_expect_header
@property
def http_exception(self) -> HTTPException | None:
return None
def get_info(self) -> _InfoDict: # type: ignore[override]
return self._route.get_info()
@property
def apps(self) -> tuple["Application", ...]:
return tuple(self._apps)
def add_app(self, app: "Application") -> None:
if self._frozen:
raise RuntimeError("Cannot change apps stack after .freeze() call")
if self._current_app is None:
self._current_app = app
self._apps.insert(0, app)
@property
def current_app(self) -> "Application":
app = self._current_app
assert app is not None
return app
@current_app.setter
def current_app(self, app: "Application") -> None:
if DEBUG:
if app not in self._apps:
raise RuntimeError(
f"Expected one of the following apps {self._apps!r}, got {app!r}"
)
self._current_app = app
def freeze(self) -> None:
self._frozen = True
def __repr__(self) -> str:
return f"<MatchInfo {super().__repr__()}: {self._route}>"
class MatchInfoError(UrlMappingMatchInfo):
__slots__ = ("_exception",)
def __init__(self, http_exception: HTTPException) -> None:
self._exception = http_exception
super().__init__({}, SystemRoute(self._exception))
@property
def http_exception(self) -> HTTPException:
return self._exception
def __repr__(self) -> str:
return f"<MatchInfoError {self._exception.status}: {self._exception.reason}>"
async def _default_expect_handler(request: Request) -> None:
"""Default handler for Expect header.
Just send "100 Continue" to client.
raise HTTPExpectationFailed if value of header is not "100-continue"
"""
expect = request.headers.get(hdrs.EXPECT, "")
if request.version == HttpVersion11:
if expect.lower() == "100-continue":
await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
# Reset output_size as we haven't started the main body yet.
request.writer.output_size = 0
else:
raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
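# --- Usage sketch (handler names are hypothetical) ---
# A route can replace the default behaviour above by passing its own
# coroutine via the expect_handler parameter of add_route(); per the
# _ExpectHandler alias it may return a StreamResponse to reject the request
# before the body is uploaded:
from aiohttp import web


async def reject_large_uploads(request: web.Request) -> web.StreamResponse | None:
    if int(request.headers.get("Content-Length", 0)) > 1_000_000:
        return web.Response(status=417, text="Expectation Failed")
    await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
    return None


async def upload_handler(request: web.Request) -> web.Response:
    data = await request.read()
    return web.Response(text=f"got {len(data)} bytes")


def setup(app: web.Application) -> None:
    app.router.add_route(
        "PUT", "/upload", upload_handler, expect_handler=reject_large_uploads
    )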
class Resource(AbstractResource):
def __init__(self, *, name: str | None = None) -> None:
super().__init__(name=name)
self._routes: dict[str, ResourceRoute] = {}
self._any_route: ResourceRoute | None = None
self._allowed_methods: set[str] = set()
def add_route(
self,
method: str,
handler: type[AbstractView] | Handler,
*,
expect_handler: _ExpectHandler | None = None,
) -> "ResourceRoute":
if route := self._routes.get(method, self._any_route):
raise RuntimeError(
"Added route will never be executed, "
f"method {route.method} is already "
"registered"
)
route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)
self.register_route(route_obj)
return route_obj
def register_route(self, route: "ResourceRoute") -> None:
assert isinstance(
route, ResourceRoute
), f"Instance of Route class is required, got {route!r}"
if route.method == hdrs.METH_ANY:
self._any_route = route
self._allowed_methods.add(route.method)
self._routes[route.method] = route
async def resolve(self, request: Request) -> _Resolve:
if (match_dict := self._match(request.rel_url.path_safe)) is None:
return None, set()
if route := self._routes.get(request.method, self._any_route):
return UrlMappingMatchInfo(match_dict, route), self._allowed_methods
return None, self._allowed_methods
@abc.abstractmethod
def _match(self, path: str) -> dict[str, str] | None:
"""Return dict of path values if path matches this resource, otherwise None."""
def __len__(self) -> int:
return len(self._routes)
def __iter__(self) -> Iterator["ResourceRoute"]:
return iter(self._routes.values())
# TODO: implement all abstract methods
class PlainResource(Resource):
def __init__(self, path: str, *, name: str | None = None) -> None:
super().__init__(name=name)
assert not path or path.startswith("/")
self._path = path
@property
def canonical(self) -> str:
return self._path
def freeze(self) -> None:
if not self._path:
self._path = "/"
def add_prefix(self, prefix: str) -> None:
assert prefix.startswith("/")
assert not prefix.endswith("/")
assert len(prefix) > 1
self._path = prefix + self._path
def _match(self, path: str) -> dict[str, str] | None:
# string comparison is about 10 times faster than regexp matching
if self._path == path:
return {}
return None
def raw_match(self, path: str) -> bool:
return self._path == path
def get_info(self) -> _InfoDict:
return {"path": self._path}
def url_for(self) -> URL: # type: ignore[override]
return URL.build(path=self._path, encoded=True)
def __repr__(self) -> str:
name = "'" + self.name + "' " if self.name is not None else ""
return f"<PlainResource {name} {self._path}>"
class DynamicResource(Resource):
DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
GOOD = r"[^{}/]+"
def __init__(self, path: str, *, name: str | None = None) -> None:
super().__init__(name=name)
self._orig_path = path
pattern = ""
formatter = ""
for part in ROUTE_RE.split(path):
match = self.DYN.fullmatch(part)
if match:
pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)
formatter += "{" + match.group("var") + "}"
continue
match = self.DYN_WITH_RE.fullmatch(part)
if match:
pattern += "(?P<{var}>{re})".format(**match.groupdict())
formatter += "{" + match.group("var") + "}"
continue
if "{" in part or "}" in part:
raise ValueError(f"Invalid path '{path}'['{part}']")
part = _requote_path(part)
formatter += part
pattern += re.escape(part)
try:
compiled = re.compile(pattern)
except re.error as exc:
raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
assert compiled.pattern.startswith(PATH_SEP)
assert formatter.startswith("/")
self._pattern = compiled
self._formatter = formatter
@property
def canonical(self) -> str:
return self._formatter
def add_prefix(self, prefix: str) -> None:
assert prefix.startswith("/")
assert not prefix.endswith("/")
assert len(prefix) > 1
self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
self._formatter = prefix + self._formatter
def _match(self, path: str) -> dict[str, str] | None:
match = self._pattern.fullmatch(path)
if match is None:
return None
return {
key: _unquote_path_safe(value) for key, value in match.groupdict().items()
}
def raw_match(self, path: str) -> bool:
return self._orig_path == path
def get_info(self) -> _InfoDict:
return {"formatter": self._formatter, "pattern": self._pattern}
def url_for(self, **parts: str) -> URL:
url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})
return URL.build(path=url, encoded=True)
def __repr__(self) -> str:
name = "'" + self.name + "' " if self.name is not None else ""
return f"<DynamicResource {name} {self._formatter}>"
class PrefixResource(AbstractResource):
def __init__(self, prefix: str, *, name: str | None = None) -> None:
assert not prefix or prefix.startswith("/"), prefix
assert prefix in ("", "/") or not prefix.endswith("/"), prefix
super().__init__(name=name)
self._prefix = _requote_path(prefix)
self._prefix2 = self._prefix + "/"
@property
def canonical(self) -> str:
return self._prefix
def add_prefix(self, prefix: str) -> None:
assert prefix.startswith("/")
assert not prefix.endswith("/")
assert len(prefix) > 1
self._prefix = prefix + self._prefix
self._prefix2 = self._prefix + "/"
def raw_match(self, prefix: str) -> bool:
return False
# TODO: implement the missing abstract methods
class StaticResource(PrefixResource):
VERSION_KEY = "v"
def __init__(
self,
prefix: str,
directory: PathLike,
*,
name: str | None = None,
expect_handler: _ExpectHandler | None = None,
chunk_size: int = 256 * 1024,
show_index: bool = False,
follow_symlinks: bool = False,
append_version: bool = False,
) -> None:
super().__init__(prefix, name=name)
try:
directory = Path(directory).expanduser().resolve(strict=True)
except FileNotFoundError as error:
raise ValueError(f"'{directory}' does not exist") from error
if not directory.is_dir():
raise ValueError(f"'{directory}' is not a directory")
self._directory = directory
self._show_index = show_index
self._chunk_size = chunk_size
self._follow_symlinks = follow_symlinks
self._expect_handler = expect_handler
self._append_version = append_version
self._routes = {
"GET": ResourceRoute(
"GET", self._handle, self, expect_handler=expect_handler
),
"HEAD": ResourceRoute(
"HEAD", self._handle, self, expect_handler=expect_handler
),
}
self._allowed_methods = set(self._routes)
def url_for( # type: ignore[override]
self,
*,
filename: PathLike,
append_version: bool | None = None,
) -> URL:
if append_version is None:
append_version = self._append_version
filename = str(filename).lstrip("/")
url = URL.build(path=self._prefix, encoded=True)
# filename is not encoded
url = url / filename
if append_version:
unresolved_path = self._directory.joinpath(filename)
try:
if self._follow_symlinks:
normalized_path = Path(os.path.normpath(unresolved_path))
normalized_path.relative_to(self._directory)
filepath = normalized_path.resolve()
else:
filepath = unresolved_path.resolve()
filepath.relative_to(self._directory)
except (ValueError, FileNotFoundError):
# ValueError is raised when the path points to a symlink
# while follow_symlinks is False
return url # relatively safe
if filepath.is_file():
# TODO cache file content
# with file watcher for cache invalidation
with filepath.open("rb") as f:
file_bytes = f.read()
h = self._get_file_hash(file_bytes)
url = url.with_query({self.VERSION_KEY: h})
return url
return url
@staticmethod
def _get_file_hash(byte_array: bytes) -> str:
m = hashlib.sha256()  # TODO: make the hash algorithm (sha256) a configurable parameter
m.update(byte_array)
b64 = base64.urlsafe_b64encode(m.digest())
return b64.decode("ascii")
def get_info(self) -> _InfoDict:
return {
"directory": self._directory,
"prefix": self._prefix,
"routes": self._routes,
}
def set_options_route(self, handler: Handler) -> None:
if "OPTIONS" in self._routes:
raise RuntimeError("OPTIONS route was set already")
self._routes["OPTIONS"] = ResourceRoute(
"OPTIONS", handler, self, expect_handler=self._expect_handler
)
self._allowed_methods.add("OPTIONS")
async def resolve(self, request: Request) -> _Resolve:
path = request.rel_url.path_safe
method = request.method
if not path.startswith(self._prefix2) and path != self._prefix:
return None, set()
allowed_methods = self._allowed_methods
if method not in allowed_methods:
return None, allowed_methods
match_dict = {"filename": _unquote_path_safe(path[len(self._prefix) + 1 :])}
return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods)
def __len__(self) -> int:
return len(self._routes)
def __iter__(self) -> Iterator[AbstractRoute]:
return iter(self._routes.values())
async def _handle(self, request: Request) -> StreamResponse:
rel_url = request.match_info["filename"]
filename = Path(rel_url)
if filename.anchor:
# rel_url is an absolute name like
# /static/\\machine_name\c$ or /static/D:\path
# where the static dir is totally different
raise HTTPForbidden()
unresolved_path = self._directory.joinpath(filename)
loop = asyncio.get_running_loop()
return await loop.run_in_executor(
None, self._resolve_path_to_response, unresolved_path
)
def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse:
"""Take the unresolved path and query the file system to form a response."""
# Check for access outside the root directory. For follow symlinks, URI
# cannot traverse out, but symlinks can. Otherwise, no access outside
# root is permitted.
try:
if self._follow_symlinks:
normalized_path = Path(os.path.normpath(unresolved_path))
normalized_path.relative_to(self._directory)
file_path = normalized_path.resolve()
else:
file_path = unresolved_path.resolve()
file_path.relative_to(self._directory)
except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error:
# ValueError is raised for the relative check. Circular symlinks
# raise here on resolving for python < 3.13.
raise HTTPNotFound() from error
# if path is a directory, return the contents if permitted. Note the
# directory check will raise if a segment is not readable.
try:
if file_path.is_dir():
if self._show_index:
return Response(
text=self._directory_as_html(file_path),
content_type="text/html",
)
else:
raise HTTPForbidden()
except PermissionError as error:
raise HTTPForbidden() from error
# Return the file response, which handles all other checks.
return FileResponse(file_path, chunk_size=self._chunk_size)
def _directory_as_html(self, dir_path: Path) -> str:
"""returns directory's index as html."""
assert dir_path.is_dir()
relative_path_to_dir = dir_path.relative_to(self._directory).as_posix()
index_of = f"Index of /{html_escape(relative_path_to_dir)}"
h1 = f"<h1>{index_of}</h1>"
index_list = []
dir_index = dir_path.iterdir()
for _file in sorted(dir_index):
# show file url as relative to static path
rel_path = _file.relative_to(self._directory).as_posix()
quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}")
# if file is a directory, add '/' to the end of the name
if _file.is_dir():
file_name = f"{_file.name}/"
else:
file_name = _file.name
index_list.append(
f'<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>'
)
ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
body = f"<body>\n{h1}\n{ul}\n</body>"
head_str = f"<head>\n<title>{index_of}</title>\n</head>"
html = f"<html>\n{head_str}\n{body}\n</html>"
return html
def __repr__(self) -> str:
name = "'" + self.name + "'" if self.name is not None else ""
return f"<StaticResource {name} {self._prefix} -> {self._directory!r}>"
class PrefixedSubAppResource(PrefixResource):
def __init__(self, prefix: str, app: "Application") -> None:
super().__init__(prefix)
self._app = app
self._add_prefix_to_resources(prefix)
def add_prefix(self, prefix: str) -> None:
super().add_prefix(prefix)
self._add_prefix_to_resources(prefix)
def _add_prefix_to_resources(self, prefix: str) -> None:
router = self._app.router
for resource in router.resources():
# Since the canonical path of a resource is about
# to change, we need to unindex it and then reindex
router.unindex_resource(resource)
resource.add_prefix(prefix)
router.index_resource(resource)
def url_for(self, *args: str, **kwargs: str) -> URL:
raise RuntimeError(".url_for() is not supported by sub-application root")
def get_info(self) -> _InfoDict:
return {"app": self._app, "prefix": self._prefix}
async def resolve(self, request: Request) -> _Resolve:
match_info = await self._app.router.resolve(request)
match_info.add_app(self._app)
if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
methods = match_info.http_exception.allowed_methods
else:
methods = set()
return match_info, methods
def __len__(self) -> int:
return len(self._app.router.routes())
def __iter__(self) -> Iterator[AbstractRoute]:
return iter(self._app.router.routes())
def __repr__(self) -> str:
return f"<PrefixedSubAppResource {self._prefix} -> {self._app!r}>"
class AbstractRuleMatching(abc.ABC):
@abc.abstractmethod # pragma: no branch
async def match(self, request: Request) -> bool:
"""Return bool if the request satisfies the criteria"""
@abc.abstractmethod # pragma: no branch
def get_info(self) -> _InfoDict:
"""Return a dict with additional info useful for introspection"""
@property
@abc.abstractmethod # pragma: no branch
def canonical(self) -> str:
"""Return a str"""
class Domain(AbstractRuleMatching):
re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")
def __init__(self, domain: str) -> None:
super().__init__()
self._domain = self.validation(domain)
@property
def canonical(self) -> str:
return self._domain
def validation(self, domain: str) -> str:
if not isinstance(domain, str):
raise TypeError("Domain must be str")
domain = domain.rstrip(".").lower()
if not domain:
raise ValueError("Domain cannot be empty")
elif "://" in domain:
raise ValueError("Scheme not supported")
url = URL("http://" + domain)
assert url.raw_host is not None
if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")):
raise ValueError("Domain not valid")
if url.port == 80:
return url.raw_host
return f"{url.raw_host}:{url.port}"
async def match(self, request: Request) -> bool:
host = request.headers.get(hdrs.HOST)
if not host:
return False
return self.match_domain(host)
def match_domain(self, host: str) -> bool:
return host.lower() == self._domain
def get_info(self) -> _InfoDict:
return {"domain": self._domain}
class MaskDomain(Domain):
re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")
def __init__(self, domain: str) -> None:
super().__init__(domain)
mask = self._domain.replace(".", r"\.").replace("*", ".*")
self._mask = re.compile(mask)
@property
def canonical(self) -> str:
return self._mask.pattern
def match_domain(self, host: str) -> bool:
return self._mask.fullmatch(host) is not None
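# --- Usage sketch (handler names are hypothetical) ---
# Domain and MaskDomain back Application.add_domain(), which wraps a
# sub-application in a MatchedSubAppResource (see below) so it is matched by
# Host header instead of by path prefix.
from aiohttp import web


async def api_root(request: web.Request) -> web.Response:
    return web.Response(text="api")


def build_app() -> web.Application:
    api = web.Application()
    api.router.add_get("/", api_root)
    app = web.Application()
    app.add_domain("api.example.com", api)  # exact match -> Domain
    # A wildcard such as "*.example.com" would use MaskDomain instead.
    return app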
class MatchedSubAppResource(PrefixedSubAppResource):
def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
AbstractResource.__init__(self)
self._prefix = ""
self._app = app
self._rule = rule
@property
def canonical(self) -> str:
return self._rule.canonical
def get_info(self) -> _InfoDict:
return {"app": self._app, "rule": self._rule}
async def resolve(self, request: Request) -> _Resolve:
if not await self._rule.match(request):
return None, set()
match_info = await self._app.router.resolve(request)
match_info.add_app(self._app)
if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
methods = match_info.http_exception.allowed_methods
else:
methods = set()
return match_info, methods
def __repr__(self) -> str:
return f"<MatchedSubAppResource -> {self._app!r}>"
class ResourceRoute(AbstractRoute):
"""A route with resource"""
def __init__(
self,
method: str,
handler: Handler | type[AbstractView],
resource: AbstractResource,
*,
expect_handler: _ExpectHandler | None = None,
) -> None:
super().__init__(
method, handler, expect_handler=expect_handler, resource=resource
)
def __repr__(self) -> str:
return f"<ResourceRoute [{self.method}] {self._resource} -> {self.handler!r}"
@property
def name(self) -> str | None:
if self._resource is None:
return None
return self._resource.name
def url_for(self, *args: str, **kwargs: str) -> URL:
"""Construct url for route with additional params."""
assert self._resource is not None
return self._resource.url_for(*args, **kwargs)
def get_info(self) -> _InfoDict:
assert self._resource is not None
return self._resource.get_info()
class SystemRoute(AbstractRoute):
def __init__(self, http_exception: HTTPException) -> None:
super().__init__(hdrs.METH_ANY, self._handle)
self._http_exception = http_exception
def url_for(self, *args: str, **kwargs: str) -> URL:
raise RuntimeError(".url_for() is not allowed for SystemRoute")
@property
def name(self) -> str | None:
return None
def get_info(self) -> _InfoDict:
return {"http_exception": self._http_exception}
async def _handle(self, request: Request) -> StreamResponse:
raise self._http_exception
@property
def status(self) -> int:
return self._http_exception.status
@property
def reason(self) -> str:
return self._http_exception.reason
def __repr__(self) -> str:
return f"<SystemRoute {self.status}: {self.reason}>"
class View(AbstractView):
async def _iter(self) -> StreamResponse:
if self.request.method not in hdrs.METH_ALL:
self._raise_allowed_methods()
method: Callable[[], Awaitable[StreamResponse]] | None = getattr(
self, self.request.method.lower(), None
)
if method is None:
self._raise_allowed_methods()
return await method()
def __await__(self) -> Generator[None, None, StreamResponse]:
return self._iter().__await__()
def _raise_allowed_methods(self) -> NoReturn:
allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
raise HTTPMethodNotAllowed(self.request.method, allowed_methods)
class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):
def __init__(self, resources: list[AbstractResource]) -> None:
self._resources = resources
def __len__(self) -> int:
return len(self._resources)
def __iter__(self) -> Iterator[AbstractResource]:
yield from self._resources
def __contains__(self, resource: object) -> bool:
return resource in self._resources
class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
def __init__(self, resources: list[AbstractResource]):
self._routes: list[AbstractRoute] = []
for resource in resources:
for route in resource:
self._routes.append(route)
def __len__(self) -> int:
return len(self._routes)
def __iter__(self) -> Iterator[AbstractRoute]:
yield from self._routes
def __contains__(self, route: object) -> bool:
return route in self._routes
class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
NAME_SPLIT_RE = re.compile(r"[.:-]")
HTTP_NOT_FOUND = HTTPNotFound()
def __init__(self) -> None:
super().__init__()
self._resources: list[AbstractResource] = []
self._named_resources: dict[str, AbstractResource] = {}
self._resource_index: dict[str, list[AbstractResource]] = {}
self._matched_sub_app_resources: list[MatchedSubAppResource] = []
async def resolve(self, request: Request) -> UrlMappingMatchInfo:
resource_index = self._resource_index
allowed_methods: set[str] = set()
# MatchedSubAppResource is primarily used to match on domain names
# (though custom rules could match on other things). This means that
# the traversal algorithm below can't be applied, and that we likely
# need to check these first so a sub app that defines the same path
# as a parent app will get priority if there's a domain match.
#
# For most cases we do not expect there to be many of these since
# currently they are only added by `.add_domain()`.
for resource in self._matched_sub_app_resources:
match_dict, allowed = await resource.resolve(request)
if match_dict is not None:
return match_dict
else:
allowed_methods |= allowed
# Walk the url parts looking for candidates. We walk the url backwards
# to ensure the most explicit match is found first. If there are multiple
# candidates for a given url part because there are multiple resources
# registered for the same canonical path, we resolve them in a linear
# fashion to ensure registration order is respected.
url_part = request.rel_url.path_safe
while url_part:
for candidate in resource_index.get(url_part, ()):
match_dict, allowed = await candidate.resolve(request)
if match_dict is not None:
return match_dict
else:
allowed_methods |= allowed
if url_part == "/":
break
url_part = url_part.rpartition("/")[0] or "/"
if allowed_methods:
return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods))
return MatchInfoError(self.HTTP_NOT_FOUND)
def __iter__(self) -> Iterator[str]:
return iter(self._named_resources)
def __len__(self) -> int:
return len(self._named_resources)
def __contains__(self, resource: object) -> bool:
return resource in self._named_resources
def __getitem__(self, name: str) -> AbstractResource:
return self._named_resources[name]
def resources(self) -> ResourcesView:
return ResourcesView(self._resources)
def routes(self) -> RoutesView:
return RoutesView(self._resources)
def named_resources(self) -> Mapping[str, AbstractResource]:
return MappingProxyType(self._named_resources)
def register_resource(self, resource: AbstractResource) -> None:
assert isinstance(
resource, AbstractResource
), f"Instance of AbstractResource class is required, got {resource!r}"
if self.frozen:
raise RuntimeError("Cannot register a resource into frozen router.")
name = resource.name
if name is not None:
parts = self.NAME_SPLIT_RE.split(name)
for part in parts:
if keyword.iskeyword(part):
raise ValueError(
f"Incorrect route name {name!r}, "
"python keywords cannot be used "
"for route name"
)
if not part.isidentifier():
raise ValueError(
f"Incorrect route name {name!r}, "
"the name should be a sequence of "
"python identifiers separated "
"by dash, dot or column"
)
if name in self._named_resources:
raise ValueError(
f"Duplicate {name!r}, "
f"already handled by {self._named_resources[name]!r}"
)
self._named_resources[name] = resource
self._resources.append(resource)
if isinstance(resource, MatchedSubAppResource):
# We cannot index match sub-app resources because they have match rules
self._matched_sub_app_resources.append(resource)
else:
self.index_resource(resource)
def _get_resource_index_key(self, resource: AbstractResource) -> str:
"""Return a key to index the resource in the resource index."""
if "{" in (index_key := resource.canonical):
# strip at the first { to allow for variables, and then
# rpartition at / to allow for variable parts in the path
# For example if the canonical path is `/core/locations{tail:.*}`
# the index key will be `/core` since index is based on the
# url parts split by `/`
index_key = index_key.partition("{")[0].rpartition("/")[0]
return index_key.rstrip("/") or "/"
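# Worked examples of the rule above (values follow from the code):
#   '/core/locations{tail:.*}' -> '/core'    (cut at '{', rpartition at '/')
#   '/users/{name}'            -> '/users'
#   '/{tail:.*}'               -> '/'        (empty key falls back to '/')
#   '/static'                  -> '/static'  (no '{' present)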
def index_resource(self, resource: AbstractResource) -> None:
"""Add a resource to the resource index."""
resource_key = self._get_resource_index_key(resource)
# There may be multiple resources for a canonical path
# so we keep them in a list to ensure that registration
# order is respected.
self._resource_index.setdefault(resource_key, []).append(resource)
def unindex_resource(self, resource: AbstractResource) -> None:
"""Remove a resource from the resource index."""
resource_key = self._get_resource_index_key(resource)
self._resource_index[resource_key].remove(resource)
def add_resource(self, path: str, *, name: str | None = None) -> Resource:
if path and not path.startswith("/"):
raise ValueError("path should be started with / or be empty")
# Reuse last added resource if path and name are the same
if self._resources:
resource = self._resources[-1]
if resource.name == name and resource.raw_match(path):
return cast(Resource, resource)
if not ("{" in path or "}" in path or ROUTE_RE.search(path)):
resource = PlainResource(path, name=name)
self.register_resource(resource)
return resource
resource = DynamicResource(path, name=name)
self.register_resource(resource)
return resource
def add_route(
self,
method: str,
path: str,
handler: Handler | type[AbstractView],
*,
name: str | None = None,
expect_handler: _ExpectHandler | None = None,
) -> AbstractRoute:
resource = self.add_resource(path, name=name)
return resource.add_route(method, handler, expect_handler=expect_handler)
def add_static(
self,
prefix: str,
path: PathLike,
*,
name: str | None = None,
expect_handler: _ExpectHandler | None = None,
chunk_size: int = 256 * 1024,
show_index: bool = False,
follow_symlinks: bool = False,
append_version: bool = False,
) -> StaticResource:
"""Add static files view.
prefix - url prefix
path - folder with files
"""
assert prefix.startswith("/")
if prefix.endswith("/"):
prefix = prefix[:-1]
resource = StaticResource(
prefix,
path,
name=name,
expect_handler=expect_handler,
chunk_size=chunk_size,
show_index=show_index,
follow_symlinks=follow_symlinks,
append_version=append_version,
)
self.register_resource(resource)
return resource
def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method HEAD."""
return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)
def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method OPTIONS."""
return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
def add_get(
self,
path: str,
handler: Handler,
*,
name: str | None = None,
allow_head: bool = True,
**kwargs: Any,
) -> AbstractRoute:
"""Shortcut for add_route with method GET.
If allow_head is true, another
route is added allowing head requests to the same endpoint.
"""
resource = self.add_resource(path, name=name)
if allow_head:
resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
return resource.add_route(hdrs.METH_GET, handler, **kwargs)
def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method POST."""
return self.add_route(hdrs.METH_POST, path, handler, **kwargs)
def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method PUT."""
return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)
def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method PATCH."""
return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)
def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method DELETE."""
return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)
def add_view(
self, path: str, handler: type[AbstractView], **kwargs: Any
) -> AbstractRoute:
"""Shortcut for add_route with ANY methods for a class-based view."""
return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
def freeze(self) -> None:
super().freeze()
for resource in self._resources:
resource.freeze()
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> list[AbstractRoute]:
"""Append routes to route table.
Parameter should be a sequence of RouteDef objects.
Returns a list of registered AbstractRoute instances.
"""
registered_routes = []
for route_def in routes:
registered_routes.extend(route_def.register(self))
return registered_routes
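# --- Usage sketch (handler name is hypothetical) ---
# add_routes() accepts the RouteDef helpers from aiohttp.web (web.get,
# web.post, ...), each of which registers itself against this dispatcher:
from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="hello")


def build_dispatcher() -> web.UrlDispatcher:
    router = web.UrlDispatcher()
    router.add_routes([web.get("/", hello), web.post("/echo", hello)])
    return router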
def _quote_path(value: str) -> str:
return URL.build(path=value, encoded=False).raw_path
def _unquote_path_safe(value: str) -> str:
if "%" not in value:
return value
return value.replace("%2F", "/").replace("%25", "%")
def _requote_path(value: str) -> str:
# Quote non-ascii characters and other characters which must be quoted,
# but preserve existing %-sequences.
result = _quote_path(value)
if "%" in value:
result = result.replace("%25", "%")
return result
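# Worked examples (behaviour follows directly from the helpers above):
#   _unquote_path_safe("a%2Fb%25c") == "a/b%c"  # only %2F and %25 are decoded
#   _unquote_path_safe("plain") == "plain"      # fast path: no '%' present
#   _requote_path("/path with spaces")          # percent-encodes via yarl,
#                                               # keeping existing %-sequences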
|
import asyncio
import functools
import os
import pathlib
import socket
import sys
from collections.abc import Generator
from stat import S_IFIFO, S_IMODE
from typing import Any, NoReturn
import pytest
import yarl
from aiohttp import web
from aiohttp.pytest_plugin import AiohttpClient
from aiohttp.web_urldispatcher import Resource, SystemRoute
@pytest.mark.parametrize(
"show_index,status,prefix,request_path,data",
[
pytest.param(False, 403, "/", "/", None, id="index_forbidden"),
pytest.param(
True,
200,
"/",
"/",
b"<html>\n<head>\n<title>Index of /.</title>\n</head>\n<body>\n<h1>Index of"
b' /.</h1>\n<ul>\n<li><a href="/my_dir">my_dir/</a></li>\n<li><a href="/my_file">'
b"my_file</a></li>\n</ul>\n</body>\n</html>",
),
pytest.param(
True,
200,
"/static",
"/static",
b"<html>\n<head>\n<title>Index of /.</title>\n</head>\n<body>\n<h1>Index of"
b' /.</h1>\n<ul>\n<li><a href="/static/my_dir">my_dir/</a></li>\n<li><a href="'
b'/static/my_file">my_file</a></li>\n</ul>\n</body>\n</html>',
id="index_static",
),
pytest.param(
True,
200,
"/static",
"/static/my_dir",
b"<html>\n<head>\n<title>Index of /my_dir</title>\n</head>\n<body>\n<h1>"
b'Index of /my_dir</h1>\n<ul>\n<li><a href="/static/my_dir/my_file_in_dir">'
b"my_file_in_dir</a></li>\n</ul>\n</body>\n</html>",
id="index_subdir",
),
],
)
async def test_access_root_of_static_handler(
tmp_path: pathlib.Path,
aiohttp_client: AiohttpClient,
show_index: bool,
status: int,
prefix: str,
request_path: str,
data: bytes | None,
) -> None:
# Tests the operation of the static file server.
# Try to access the root of the static file server, and make
# sure that correct HTTP statuses are returned depending on whether the
# directory index should be shown or not.
my_file = tmp_path / "my_file"
my_dir = tmp_path / "my_dir"
my_dir.mkdir()
my_file_in_dir = my_dir / "my_file_in_dir"
with my_file.open("w") as fw:
fw.write("hello")
with my_file_in_dir.open("w") as fw:
fw.write("world")
app = web.Application()
# Register global static route:
app.router.add_static(prefix, str(tmp_path), show_index=show_index)
client = await aiohttp_client(app)
# Request the root of the static directory.
async with await client.get(request_path) as r:
assert r.status == status
if data:
assert r.headers["Content-Type"] == "text/html; charset=utf-8"
read_ = await r.read()
assert read_ == data
@pytest.mark.internal # Dependent on filesystem
@pytest.mark.skipif(
not sys.platform.startswith("linux"),
reason="Invalid filenames on some filesystems (like Windows)",
)
@pytest.mark.parametrize(
"show_index,status,prefix,request_path,data",
[
pytest.param(False, 403, "/", "/", None, id="index_forbidden"),
pytest.param(
True,
200,
"/",
"/",
b"<html>\n<head>\n<title>Index of /.</title>\n</head>\n<body>\n<h1>Index of"
b' /.</h1>\n<ul>\n<li><a href="/%3Cimg%20src=0%20onerror=alert(1)%3E.dir">&l'
b't;img src=0 onerror=alert(1)>.dir/</a></li>\n<li><a href="/%3Cimg%20sr'
b'c=0%20onerror=alert(1)%3E.txt"><img src=0 onerror=alert(1)>.txt</a></l'
b"i>\n</ul>\n</body>\n</html>",
),
pytest.param(
True,
200,
"/static",
"/static",
b"<html>\n<head>\n<title>Index of /.</title>\n</head>\n<body>\n<h1>Index of"
b' /.</h1>\n<ul>\n<li><a href="/static/%3Cimg%20src=0%20onerror=alert(1)%3E.'
b'dir"><img src=0 onerror=alert(1)>.dir/</a></li>\n<li><a href="/stat'
b'ic/%3Cimg%20src=0%20onerror=alert(1)%3E.txt"><img src=0 onerror=alert(1)&'
b"gt;.txt</a></li>\n</ul>\n</body>\n</html>",
id="index_static",
),
pytest.param(
True,
200,
"/static",
"/static/<img src=0 onerror=alert(1)>.dir",
b"<html>\n<head>\n<title>Index of /<img src=0 onerror=alert(1)>.dir</t"
b"itle>\n</head>\n<body>\n<h1>Index of /<img src=0 onerror=alert(1)>.di"
b'r</h1>\n<ul>\n<li><a href="/static/%3Cimg%20src=0%20onerror=alert(1)%3E.di'
b'r/my_file_in_dir">my_file_in_dir</a></li>\n</ul>\n</body>\n</html>',
id="index_subdir",
),
],
)
async def test_access_root_of_static_handler_xss(
tmp_path: pathlib.Path,
aiohttp_client: AiohttpClient,
show_index: bool,
status: int,
prefix: str,
request_path: str,
data: bytes | None,
) -> None:
# Tests the operation of the static file server.
# Try to access the root of the static file server, and make
# sure that correct HTTP statuses are returned depending on whether the
# directory index should be shown or not.
# Ensure that HTML in file names is escaped.
# Ensure that links are URL-quoted.
my_file = tmp_path / "<img src=0 onerror=alert(1)>.txt"
my_dir = tmp_path / "<img src=0 onerror=alert(1)>.dir"
my_dir.mkdir()
my_file_in_dir = my_dir / "my_file_in_dir"
with my_file.open("w") as fw:
fw.write("hello")
with my_file_in_dir.open("w") as fw:
fw.write("world")
app = web.Application()
# Register global static route:
app.router.add_static(prefix, str(tmp_path), show_index=show_index)
client = await aiohttp_client(app)
# Request the root of the static directory.
async with await client.get(request_path) as r:
assert r.status == status
if data:
assert r.headers["Content-Type"] == "text/html; charset=utf-8"
read_ = await r.read()
assert read_ == data
async def test_follow_symlink(
tmp_path: pathlib.Path, aiohttp_client: AiohttpClient
) -> None:
# Tests access to a symlink inside the static folder
data = "hello world"
my_dir_path = tmp_path / "my_dir"
my_dir_path.mkdir()
my_file_path = my_dir_path / "my_file_in_dir"
with my_file_path.open("w") as fw:
fw.write(data)
my_symlink_path = tmp_path / "my_symlink"
pathlib.Path(str(my_symlink_path)).symlink_to(str(my_dir_path), True)
app = web.Application()
# Register global static route:
app.router.add_static("/", str(tmp_path), follow_symlinks=True)
client = await aiohttp_client(app)
# Request the root of the static directory.
r = await client.get("/my_symlink/my_file_in_dir")
assert r.status == 200
assert (await r.text()) == data
async def test_follow_symlink_directory_traversal(
tmp_path: pathlib.Path, aiohttp_client: AiohttpClient
) -> None:
# Tests that follow_symlinks does not allow directory traversal
data = "private"
private_file = tmp_path / "private_file"
private_file.write_text(data)
safe_path = tmp_path / "safe_dir"
safe_path.mkdir()
app = web.Application()
# Register global static route:
app.router.add_static("/", str(safe_path), follow_symlinks=True)
client = await aiohttp_client(app)
await client.start_server()
# We need to use a raw socket to test this, as the client will normalize
# the path before sending it to the server.
reader, writer = await asyncio.open_connection(client.host, client.port)
writer.write(b"GET /../private_file HTTP/1.1\r\n\r\n")
response = await reader.readuntil(b"\r\n\r\n")
assert b"404 Not Found" in response
writer.close()
await writer.wait_closed()
await client.close()
async def test_follow_symlink_directory_traversal_after_normalization(
tmp_path: pathlib.Path, aiohttp_client: AiohttpClient
) -> None:
# Tests that follow_symlinks does not allow directory traversal
# after normalization
#
# Directory structure
# |-- secret_dir
# | |-- private_file (should never be accessible)
# | |-- symlink_target_dir
# | |-- symlink_target_file (should be accessible via the my_symlink symlink)
# | |-- sandbox_dir
# | |-- my_symlink -> symlink_target_dir
#
secret_path = tmp_path / "secret_dir"
secret_path.mkdir()
# This file is below the symlink target and should not be reachable
private_file = secret_path / "private_file"
private_file.write_text("private")
symlink_target_path = secret_path / "symlink_target_dir"
symlink_target_path.mkdir()
sandbox_path = symlink_target_path / "sandbox_dir"
sandbox_path.mkdir()
# This file should be reachable via the symlink
symlink_target_file = symlink_target_path / "symlink_target_file"
symlink_target_file.write_text("readable")
my_symlink_path = sandbox_path / "my_symlink"
pathlib.Path(str(my_symlink_path)).symlink_to(str(symlink_target_path), True)
app = web.Application()
# Register global static route:
app.router.add_static("/", str(sandbox_path), follow_symlinks=True)
client = await aiohttp_client(app)
await client.start_server()
# We need to use a raw socket to test this, as the client will normalize
# the path before sending it to the server.
reader, writer = await asyncio.open_connection(client.host, client.port)
writer.write(b"GET /my_symlink/../private_file HTTP/1.1\r\n\r\n")
response = await reader.readuntil(b"\r\n\r\n")
assert b"404 Not Found" in response
writer.close()
await writer.wait_closed()
reader, writer = await asyncio.open_connection(client.host, client.port)
writer.write(b"GET /my_symlink/symlink_target_file HTTP/1.1\r\n\r\n")
response = await reader.readuntil(b"\r\n\r\n")
assert b"200 OK" in response
response = await reader.readuntil(b"readable")
assert response == b"readable"
writer.close()
await writer.wait_closed()
await client.close()
@pytest.mark.parametrize(
"dir_name,filename,data",
[
("", "test file.txt", "test text"),
("test dir name", "test dir file .txt", "test text file folder"),
],
)
async def test_access_to_the_file_with_spaces(
tmp_path: pathlib.Path,
aiohttp_client: AiohttpClient,
dir_name: str,
filename: str,
data: str,
) -> None:
# Checks operation of static files with spaces
my_dir_path = tmp_path / dir_name
if my_dir_path != tmp_path:
my_dir_path.mkdir()
my_file_path = my_dir_path / filename
with my_file_path.open("w") as fw:
fw.write(data)
app = web.Application()
url = "/" + str(pathlib.Path(dir_name, filename))
app.router.add_static("/", str(tmp_path))
client = await aiohttp_client(app)
r = await client.get(url)
assert r.status == 200
assert (await r.text()) == data
async def test_access_non_existing_resource(
tmp_path: pathlib.Path, aiohttp_client: AiohttpClient
) -> None:
# Tests accessing a non-existing resource.
# Try to access a non-existing resource and make sure that a 404 HTTP
# status is returned.
app = web.Application()
# Register global static route:
app.router.add_static("/", str(tmp_path), show_index=True)
client = await aiohttp_client(app)
# Request the root of the static directory.
async with client.get("/non_existing_resource") as r:
assert r.status == 404
@pytest.mark.parametrize(
"registered_path,request_url",
[
("/a:b", "/a:b"),
("/a@b", "/a@b"),
("/a:b", "/a%3Ab"),
],
)
async def test_url_escaping(
aiohttp_client: AiohttpClient, registered_path: str, request_url: str
) -> None:
# Tests that a resource whose path contains characters like ":" or "@"
# matches both literal and percent-encoded request URLs.
app = web.Application()
async def handler(request: web.Request) -> web.Response:
return web.Response()
app.router.add_get(registered_path, handler)
client = await aiohttp_client(app)
async with client.get(request_url) as r:
assert r.status == 200
async def test_handler_metadata_persistence() -> None:
# Tests accessing metadata of a handler after registering it on the app
# router.
app = web.Application()
async def async_handler(request: web.Request) -> web.Response:
"""Doc"""
assert False
app.router.add_get("/async", async_handler)
for resource in app.router.resources():
for route in resource:
assert route.handler.__doc__ == "Doc"
@pytest.mark.skipif(
sys.platform.startswith("win32"), reason="Cannot remove read access on Windows"
)
@pytest.mark.parametrize("file_request", ["", "my_file.txt"])
async def test_static_directory_without_read_permission(
tmp_path: pathlib.Path, aiohttp_client: AiohttpClient, file_request: str
) -> None:
"""Test static directory without read permission receives forbidden response."""
my_dir = tmp_path / "my_dir"
my_dir.mkdir()
my_dir.chmod(0o000)
app = web.Application()
app.router.add_static("/", str(tmp_path), show_index=True)
client = await aiohttp_client(app)
async with client.get(f"/{my_dir.name}/{file_request}") as r:
assert r.status == 403
@pytest.mark.parametrize("file_request", ["", "my_file.txt"])
async def test_static_directory_with_mock_permission_error(
monkeypatch: pytest.MonkeyPatch,
tmp_path: pathlib.Path,
aiohttp_client: AiohttpClient,
file_request: str,
) -> None:
"""Test static directory with mock permission errors receives forbidden response."""
my_dir = tmp_path / "my_dir"
my_dir.mkdir()
real_iterdir = pathlib.Path.iterdir
real_is_dir = pathlib.Path.is_dir
def mock_iterdir(self: pathlib.Path) -> Generator[pathlib.Path, None, None]:
if my_dir.samefile(self):
raise PermissionError()
return real_iterdir(self)
def mock_is_dir(self: pathlib.Path, **kwargs: Any) -> bool:
if my_dir.samefile(self.parent):
raise PermissionError()
return real_is_dir(self, **kwargs)
monkeypatch.setattr("pathlib.Path.iterdir", mock_iterdir)
monkeypatch.setattr("pathlib.Path.is_dir", mock_is_dir)
app = web.Application()
app.router.add_static("/", str(tmp_path), show_index=True)
client = await aiohttp_client(app)
async with client.get("/") as r:
assert r.status == 200
async with client.get(f"/{my_dir.name}/{file_request}") as r:
assert r.status == 403
@pytest.mark.skipif(
sys.platform.startswith("win32"), reason="Cannot remove read access on Windows"
)
async def test_static_file_without_read_permission(
tmp_path: pathlib.Path, aiohttp_client: AiohttpClient
) -> None:
"""Test static file without read permission receives forbidden response."""
my_file = tmp_path / "my_file.txt"
my_file.write_text("secret")
my_file.chmod(0o000)
app = web.Application()
app.router.add_static("/", str(tmp_path))
client = await aiohttp_client(app)
async with client.get(f"/{my_file.name}") as r:
assert r.status == 403
async def test_static_file_with_mock_permission_error(
monkeypatch: pytest.MonkeyPatch,
tmp_path: pathlib.Path,
aiohttp_client: AiohttpClient,
) -> None:
"""Test static file with mock permission errors receives forbidden response."""
my_file = tmp_path / "my_file.txt"
my_file.write_text("secret")
my_readable = tmp_path / "my_readable.txt"
my_readable.write_text("info")
real_open = pathlib.Path.open
def mock_open(self: pathlib.Path, *args: Any, **kwargs: Any) -> Any:
if my_file.samefile(self):
raise PermissionError()
return real_open(self, *args, **kwargs)
monkeypatch.setattr("pathlib.Path.open", mock_open)
app = web.Application()
app.router.add_static("/", str(tmp_path))
client = await aiohttp_client(app)
# Test the mock only applies to my_file, then test the permission error.
async with client.get(f"/{my_readable.name}") as r:
assert r.status == 200
async with client.get(f"/{my_file.name}") as r:
assert r.status == 403
async def test_access_symlink_loop(
tmp_path: pathlib.Path, aiohttp_client: AiohttpClient
) -> None:
# Tests access to a looped symlink, which cannot be resolved.
my_dir_path = tmp_path / "my_symlink"
pathlib.Path(str(my_dir_path)).symlink_to(str(my_dir_path), True)
app = web.Application()
# Register global static route:
app.router.add_static("/", str(tmp_path), show_index=True)
client = await aiohttp_client(app)
# Request the root of the static directory.
async with client.get("/" + my_dir_path.name) as r:
assert r.status == 404
async def test_access_compressed_file_as_symlink(
tmp_path: pathlib.Path, aiohttp_client: AiohttpClient
) -> None:
"""Test that compressed file variants as symlinks are ignored."""
private_file = tmp_path / "private.txt"
private_file.write_text("private info")
www_dir = tmp_path / "www"
www_dir.mkdir()
gz_link = www_dir / "file.txt.gz"
gz_link.symlink_to(f"../{private_file.name}")
app = web.Application()
app.router.add_static("/", www_dir)
client = await aiohttp_client(app)
# Symlink should be ignored; response reflects missing uncompressed file.
async with client.get(f"/{gz_link.stem}", auto_decompress=False) as resp:
assert resp.status == 404
# Again the symlink is ignored, and then the uncompressed file is served.
txt_file = gz_link.with_suffix("")
txt_file.write_text("public data")
resp = await client.get(f"/{txt_file.name}")
assert resp.status == 200
assert resp.headers.get("Content-Encoding") is None
assert resp.content_type == "text/plain"
assert await resp.text() == "public data"
resp.release()
await client.close()
async def test_access_special_resource(
unix_sockname: str, aiohttp_client: AiohttpClient
) -> None:
"""Test access to non-regular files is forbidden using a UNIX domain socket."""
if not getattr(socket, "AF_UNIX", None):
pytest.skip("UNIX domain sockets not supported")
my_special = pathlib.Path(unix_sockname)
tmp_path = my_special.parent
my_socket = socket.socket(socket.AF_UNIX)
my_socket.bind(str(my_special))
assert my_special.is_socket()
app = web.Application()
app.router.add_static("/", str(tmp_path))
client = await aiohttp_client(app)
async with client.get(f"/{my_special.name}") as r:
assert r.status == 403
my_socket.close()
async def test_access_mock_special_resource(
monkeypatch: pytest.MonkeyPatch,
tmp_path: pathlib.Path,
aiohttp_client: AiohttpClient,
) -> None:
"""Test access to non-regular files is forbidden using a mock FIFO."""
my_special = tmp_path / "my_special"
my_special.touch()
real_result = my_special.stat()
real_stat = os.stat
def mock_stat(path: Any, **kwargs: Any) -> os.stat_result:
s = real_stat(path, **kwargs)
if os.path.samestat(s, real_result):
mock_mode = S_IFIFO | S_IMODE(s.st_mode)
s = os.stat_result([mock_mode] + list(s)[1:])
return s
monkeypatch.setattr("pathlib.Path.stat", mock_stat)
monkeypatch.setattr("os.stat", mock_stat)
app = web.Application()
app.router.add_static("/", str(tmp_path))
client = await aiohttp_client(app)
async with client.get(f"/{my_special.name}") as r:
assert r.status == 403
async def test_partially_applied_handler(aiohttp_client: AiohttpClient) -> None:
app = web.Application()
async def handler(data: bytes, request: web.Request) -> web.Response:
return web.Response(body=data)
app.router.add_route("GET", "/", functools.partial(handler, b"hello"))
client = await aiohttp_client(app)
r = await client.get("/")
data = await r.read()
assert data == b"hello"
async def test_static_head(
tmp_path: pathlib.Path, aiohttp_client: AiohttpClient
) -> None:
# Test HEAD on static route
my_file_path = tmp_path / "test.txt"
with my_file_path.open("wb") as fw:
fw.write(b"should_not_see_this\n")
app = web.Application()
app.router.add_static("/", str(tmp_path))
client = await aiohttp_client(app)
async with client.head("/test.txt") as r:
assert r.status == 200
# Check that there is no content sent (see #4809). This can't easily be
# done with aiohttp_client because the buffering can consume the content.
reader, writer = await asyncio.open_connection(client.host, client.port)
writer.write(b"HEAD /test.txt HTTP/1.1\r\n")
writer.write(b"Host: localhost\r\n")
writer.write(b"Connection: close\r\n")
writer.write(b"\r\n")
while await reader.readline() != b"\r\n":
pass
content = await reader.read()
writer.close()
assert content == b""
def test_system_route() -> None:
route = SystemRoute(web.HTTPCreated(reason="test"))
with pytest.raises(RuntimeError):
route.url_for()
assert route.name is None
assert route.resource is None
assert "<SystemRoute 201: test>" == repr(route)
assert 201 == route.status
assert "test" == route.reason
async def test_allow_head(aiohttp_client: AiohttpClient) -> None:
# Test allow_head on routes.
app = web.Application()
async def handler(request: web.Request) -> web.Response:
return web.Response()
app.router.add_get("/a", handler, name="a")
app.router.add_get("/b", handler, allow_head=False, name="b")
client = await aiohttp_client(app)
async with client.get("/a") as r:
assert r.status == 200
async with client.head("/a") as r:
assert r.status == 200
async with client.get("/b") as r:
assert r.status == 200
async with client.head("/b") as r:
assert r.status == 405
@pytest.mark.parametrize(
"path",
(
"/a",
"/{a}",
"/{a:.*}",
),
)
def test_reuse_last_added_resource(path: str) -> None:
# Test that adding a route with the same name and path of the last added
# resource doesn't create a new resource.
app = web.Application()
async def handler(request: web.Request) -> web.Response:
assert False
app.router.add_get(path, handler, name="a")
app.router.add_post(path, handler, name="a")
assert len(app.router.resources()) == 1
def test_resource_raw_match() -> None:
app = web.Application()
async def handler(request: web.Request) -> web.Response:
assert False
route = app.router.add_get("/a", handler, name="a")
assert route.resource is not None
assert route.resource.raw_match("/a")
route = app.router.add_get("/{b}", handler, name="b")
assert route.resource is not None
assert route.resource.raw_match("/{b}")
resource = app.router.add_static("/static", ".")
assert not resource.raw_match("/static")
async def test_add_view(aiohttp_client: AiohttpClient) -> None:
app = web.Application()
class MyView(web.View):
async def get(self) -> web.Response:
return web.Response()
async def post(self) -> web.Response:
return web.Response()
app.router.add_view("/a", MyView)
client = await aiohttp_client(app)
async with client.get("/a") as r:
assert r.status == 200
async with client.post("/a") as r:
assert r.status == 200
async with client.put("/a") as r:
assert r.status == 405
async def test_decorate_view(aiohttp_client: AiohttpClient) -> None:
routes = web.RouteTableDef()
@routes.view("/a")
class MyView(web.View):
async def get(self) -> web.Response:
return web.Response()
async def post(self) -> web.Response:
return web.Response()
app = web.Application()
app.router.add_routes(routes)
client = await aiohttp_client(app)
async with client.get("/a") as r:
assert r.status == 200
async with client.post("/a") as r:
assert r.status == 200
async with client.put("/a") as r:
assert r.status == 405
async def test_web_view(aiohttp_client: AiohttpClient) -> None:
app = web.Application()
class MyView(web.View):
async def get(self) -> web.Response:
return web.Response()
async def post(self) -> web.Response:
return web.Response()
app.router.add_routes([web.view("/a", MyView)])
client = await aiohttp_client(app)
async with client.get("/a") as r:
assert r.status == 200
async with client.post("/a") as r:
assert r.status == 200
async with client.put("/a") as r:
assert r.status == 405
async def test_static_absolute_url(
aiohttp_client: AiohttpClient, tmp_path: pathlib.Path
) -> None:
# The requested url is an absolute name like
# /static/\\machine_name\c$ or /static/D:\path,
# pointing somewhere entirely outside the static dir.
app = web.Application()
file_path = tmp_path / "file.txt"
file_path.write_text("sample text", "ascii")
here = pathlib.Path(__file__).parent
app.router.add_static("/static", here)
client = await aiohttp_client(app)
async with client.get("/static/" + str(file_path.resolve())) as resp:
assert resp.status == 403
async def test_for_issue_5250(
aiohttp_client: AiohttpClient, tmp_path: pathlib.Path
) -> None:
app = web.Application()
app.router.add_static("/foo", tmp_path)
async def get_foobar(request: web.Request) -> web.Response:
return web.Response(body="success!")
app.router.add_get("/foobar", get_foobar)
client = await aiohttp_client(app)
async with await client.get("/foobar") as resp:
assert resp.status == 200
assert (await resp.text()) == "success!"
@pytest.mark.parametrize(
("route_definition", "urlencoded_path", "expected_http_resp_status"),
(
("/467,802,24834/hello", "/467%2C802%2C24834/hello", 200),
("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467%2C802%2C24834/hello", 200),
("/467,802,24834/hello", "/467,802,24834/hello", 200),
("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467,802,24834/hello", 200),
("/1%2C3/hello", "/1%2C3/hello", 404),
),
)
async def test_decoded_url_match(
aiohttp_client: AiohttpClient,
route_definition: str,
urlencoded_path: str,
expected_http_resp_status: int,
) -> None:
app = web.Application()
async def handler(request: web.Request) -> web.Response:
return web.Response()
app.router.add_get(route_definition, handler)
client = await aiohttp_client(app)
async with client.get(yarl.URL(urlencoded_path, encoded=True)) as resp:
assert resp.status == expected_http_resp_status
async def test_decoded_raw_match_regex(aiohttp_client: AiohttpClient) -> None:
"""Verify that raw_match only matches decoded url."""
app = web.Application()
async def handler(request: web.Request) -> NoReturn:
assert False
app.router.add_get("/467%2C802%2C24834%2C24952%2C25362%2C40574/hello", handler)
client = await aiohttp_client(app)
async with client.get(
yarl.URL("/467%2C802%2C24834%2C24952%2C25362%2C40574/hello", encoded=True)
) as resp:
assert resp.status == 404 # should only match decoded url
async def test_order_is_preserved(aiohttp_client: AiohttpClient) -> None:
"""Test route order is preserved.
Note that fixed/static paths are always preferred over a regex path.
"""
app = web.Application()
async def handler(request: web.Request) -> web.Response:
assert isinstance(request.match_info._route.resource, Resource)
return web.Response(text=request.match_info._route.resource.canonical)
app.router.add_get("/first/x/{b}/", handler)
app.router.add_get(r"/first/{x:.*/b}", handler)
app.router.add_get(r"/second/{user}/info", handler)
app.router.add_get("/second/bob/info", handler)
app.router.add_get("/third/bob/info", handler)
app.router.add_get(r"/third/{user}/info", handler)
app.router.add_get(r"/forth/{name:\d+}", handler)
app.router.add_get("/forth/42", handler)
app.router.add_get("/fifth/42", handler)
app.router.add_get(r"/fifth/{name:\d+}", handler)
client = await aiohttp_client(app)
r = await client.get("/first/x/b/")
assert r.status == 200
assert await r.text() == "/first/x/{b}/"
r = await client.get("/second/frank/info")
assert r.status == 200
assert await r.text() == "/second/{user}/info"
# Fixed/static paths are always preferred over regex paths
r = await client.get("/second/bob/info")
assert r.status == 200
assert await r.text() == "/second/bob/info"
r = await client.get("/third/bob/info")
assert r.status == 200
assert await r.text() == "/third/bob/info"
r = await client.get("/third/frank/info")
assert r.status == 200
assert await r.text() == "/third/{user}/info"
r = await client.get("/forth/21")
assert r.status == 200
assert await r.text() == "/forth/{name}"
# Fixed/static paths are always preferred over regex paths
r = await client.get("/forth/42")
assert r.status == 200
assert await r.text() == "/forth/42"
r = await client.get("/fifth/21")
assert r.status == 200
assert await r.text() == "/fifth/{name}"
r = await client.get("/fifth/42")
assert r.status == 200
assert await r.text() == "/fifth/42"
async def test_url_with_many_slashes(aiohttp_client: AiohttpClient) -> None:
app = web.Application()
class MyView(web.View):
async def get(self) -> web.Response:
return web.Response()
app.router.add_routes([web.view("/a", MyView)])
client = await aiohttp_client(app)
async with client.get("///a") as r:
assert r.status == 200
async def test_subapp_domain_routing_same_path(aiohttp_client: AiohttpClient) -> None:
"""Regression test for #11665."""
app = web.Application()
sub_app = web.Application()
async def mainapp_handler(request: web.Request) -> web.Response:
assert False
async def subapp_handler(request: web.Request) -> web.Response:
return web.Response(text="SUBAPP")
app.router.add_get("/", mainapp_handler)
sub_app.router.add_get("/", subapp_handler)
app.add_domain("different.example.com", sub_app)
client = await aiohttp_client(app)
async with client.get("/", headers={"Host": "different.example.com"}) as r:
assert r.status == 200
result = await r.text()
assert result == "SUBAPP"
async def test_route_with_regex(aiohttp_client: AiohttpClient) -> None:
"""Test a route with a regex preceded by a fixed string."""
app = web.Application()
async def handler(request: web.Request) -> web.Response:
assert isinstance(request.match_info._route.resource, Resource)
return web.Response(text=request.match_info._route.resource.canonical)
app.router.add_get("/core/locations{tail:.*}", handler)
client = await aiohttp_client(app)
r = await client.get("/core/locations/tail/here")
assert r.status == 200
assert await r.text() == "/core/locations{tail}"
r = await client.get("/core/locations_tail_here")
assert r.status == 200
assert await r.text() == "/core/locations{tail}"
r = await client.get("/core/locations_tail;id=abcdef")
assert r.status == 200
assert await r.text() == "/core/locations{tail}"
|
./temp_repos/aiohttp/aiohttp/web_urldispatcher.py
|
./temp_repos/aiohttp/tests/test_web_urldispatcher.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class '_InfoDict'.
Context:
- Class Name: _InfoDict
- Dependencies to Mock: match_dict, path, app, handler, directory, resources, http_exception, resource, route, domain, method, rule, prefix
- Key Imports: yarl, web_request, keyword, web_response, web_app, pathlib, html, hashlib, os, helpers
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
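A minimal sketch of such a test follows (illustrative only; it assumes
_InfoDict is the TypedDict-style mapping defined in
aiohttp.web_urldispatcher, so at runtime it behaves like a plain dict and
the listed dependencies can be supplied as MagicMock values):

import unittest
from unittest.mock import MagicMock

from aiohttp.web_urldispatcher import _InfoDict


class TestInfoDict(unittest.TestCase):
    def test_construction_with_mocked_dependencies(self) -> None:
        # Arrange: mock the collaborators named in the spec.
        app = MagicMock(name="app")
        rule = MagicMock(name="rule")
        http_exception = MagicMock(name="http_exception")
        # Act: build the info dict with a subset of its optional keys.
        info: _InfoDict = {
            "path": "/static",
            "app": app,
            "rule": rule,
            "http_exception": http_exception,
        }
        # Assert: values round-trip and unset optional keys stay absent.
        self.assertEqual(info["path"], "/static")
        self.assertIs(info["app"], app)
        self.assertNotIn("domain", info)

    def test_missing_key_lookup_fails(self) -> None:
        # Arrange: only one key present.
        info: _InfoDict = {"prefix": "/api"}
        # Act / Assert: absent keys raise KeyError like any dict.
        with self.assertRaises(KeyError):
            info["directory"]


if __name__ == "__main__":
    unittest.main()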
|
_InfoDict
|
python
|
"""Various helper functions"""
import asyncio
import base64
import binascii
import contextlib
import dataclasses
import datetime
import enum
import functools
import inspect
import netrc
import os
import platform
import re
import sys
import time
import warnings
import weakref
from collections import namedtuple
from collections.abc import Callable, Iterable, Iterator, Mapping
from contextlib import suppress
from email.message import EmailMessage
from email.parser import HeaderParser
from email.policy import HTTP
from email.utils import parsedate
from http.cookies import SimpleCookie
from math import ceil
from pathlib import Path
from types import MappingProxyType, TracebackType
from typing import (
TYPE_CHECKING,
Any,
ContextManager,
Generic,
Optional,
Protocol,
TypeVar,
Union,
final,
get_args,
overload,
)
from urllib.parse import quote
from urllib.request import getproxies, proxy_bypass
from multidict import CIMultiDict, MultiDict, MultiDictProxy, MultiMapping
from propcache.api import under_cached_property as reify
from yarl import URL
from . import hdrs
from .log import client_logger
from .typedefs import PathLike # noqa
if sys.version_info >= (3, 11):
import asyncio as async_timeout
else:
import async_timeout
if TYPE_CHECKING:
from dataclasses import dataclass as frozen_dataclass_decorator
else:
frozen_dataclass_decorator = functools.partial(
dataclasses.dataclass, frozen=True, slots=True
)
__all__ = ("BasicAuth", "ChainMapProxy", "ETag", "frozen_dataclass_decorator", "reify")
COOKIE_MAX_LENGTH = 4096
_T = TypeVar("_T")
_S = TypeVar("_S")
_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
sentinel = _SENTINEL.sentinel
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
EMPTY_BODY_STATUS_CODES = frozenset((204, 304, *range(100, 200)))
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
EMPTY_BODY_METHODS = hdrs.METH_HEAD_ALL
DEBUG = sys.flags.dev_mode or (
not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
chr(127),
}
SEPARATORS = {
"(",
")",
"<",
">",
"@",
",",
";",
":",
"\\",
'"',
"/",
"[",
"]",
"?",
"=",
"{",
"}",
" ",
chr(9),
}
TOKEN = CHAR ^ CTL ^ SEPARATORS
json_re = re.compile(r"(?:application/|[\w.-]+/[\w.+-]+?\+)json$", re.IGNORECASE)
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
"""Http basic authentication helper."""
def __new__(
cls, login: str, password: str = "", encoding: str = "latin1"
) -> "BasicAuth":
if login is None:
raise ValueError("None is not allowed as login value")
if password is None:
raise ValueError("None is not allowed as password value")
if ":" in login:
raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
return super().__new__(cls, login, password, encoding)
@classmethod
def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
"""Create a BasicAuth object from an Authorization HTTP header."""
try:
auth_type, encoded_credentials = auth_header.split(" ", 1)
except ValueError:
raise ValueError("Could not parse authorization header.")
if auth_type.lower() != "basic":
raise ValueError("Unknown authorization method %s" % auth_type)
try:
decoded = base64.b64decode(
encoded_credentials.encode("ascii"), validate=True
).decode(encoding)
except binascii.Error:
raise ValueError("Invalid base64 encoding.")
try:
# RFC 2617 HTTP Authentication
# https://www.ietf.org/rfc/rfc2617.txt
# the colon must be present, but the username and password may be
# otherwise blank.
username, password = decoded.split(":", 1)
except ValueError:
raise ValueError("Invalid credentials.")
return cls(username, password, encoding=encoding)
@classmethod
def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
"""Create BasicAuth from url."""
if not isinstance(url, URL):
raise TypeError("url should be yarl.URL instance")
# Check raw_user and raw_password first as yarl is likely
# to already have these values parsed from the netloc in the cache.
if url.raw_user is None and url.raw_password is None:
return None
return cls(url.user or "", url.password or "", encoding=encoding)
def encode(self) -> str:
"""Encode credentials."""
creds = (f"{self.login}:{self.password}").encode(self.encoding)
return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
def strip_auth_from_url(url: URL) -> tuple[URL, BasicAuth | None]:
"""Remove user and password from URL if present and return BasicAuth object."""
# Check raw_user and raw_password first as yarl is likely
# to already have these values parsed from the netloc in the cache.
if url.raw_user is None and url.raw_password is None:
return url, None
return url.with_user(None), BasicAuth(url.user or "", url.password or "")
def netrc_from_env() -> netrc.netrc | None:
"""Load netrc from file.
Attempt to load it from the path specified by the env-var
NETRC or in the default location in the user's home directory.
Returns None if it couldn't be found or fails to parse.
"""
netrc_env = os.environ.get("NETRC")
if netrc_env is not None:
netrc_path = Path(netrc_env)
else:
try:
home_dir = Path.home()
except RuntimeError as e:
# if pathlib can't resolve home, it may raise a RuntimeError
client_logger.debug(
"Could not resolve home directory when "
"trying to look for .netrc file: %s",
e,
)
return None
netrc_path = home_dir / (
"_netrc" if platform.system() == "Windows" else ".netrc"
)
try:
return netrc.netrc(str(netrc_path))
except netrc.NetrcParseError as e:
client_logger.warning("Could not parse .netrc file: %s", e)
except OSError as e:
netrc_exists = False
with contextlib.suppress(OSError):
netrc_exists = netrc_path.is_file()
# we couldn't read the file (doesn't exist, permissions, etc.)
if netrc_env or netrc_exists:
# only warn if the environment wanted us to load it,
# or it appears like the default file does actually exist
client_logger.warning("Could not read .netrc file: %s", e)
return None
@frozen_dataclass_decorator
class ProxyInfo:
proxy: URL
proxy_auth: BasicAuth | None
def basicauth_from_netrc(netrc_obj: netrc.netrc | None, host: str) -> BasicAuth:
"""
Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.
:raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
entry is found for the ``host``.
"""
if netrc_obj is None:
raise LookupError("No .netrc file found")
auth_from_netrc = netrc_obj.authenticators(host)
if auth_from_netrc is None:
raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")
login, account, password = auth_from_netrc
# TODO(PY311): username = login or account
# Up to python 3.10, account could be None if not specified,
# and login will be empty string if not specified. From 3.11,
# login and account will be empty string if not specified.
username = login if (login or account is None) else account
# TODO(PY311): Remove this, as password will be empty string
# if not specified
if password is None:
password = "" # type: ignore[unreachable]
return BasicAuth(username, password)
def proxies_from_env() -> dict[str, ProxyInfo]:
proxy_urls = {
k: URL(v)
for k, v in getproxies().items()
if k in ("http", "https", "ws", "wss")
}
netrc_obj = netrc_from_env()
stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
ret = {}
for proto, val in stripped.items():
proxy, auth = val
if proxy.scheme in ("https", "wss"):
client_logger.warning(
"%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
)
continue
if netrc_obj and auth is None:
if proxy.host is not None:
try:
auth = basicauth_from_netrc(netrc_obj, proxy.host)
except LookupError:
auth = None
ret[proto] = ProxyInfo(proxy, auth)
return ret
def get_env_proxy_for_url(url: URL) -> tuple[URL, BasicAuth | None]:
"""Get a permitted proxy for the given URL from the env."""
if url.host is not None and proxy_bypass(url.host):
raise LookupError(f"Proxying is disallowed for `{url.host!r}`")
proxies_in_env = proxies_from_env()
try:
proxy_info = proxies_in_env[url.scheme]
except KeyError:
raise LookupError(f"No proxies found for `{url!s}` in the env")
else:
return proxy_info.proxy, proxy_info.proxy_auth
@frozen_dataclass_decorator
class MimeType:
type: str
subtype: str
suffix: str
parameters: "MultiDictProxy[str]"
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
"""Parses a MIME type into its components.
mimetype is a MIME type string.
Returns a MimeType object.
Example:
>>> parse_mimetype('text/html; charset=utf-8')
MimeType(type='text', subtype='html', suffix='',
parameters={'charset': 'utf-8'})
"""
if not mimetype:
return MimeType(
type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
)
parts = mimetype.split(";")
params: MultiDict[str] = MultiDict()
for item in parts[1:]:
if not item:
continue
key, _, value = item.partition("=")
params.add(key.lower().strip(), value.strip(' "'))
fulltype = parts[0].strip().lower()
if fulltype == "*":
fulltype = "*/*"
mtype, _, stype = fulltype.partition("/")
stype, _, suffix = stype.partition("+")
return MimeType(
type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
)
class EnsureOctetStream(EmailMessage):
def __init__(self) -> None:
super().__init__()
# https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5
self.set_default_type("application/octet-stream")
def get_content_type(self) -> str:
"""Re-implementation from Message
Returns application/octet-stream in place of text/plain when the
value is wrong.
The way this class is used guarantees that content-type will
be present, so the checks are simplified with respect to the base
implementation.
"""
value = self.get("content-type", "").lower()
# Based on the implementation of _splitparam in the standard library
ctype, _, _ = value.partition(";")
ctype = ctype.strip()
if ctype.count("/") != 1:
return self.get_default_type()
return ctype
@functools.lru_cache(maxsize=56)
def parse_content_type(raw: str) -> tuple[str, MappingProxyType[str, str]]:
"""Parse Content-Type header.
Returns a tuple of the parsed content type and a
MappingProxyType of parameters. The default returned value
is `application/octet-stream`
"""
msg = HeaderParser(EnsureOctetStream, policy=HTTP).parsestr(f"Content-Type: {raw}")
content_type = msg.get_content_type()
params = msg.get_params(())
content_dict = dict(params[1:]) # First element is content type again
return content_type, MappingProxyType(content_dict)
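# Behaviour sketch for parse_content_type above (illustrative):
#
#     >>> parse_content_type("text/html; charset=utf-8")
#     ('text/html', mappingproxy({'charset': 'utf-8'}))
#     >>> parse_content_type("bogus")  # malformed input falls back
#     ('application/octet-stream', mappingproxy({}))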
def guess_filename(obj: Any, default: str | None = None) -> str | None:
name = getattr(obj, "name", None)
if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
return Path(name).name
return default
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
def quoted_string(content: str) -> str:
"""Return 7-bit content as quoted-string.
Format content into a quoted-string as defined in RFC5322 for
Internet Message Format. Notice that this is not the 8-bit HTTP
format, but the 7-bit email format. Content must be US-ASCII or
a ValueError is raised.
"""
if not (QCONTENT > set(content)):
raise ValueError(f"bad content for quoted-string {content!r}")
return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
def content_disposition_header(
disptype: str,
quote_fields: bool = True,
_charset: str = "utf-8",
params: dict[str, str] | None = None,
) -> str:
"""Sets ``Content-Disposition`` header for MIME.
This is the MIME payload Content-Disposition header from RFC 2183
and RFC 7578 section 4.2, not the HTTP Content-Disposition from
RFC 6266.
disptype is a disposition type: inline, attachment, form-data.
Should be a valid extension token (see RFC 2183).
quote_fields performs value quoting to 7-bit MIME headers
according to RFC 7578. Set quote_fields to False if the recipient
can take 8-bit file names and field values.
_charset specifies the charset to use when quote_fields is True.
params is a dict with disposition params.
"""
if not disptype or not (TOKEN > set(disptype)):
raise ValueError(f"bad content disposition type {disptype!r}")
value = disptype
if params:
lparams = []
for key, val in params.items():
if not key or not (TOKEN > set(key)):
raise ValueError(f"bad content disposition parameter {key!r}={val!r}")
if quote_fields:
if key.lower() == "filename":
qval = quote(val, "", encoding=_charset)
lparams.append((key, '"%s"' % qval))
else:
try:
qval = quoted_string(val)
except ValueError:
qval = "".join(
(_charset, "''", quote(val, "", encoding=_charset))
)
lparams.append((key + "*", qval))
else:
lparams.append((key, '"%s"' % qval))
else:
qval = val.replace("\\", "\\\\").replace('"', '\\"')
lparams.append((key, '"%s"' % qval))
sparams = "; ".join("=".join(pair) for pair in lparams)
value = "; ".join((value, sparams))
return value
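# Output sketch for content_disposition_header above (illustrative):
#
#     >>> content_disposition_header("attachment", params={"filename": "report.pdf"})
#     'attachment; filename="report.pdf"'
#     >>> content_disposition_header("form-data", params={"name": "field"})
#     'form-data; name="field"'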
def is_expected_content_type(
response_content_type: str, expected_content_type: str
) -> bool:
"""Checks if received content type is processable as an expected one.
Both arguments should be given without parameters.
"""
if expected_content_type == "application/json":
return json_re.match(response_content_type) is not None
return expected_content_type in response_content_type
def is_ip_address(host: str | None) -> bool:
"""Check if host looks like an IP Address.
This check is only meant as a heuristic to ensure that
a host is not a domain name.
"""
if not host:
return False
# For a host to be an ipv4 address, it must be all numeric.
# The host must contain a colon to be an IPv6 address.
return ":" in host or host.replace(".", "").isdigit()
_cached_current_datetime: int | None = None
_cached_formatted_datetime = ""
def rfc822_formatted_time() -> str:
global _cached_current_datetime
global _cached_formatted_datetime
now = int(time.time())
if now != _cached_current_datetime:
# Weekday and month names for HTTP date/time formatting;
# always English!
# Tuples are constants stored in codeobject!
_weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
_monthname = (
"", # Dummy so we can use 1-based month numbers
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec",
)
year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
_cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
_weekdayname[wd],
day,
_monthname[month],
year,
hh,
mm,
ss,
)
_cached_current_datetime = now
return _cached_formatted_datetime
def _weakref_handle(info: "tuple[weakref.ref[object], str]") -> None:
ref, name = info
ob = ref()
if ob is not None:
with suppress(Exception):
getattr(ob, name)()
def weakref_handle(
ob: object,
name: str,
timeout: float | None,
loop: asyncio.AbstractEventLoop,
timeout_ceil_threshold: float = 5,
) -> asyncio.TimerHandle | None:
if timeout is not None and timeout > 0:
when = loop.time() + timeout
if timeout >= timeout_ceil_threshold:
when = ceil(when)
return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
return None
def call_later(
cb: Callable[[], Any],
timeout: float | None,
loop: asyncio.AbstractEventLoop,
timeout_ceil_threshold: float = 5,
) -> asyncio.TimerHandle | None:
if timeout is None or timeout <= 0:
return None
now = loop.time()
when = calculate_timeout_when(now, timeout, timeout_ceil_threshold)
return loop.call_at(when, cb)
def calculate_timeout_when(
loop_time: float,
timeout: float,
timeout_ceiling_threshold: float,
) -> float:
"""Calculate when to execute a timeout."""
when = loop_time + timeout
if timeout > timeout_ceiling_threshold:
return ceil(when)
return when
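# Worked example (illustrative): with loop_time=100.2, timeout=6 and a
# ceiling threshold of 5, the deadline 106.2 is rounded up to 107 so the
# event loop can coalesce timers; a timeout of 2 stays below the threshold
# and keeps the exact deadline.
#
#     >>> calculate_timeout_when(100.2, 6, 5)
#     107
#     >>> calculate_timeout_when(100.2, 2, 5)
#     102.2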
class TimeoutHandle:
"""Timeout handle"""
__slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks")
def __init__(
self,
loop: asyncio.AbstractEventLoop,
timeout: float | None,
ceil_threshold: float = 5,
) -> None:
self._timeout = timeout
self._loop = loop
self._ceil_threshold = ceil_threshold
self._callbacks: list[
tuple[Callable[..., None], tuple[Any, ...], dict[str, Any]]
] = []
def register(
self, callback: Callable[..., None], *args: Any, **kwargs: Any
) -> None:
self._callbacks.append((callback, args, kwargs))
def close(self) -> None:
self._callbacks.clear()
def start(self) -> asyncio.TimerHandle | None:
timeout = self._timeout
if timeout is not None and timeout > 0:
when = self._loop.time() + timeout
if timeout >= self._ceil_threshold:
when = ceil(when)
return self._loop.call_at(when, self.__call__)
else:
return None
def timer(self) -> "BaseTimerContext":
if self._timeout is not None and self._timeout > 0:
timer = TimerContext(self._loop)
self.register(timer.timeout)
return timer
else:
return TimerNoop()
def __call__(self) -> None:
for cb, args, kwargs in self._callbacks:
with suppress(Exception):
cb(*args, **kwargs)
self._callbacks.clear()
class BaseTimerContext(ContextManager["BaseTimerContext"]):
__slots__ = ()
def assert_timeout(self) -> None:
"""Raise TimeoutError if timeout has been exceeded."""
class TimerNoop(BaseTimerContext):
__slots__ = ()
def __enter__(self) -> BaseTimerContext:
return self
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
return
class TimerContext(BaseTimerContext):
"""Low resolution timeout context manager"""
__slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling")
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._tasks: list[asyncio.Task[Any]] = []
self._cancelled = False
self._cancelling = 0
def assert_timeout(self) -> None:
"""Raise TimeoutError if timer has already been cancelled."""
if self._cancelled:
raise asyncio.TimeoutError from None
def __enter__(self) -> BaseTimerContext:
task = asyncio.current_task(loop=self._loop)
if task is None:
raise RuntimeError("Timeout context manager should be used inside a task")
if sys.version_info >= (3, 11):
# Remember if the task was already cancelling
# so when we __exit__ we can decide if we should
# raise asyncio.TimeoutError or let the cancellation propagate
self._cancelling = task.cancelling()
if self._cancelled:
raise asyncio.TimeoutError from None
self._tasks.append(task)
return self
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> bool | None:
enter_task: asyncio.Task[Any] | None = None
if self._tasks:
enter_task = self._tasks.pop()
if exc_type is asyncio.CancelledError and self._cancelled:
assert enter_task is not None
# The timeout was hit, and the task was cancelled
# so we need to uncancel the last task that entered the context manager
# since the cancellation should not leak out of the context manager
if sys.version_info >= (3, 11):
# If the task was already cancelling don't raise
# asyncio.TimeoutError and instead return None
# to allow the cancellation to propagate
if enter_task.uncancel() > self._cancelling:
return None
raise asyncio.TimeoutError from exc_val
return None
def timeout(self) -> None:
if not self._cancelled:
for task in set(self._tasks):
task.cancel()
self._cancelled = True
def ceil_timeout(
delay: float | None, ceil_threshold: float = 5
) -> async_timeout.Timeout:
if delay is None or delay <= 0:
return async_timeout.timeout(None)
loop = asyncio.get_running_loop()
now = loop.time()
when = now + delay
if delay > ceil_threshold:
when = ceil(when)
return async_timeout.timeout_at(when)
class HeadersMixin:
"""Mixin for handling headers."""
_headers: MultiMapping[str]
_content_type: str | None = None
_content_dict: dict[str, str] | None = None
_stored_content_type: str | None | _SENTINEL = sentinel
def _parse_content_type(self, raw: str | None) -> None:
self._stored_content_type = raw
if raw is None:
# default value according to RFC 2616
self._content_type = "application/octet-stream"
self._content_dict = {}
else:
content_type, content_mapping_proxy = parse_content_type(raw)
self._content_type = content_type
# _content_dict needs to be mutable so we can update it
self._content_dict = content_mapping_proxy.copy()
@property
def content_type(self) -> str:
"""The value of content part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE)
if self._stored_content_type != raw:
self._parse_content_type(raw)
assert self._content_type is not None
return self._content_type
@property
def charset(self) -> str | None:
"""The value of charset part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE)
if self._stored_content_type != raw:
self._parse_content_type(raw)
assert self._content_dict is not None
return self._content_dict.get("charset")
@property
def content_length(self) -> int | None:
"""The value of Content-Length HTTP header."""
content_length = self._headers.get(hdrs.CONTENT_LENGTH)
return None if content_length is None else int(content_length)
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
if not fut.done():
fut.set_result(result)
_EXC_SENTINEL = BaseException()
class ErrorableProtocol(Protocol):
def set_exception(
self,
exc: type[BaseException] | BaseException,
exc_cause: BaseException = ...,
) -> None: ...
def set_exception(
fut: Union["asyncio.Future[_T]", ErrorableProtocol],
exc: type[BaseException] | BaseException,
exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
"""Set future exception.
If the future is marked as complete, this function is a no-op.
:param exc_cause: An exception that is a direct cause of ``exc``.
Only set if provided.
"""
if asyncio.isfuture(fut) and fut.done():
return
exc_is_sentinel = exc_cause is _EXC_SENTINEL
exc_causes_itself = exc is exc_cause
if not exc_is_sentinel and not exc_causes_itself:
exc.__cause__ = exc_cause
fut.set_exception(exc)
@functools.total_ordering
class BaseKey(Generic[_T]):
"""Base for concrete context storage key classes.
Each storage is provided with its own sub-class for the sake of some additional type safety.
"""
__slots__ = ("_name", "_t", "__orig_class__")
# This may be set by Python when instantiating with a generic type. We need to
# support this, in order to support types that are not concrete classes,
# like Iterable, which can't be passed as the second parameter to __init__.
__orig_class__: type[object]
# TODO(PY314): Change Type to TypeForm (this should resolve unreachable below).
def __init__(self, name: str, t: type[_T] | None = None):
# Prefix with module name to help deduplicate key names.
frame = inspect.currentframe()
while frame:
if frame.f_code.co_name == "<module>":
module: str = frame.f_globals["__name__"]
break
frame = frame.f_back
else:
raise RuntimeError("Failed to get module name.")
# https://github.com/python/mypy/issues/14209
self._name = module + "." + name # type: ignore[possibly-undefined]
self._t = t
def __lt__(self, other: object) -> bool:
if isinstance(other, BaseKey):
return self._name < other._name
return True # Order BaseKey above other types.
def __repr__(self) -> str:
t = self._t
if t is None:
with suppress(AttributeError):
# Set to type arg.
t = get_args(self.__orig_class__)[0]
if t is None:
t_repr = "<<Unknown>>"
elif isinstance(t, type):
if t.__module__ == "builtins":
t_repr = t.__qualname__
else:
t_repr = f"{t.__module__}.{t.__qualname__}"
else:
t_repr = repr(t) # type: ignore[unreachable]
return f"<{self.__class__.__name__}({self._name}, type={t_repr})>"
class AppKey(BaseKey[_T]):
"""Keys for static typing support in Application."""
class RequestKey(BaseKey[_T]):
"""Keys for static typing support in Request."""
class ResponseKey(BaseKey[_T]):
"""Keys for static typing support in Response."""
@final
class ChainMapProxy(Mapping[str | AppKey[Any], Any]):
__slots__ = ("_maps",)
def __init__(self, maps: Iterable[Mapping[str | AppKey[Any], Any]]) -> None:
self._maps = tuple(maps)
def __init_subclass__(cls) -> None:
raise TypeError(
f"Inheritance class {cls.__name__} from ChainMapProxy is forbidden"
)
@overload # type: ignore[override]
def __getitem__(self, key: AppKey[_T]) -> _T: ...
@overload
def __getitem__(self, key: str) -> Any: ...
def __getitem__(self, key: str | AppKey[_T]) -> Any:
for mapping in self._maps:
try:
return mapping[key]
except KeyError:
pass
raise KeyError(key)
@overload # type: ignore[override]
def get(self, key: AppKey[_T], default: _S) -> _T | _S: ...
@overload
def get(self, key: AppKey[_T], default: None = ...) -> _T | None: ...
@overload
def get(self, key: str, default: Any = ...) -> Any: ...
def get(self, key: str | AppKey[_T], default: Any = None) -> Any:
try:
return self[key]
except KeyError:
return default
def __len__(self) -> int:
# reuses stored hash values if possible
return len(set().union(*self._maps))
def __iter__(self) -> Iterator[str | AppKey[Any]]:
d: dict[str | AppKey[Any], Any] = {}
for mapping in reversed(self._maps):
# reuses stored hash values if possible
d.update(mapping)
return iter(d)
def __contains__(self, key: object) -> bool:
return any(key in m for m in self._maps)
def __bool__(self) -> bool:
return any(self._maps)
def __repr__(self) -> str:
content = ", ".join(map(repr, self._maps))
return f"ChainMapProxy({content})"
class CookieMixin:
"""Mixin for handling cookies."""
_cookies: SimpleCookie | None = None
@property
def cookies(self) -> SimpleCookie:
if self._cookies is None:
self._cookies = SimpleCookie()
return self._cookies
def set_cookie(
self,
name: str,
value: str,
*,
expires: str | None = None,
domain: str | None = None,
max_age: int | str | None = None,
path: str = "/",
secure: bool | None = None,
httponly: bool | None = None,
samesite: str | None = None,
partitioned: bool | None = None,
) -> None:
"""Set or update response cookie.
Sets a new cookie or updates an existing one with a new value.
Only those params which are not None are updated.
"""
if self._cookies is None:
self._cookies = SimpleCookie()
self._cookies[name] = value
c = self._cookies[name]
if expires is not None:
c["expires"] = expires
elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
del c["expires"]
if domain is not None:
c["domain"] = domain
if max_age is not None:
c["max-age"] = str(max_age)
elif "max-age" in c:
del c["max-age"]
c["path"] = path
if secure is not None:
c["secure"] = secure
if httponly is not None:
c["httponly"] = httponly
if samesite is not None:
c["samesite"] = samesite
if partitioned is not None:
c["partitioned"] = partitioned
if DEBUG:
cookie_length = len(c.output(header="")[1:])
if cookie_length > COOKIE_MAX_LENGTH:
warnings.warn(
"The size of is too large, it might get ignored by the client.",
UserWarning,
stacklevel=2,
)
def del_cookie(
self,
name: str,
*,
domain: str | None = None,
path: str = "/",
secure: bool | None = None,
httponly: bool | None = None,
samesite: str | None = None,
) -> None:
"""Delete cookie.
Creates a new, empty, expired cookie.
"""
# TODO: do we need domain/path here?
if self._cookies is not None:
self._cookies.pop(name, None)
self.set_cookie(
name,
"",
max_age=0,
expires="Thu, 01 Jan 1970 00:00:00 GMT",
domain=domain,
path=path,
secure=secure,
httponly=httponly,
samesite=samesite,
)
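# Usage sketch for CookieMixin above (illustrative), e.g. on a
# web.Response, which mixes this class in:
#
#     resp.set_cookie("session", "abc123", max_age=3600, httponly=True)
#     resp.del_cookie("session")  # re-sets it empty, expired, max-age=0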
def populate_with_cookies(headers: "CIMultiDict[str]", cookies: SimpleCookie) -> None:
for cookie in cookies.values():
value = cookie.output(header="")[1:]
headers.add(hdrs.SET_COOKIE, value)
# https://tools.ietf.org/html/rfc7232#section-2.3
_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
ETAG_ANY = "*"
@frozen_dataclass_decorator
class ETag:
value: str
is_weak: bool = False
def validate_etag_value(value: str) -> None:
if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
raise ValueError(
f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
)
def parse_http_date(date_str: str | None) -> datetime.datetime | None:
"""Process a date string, return a datetime object"""
if date_str is not None:
timetuple = parsedate(date_str)
if timetuple is not None:
with suppress(ValueError):
return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
return None
@functools.lru_cache
def must_be_empty_body(method: str, code: int) -> bool:
"""Check if a request must return an empty body."""
return (
code in EMPTY_BODY_STATUS_CODES
or method in EMPTY_BODY_METHODS
or (200 <= code < 300 and method in hdrs.METH_CONNECT_ALL)
)
def should_remove_content_length(method: str, code: int) -> bool:
"""Check if a Content-Length header should be removed.
This should always be a subset of must_be_empty_body
"""
# https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
# https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
return code in EMPTY_BODY_STATUS_CODES or (
200 <= code < 300 and method in hdrs.METH_CONNECT_ALL
)
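# Sketch of the subset relationship documented above: HEAD responses must have
# an empty body, yet they keep their Content-Length header.
assert must_be_empty_body("HEAD", 200) is True
assert should_remove_content_length("HEAD", 200) is False
assert should_remove_content_length("GET", 204) is True  # implies empty body too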
|
import asyncio
import base64
import datetime
import gc
import sys
import weakref
from collections.abc import Iterator
from math import ceil, modf
from pathlib import Path
from types import MappingProxyType
from unittest import mock
from urllib.request import getproxies_environment
import pytest
from multidict import CIMultiDict, MultiDict, MultiDictProxy
from yarl import URL
from aiohttp import helpers, web
from aiohttp.helpers import (
EMPTY_BODY_METHODS,
is_expected_content_type,
must_be_empty_body,
parse_http_date,
should_remove_content_length,
)
# ------------------- parse_mimetype ----------------------------------
@pytest.mark.parametrize(
"mimetype, expected",
[
("", helpers.MimeType("", "", "", MultiDictProxy(MultiDict()))),
("*", helpers.MimeType("*", "*", "", MultiDictProxy(MultiDict()))),
(
"application/json",
helpers.MimeType("application", "json", "", MultiDictProxy(MultiDict())),
),
(
"application/json; charset=utf-8",
helpers.MimeType(
"application",
"json",
"",
MultiDictProxy(MultiDict({"charset": "utf-8"})),
),
),
(
"""application/json; charset=utf-8;""",
helpers.MimeType(
"application",
"json",
"",
MultiDictProxy(MultiDict({"charset": "utf-8"})),
),
),
(
'ApPlIcAtIoN/JSON;ChaRseT="UTF-8"',
helpers.MimeType(
"application",
"json",
"",
MultiDictProxy(MultiDict({"charset": "UTF-8"})),
),
),
(
"application/rss+xml",
helpers.MimeType("application", "rss", "xml", MultiDictProxy(MultiDict())),
),
(
"text/plain;base64",
helpers.MimeType(
"text", "plain", "", MultiDictProxy(MultiDict({"base64": ""}))
),
),
],
)
def test_parse_mimetype(mimetype: str, expected: helpers.MimeType) -> None:
result = helpers.parse_mimetype(mimetype)
assert isinstance(result, helpers.MimeType)
assert result == expected
# ------------------- parse_content_type ------------------------------
@pytest.mark.parametrize(
"content_type, expected",
[
(
"text/plain",
("text/plain", MultiDictProxy(MultiDict())),
),
(
"wrong",
("application/octet-stream", MultiDictProxy(MultiDict())),
),
],
)
def test_parse_content_type(
content_type: str, expected: tuple[str, MappingProxyType[str, str]]
) -> None:
result = helpers.parse_content_type(content_type)
assert result == expected
# ------------------- guess_filename ----------------------------------
def test_guess_filename_with_file_object(tmp_path: Path) -> None:
file_path = tmp_path / "test_guess_filename"
with file_path.open("w+b") as fp:
assert helpers.guess_filename(fp, "no-throw") is not None
def test_guess_filename_with_path(tmp_path: Path) -> None:
file_path = tmp_path / "test_guess_filename"
assert helpers.guess_filename(file_path, "no-throw") is not None
def test_guess_filename_with_default() -> None:
assert helpers.guess_filename(None, "no-throw") == "no-throw"
# ------------------- BasicAuth -----------------------------------
def test_basic_auth1() -> None:
# missing password here
with pytest.raises(ValueError):
helpers.BasicAuth(None) # type: ignore[arg-type]
def test_basic_auth2() -> None:
with pytest.raises(ValueError):
helpers.BasicAuth("nkim", None) # type: ignore[arg-type]
def test_basic_with_auth_colon_in_login() -> None:
with pytest.raises(ValueError):
helpers.BasicAuth("nkim:1", "pwd")
def test_basic_auth3() -> None:
auth = helpers.BasicAuth("nkim")
assert auth.login == "nkim"
assert auth.password == ""
def test_basic_auth4() -> None:
auth = helpers.BasicAuth("nkim", "pwd")
assert auth.login == "nkim"
assert auth.password == "pwd"
assert auth.encode() == "Basic bmtpbTpwd2Q="
@pytest.mark.parametrize(
"header",
(
"Basic bmtpbTpwd2Q=",
"basic bmtpbTpwd2Q=",
),
)
def test_basic_auth_decode(header: str) -> None:
auth = helpers.BasicAuth.decode(header)
assert auth.login == "nkim"
assert auth.password == "pwd"
def test_basic_auth_invalid() -> None:
with pytest.raises(ValueError):
helpers.BasicAuth.decode("bmtpbTpwd2Q=")
def test_basic_auth_decode_not_basic() -> None:
with pytest.raises(ValueError):
helpers.BasicAuth.decode("Complex bmtpbTpwd2Q=")
def test_basic_auth_decode_bad_base64() -> None:
with pytest.raises(ValueError):
helpers.BasicAuth.decode("Basic bmtpbTpwd2Q")
@pytest.mark.parametrize("header", ("Basic ???", "Basic "))
def test_basic_auth_decode_illegal_chars_base64(header: str) -> None:
with pytest.raises(ValueError, match="Invalid base64 encoding."):
helpers.BasicAuth.decode(header)
def test_basic_auth_decode_invalid_credentials() -> None:
with pytest.raises(ValueError, match="Invalid credentials."):
header = "Basic {}".format(base64.b64encode(b"username").decode())
helpers.BasicAuth.decode(header)
@pytest.mark.parametrize(
"credentials, expected_auth",
(
(":", helpers.BasicAuth(login="", password="", encoding="latin1")),
(
"username:",
helpers.BasicAuth(login="username", password="", encoding="latin1"),
),
(
":password",
helpers.BasicAuth(login="", password="password", encoding="latin1"),
),
(
"username:password",
helpers.BasicAuth(login="username", password="password", encoding="latin1"),
),
),
)
def test_basic_auth_decode_blank_username( # type: ignore[misc]
credentials: str, expected_auth: helpers.BasicAuth
) -> None:
header = f"Basic {base64.b64encode(credentials.encode()).decode()}"
assert helpers.BasicAuth.decode(header) == expected_auth
def test_basic_auth_from_url() -> None:
url = URL("http://user:[email protected]")
auth = helpers.BasicAuth.from_url(url)
assert auth is not None
assert auth.login == "user"
assert auth.password == "pass"
def test_basic_auth_no_user_from_url() -> None:
url = URL("http://:[email protected]")
auth = helpers.BasicAuth.from_url(url)
assert auth is not None
assert auth.login == ""
assert auth.password == "pass"
def test_basic_auth_no_auth_from_url() -> None:
url = URL("http://example.com")
auth = helpers.BasicAuth.from_url(url)
assert auth is None
def test_basic_auth_from_not_url() -> None:
with pytest.raises(TypeError):
helpers.BasicAuth.from_url("http://user:[email protected]") # type: ignore[arg-type]
# ----------------------------------- is_ip_address() ----------------------
def test_is_ip_address() -> None:
assert helpers.is_ip_address("127.0.0.1")
assert helpers.is_ip_address("::1")
assert helpers.is_ip_address("FE80:0000:0000:0000:0202:B3FF:FE1E:8329")
# Hostnames
assert not helpers.is_ip_address("localhost")
assert not helpers.is_ip_address("www.example.com")
def test_ipv4_addresses() -> None:
ip_addresses = [
"0.0.0.0",
"127.0.0.1",
"255.255.255.255",
]
for address in ip_addresses:
assert helpers.is_ip_address(address)
def test_ipv6_addresses() -> None:
ip_addresses = [
"0:0:0:0:0:0:0:0",
"FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF:FFFF",
"00AB:0002:3008:8CFD:00AB:0002:3008:8CFD",
"00ab:0002:3008:8cfd:00ab:0002:3008:8cfd",
"AB:02:3008:8CFD:AB:02:3008:8CFD",
"AB:02:3008:8CFD::02:3008:8CFD",
"::",
"1::1",
]
for address in ip_addresses:
assert helpers.is_ip_address(address)
def test_host_addresses() -> None:
hosts = [
"www.four.part.host",
"www.python.org",
"foo.bar",
"localhost",
]
for host in hosts:
assert not helpers.is_ip_address(host)
def test_is_ip_address_invalid_type() -> None:
with pytest.raises(TypeError):
helpers.is_ip_address(123) # type: ignore[arg-type]
with pytest.raises(TypeError):
helpers.is_ip_address(object()) # type: ignore[arg-type]
# ----------------------------------- TimeoutHandle -------------------
def test_timeout_handle(loop: asyncio.AbstractEventLoop) -> None:
handle = helpers.TimeoutHandle(loop, 10.2)
cb = mock.Mock()
handle.register(cb)
assert cb == handle._callbacks[0][0]
handle.close()
assert not handle._callbacks
def test_when_timeout_smaller_second(loop: asyncio.AbstractEventLoop) -> None:
timeout = 0.1
handle = helpers.TimeoutHandle(loop, timeout)
timer = loop.time() + timeout
start_handle = handle.start()
assert start_handle is not None
when = start_handle.when()
handle.close()
assert isinstance(when, float)
assert when - timer == pytest.approx(0, abs=0.001)
def test_when_timeout_smaller_second_with_low_threshold(
loop: asyncio.AbstractEventLoop,
) -> None:
timeout = 0.1
handle = helpers.TimeoutHandle(loop, timeout, 0.01)
timer = loop.time() + timeout
start_handle = handle.start()
assert start_handle is not None
when = start_handle.when()
handle.close()
assert isinstance(when, int)
assert when == ceil(timer)
def test_timeout_handle_cb_exc(loop: asyncio.AbstractEventLoop) -> None:
handle = helpers.TimeoutHandle(loop, 10.2)
cb = mock.Mock()
handle.register(cb)
cb.side_effect = ValueError()
handle()
assert cb.called
assert not handle._callbacks
def test_timer_context_not_cancelled() -> None:
with mock.patch("aiohttp.helpers.asyncio") as m_asyncio:
m_asyncio.TimeoutError = asyncio.TimeoutError
loop = mock.Mock()
ctx = helpers.TimerContext(loop)
ctx.timeout()
with pytest.raises(asyncio.TimeoutError):
with ctx:
pass
assert not m_asyncio.current_task.return_value.cancel.called
@pytest.mark.skipif(
sys.version_info < (3, 11), reason="Python 3.11+ is required for .cancelling()"
)
async def test_timer_context_timeout_does_not_leak_upward() -> None:
"""Verify that the TimerContext does not leak cancellation outside the context manager."""
loop = asyncio.get_running_loop()
ctx = helpers.TimerContext(loop)
current_task = asyncio.current_task()
assert current_task is not None
with pytest.raises(asyncio.TimeoutError):
with ctx:
assert current_task.cancelling() == 0
loop.call_soon(ctx.timeout)
await asyncio.sleep(1)
# After the context manager exits, the task should no longer be cancelling
assert current_task.cancelling() == 0
@pytest.mark.skipif(
sys.version_info < (3, 11), reason="Python 3.11+ is required for .cancelling()"
)
async def test_timer_context_timeout_does_not_swallow_cancellation() -> None:
"""Verify that the TimerContext does not swallow cancellation."""
loop = asyncio.get_running_loop()
current_task = asyncio.current_task()
assert current_task is not None
ctx = helpers.TimerContext(loop)
async def task_with_timeout() -> None:
new_task = asyncio.current_task()
assert new_task is not None
with pytest.raises(asyncio.TimeoutError):
with ctx:
assert new_task.cancelling() == 0
await asyncio.sleep(1)
task = asyncio.create_task(task_with_timeout())
await asyncio.sleep(0)
task.cancel()
assert task.cancelling() == 1
ctx.timeout()
# Cancellation should not leak into the current task
assert current_task.cancelling() == 0
# Cancellation should not be swallowed if the task is cancelled
# and it also times out
await asyncio.sleep(0)
with pytest.raises(asyncio.CancelledError):
await task
assert task.cancelling() == 1
def test_timer_context_no_task(loop: asyncio.AbstractEventLoop) -> None:
with pytest.raises(RuntimeError):
with helpers.TimerContext(loop):
pass
async def test_weakref_handle(loop: asyncio.AbstractEventLoop) -> None:
cb = mock.Mock()
helpers.weakref_handle(cb, "test", 0.01, loop)
await asyncio.sleep(0.1)
assert cb.test.called
async def test_weakref_handle_with_small_threshold(
loop: asyncio.AbstractEventLoop,
) -> None:
cb = mock.Mock()
loop = mock.Mock()
loop.time.return_value = 10
helpers.weakref_handle(cb, "test", 0.1, loop, 0.01)
loop.call_at.assert_called_with(
11, helpers._weakref_handle, (weakref.ref(cb), "test")
)
async def test_weakref_handle_weak(loop: asyncio.AbstractEventLoop) -> None:
cb = mock.Mock()
helpers.weakref_handle(cb, "test", 0.01, loop)
del cb
gc.collect()
await asyncio.sleep(0.1)
# -------------------- ceil math -------------------------
def test_ceil_call_later() -> None:
cb = mock.Mock()
loop = mock.Mock()
loop.time.return_value = 10.1
helpers.call_later(cb, 10.1, loop)
loop.call_at.assert_called_with(21.0, cb)
async def test_ceil_timeout_round(loop: asyncio.AbstractEventLoop) -> None:
async with helpers.ceil_timeout(7.5) as cm:
if sys.version_info >= (3, 11):
w = cm.when()
assert w is not None
frac, integer = modf(w)
else:
assert cm.deadline is not None
frac, integer = modf(cm.deadline)
assert frac == 0
async def test_ceil_timeout_small(loop: asyncio.AbstractEventLoop) -> None:
async with helpers.ceil_timeout(1.1) as cm:
if sys.version_info >= (3, 11):
w = cm.when()
assert w is not None
frac, integer = modf(w)
else:
assert cm.deadline is not None
frac, integer = modf(cm.deadline)
# a chance for exact integer with zero fraction is negligible
assert frac != 0
def test_ceil_call_later_with_small_threshold() -> None:
cb = mock.Mock()
loop = mock.Mock()
loop.time.return_value = 10.1
helpers.call_later(cb, 4.5, loop, 1)
loop.call_at.assert_called_with(15, cb)
def test_ceil_call_later_no_timeout() -> None:
cb = mock.Mock()
loop = mock.Mock()
helpers.call_later(cb, 0, loop)
assert not loop.call_at.called
async def test_ceil_timeout_none(loop: asyncio.AbstractEventLoop) -> None:
async with helpers.ceil_timeout(None) as cm:
if sys.version_info >= (3, 11):
assert cm.when() is None
else:
assert cm.deadline is None
async def test_ceil_timeout_small_with_overridden_threshold(
loop: asyncio.AbstractEventLoop,
) -> None:
async with helpers.ceil_timeout(1.5, ceil_threshold=1) as cm:
if sys.version_info >= (3, 11):
w = cm.when()
assert w is not None
frac, integer = modf(w)
else:
assert cm.deadline is not None
frac, integer = modf(cm.deadline)
assert frac == 0
# -------------------------------- ContentDisposition -------------------
@pytest.mark.parametrize(
"params, quote_fields, _charset, expected",
[
(dict(foo="bar"), True, "utf-8", 'attachment; foo="bar"'),
(dict(foo="bar[]"), True, "utf-8", 'attachment; foo="bar[]"'),
(dict(foo=' a""b\\'), True, "utf-8", 'attachment; foo="\\ a\\"\\"b\\\\"'),
(dict(foo="bär"), True, "utf-8", "attachment; foo*=utf-8''b%C3%A4r"),
(dict(foo='bär "\\'), False, "utf-8", 'attachment; foo="bär \\"\\\\"'),
(dict(foo="bär"), True, "latin-1", "attachment; foo*=latin-1''b%E4r"),
(dict(filename="bär"), True, "utf-8", 'attachment; filename="b%C3%A4r"'),
(dict(filename="bär"), True, "latin-1", 'attachment; filename="b%E4r"'),
(
dict(filename='bär "\\'),
False,
"utf-8",
'attachment; filename="bär \\"\\\\"',
),
],
)
def test_content_disposition(
params: dict[str, str], quote_fields: bool, _charset: str, expected: str
) -> None:
result = helpers.content_disposition_header(
"attachment", quote_fields=quote_fields, _charset=_charset, params=params
)
assert result == expected
def test_content_disposition_bad_type() -> None:
with pytest.raises(ValueError):
helpers.content_disposition_header("foo bar")
with pytest.raises(ValueError):
helpers.content_disposition_header("тест")
with pytest.raises(ValueError):
helpers.content_disposition_header("foo\x00bar")
with pytest.raises(ValueError):
helpers.content_disposition_header("")
def test_set_content_disposition_bad_param() -> None:
with pytest.raises(ValueError):
helpers.content_disposition_header("inline", params={"foo bar": "baz"})
with pytest.raises(ValueError):
helpers.content_disposition_header("inline", params={"тест": "baz"})
with pytest.raises(ValueError):
helpers.content_disposition_header("inline", params={"": "baz"})
with pytest.raises(ValueError):
helpers.content_disposition_header("inline", params={"foo\x00bar": "baz"})
# --------------------- proxies_from_env ------------------------------
@pytest.mark.parametrize(
("proxy_env_vars", "url_input", "expected_scheme"),
(
({"http_proxy": "http://aiohttp.io/path"}, "http://aiohttp.io/path", "http"),
({"https_proxy": "http://aiohttp.io/path"}, "http://aiohttp.io/path", "https"),
({"ws_proxy": "http://aiohttp.io/path"}, "http://aiohttp.io/path", "ws"),
({"wss_proxy": "http://aiohttp.io/path"}, "http://aiohttp.io/path", "wss"),
),
indirect=["proxy_env_vars"],
ids=("http", "https", "ws", "wss"),
)
@pytest.mark.usefixtures("proxy_env_vars")
def test_proxies_from_env(url_input: str, expected_scheme: str) -> None:
url = URL(url_input)
ret = helpers.proxies_from_env()
assert ret.keys() == {expected_scheme}
assert ret[expected_scheme].proxy == url
assert ret[expected_scheme].proxy_auth is None
@pytest.mark.parametrize(
("proxy_env_vars", "url_input", "expected_scheme"),
(
(
{"https_proxy": "https://aiohttp.io/path"},
"https://aiohttp.io/path",
"https",
),
({"wss_proxy": "wss://aiohttp.io/path"}, "wss://aiohttp.io/path", "wss"),
),
indirect=["proxy_env_vars"],
ids=("https", "wss"),
)
@pytest.mark.usefixtures("proxy_env_vars")
def test_proxies_from_env_skipped(
caplog: pytest.LogCaptureFixture, url_input: str, expected_scheme: str
) -> None:
url = URL(url_input)
assert helpers.proxies_from_env() == {}
assert len(caplog.records) == 1
log_message = (
f"{expected_scheme.upper()!s} proxies {url!s} are not supported, ignoring"
)
assert caplog.record_tuples == [("aiohttp.client", 30, log_message)]
@pytest.mark.parametrize(
("proxy_env_vars", "url_input", "expected_scheme"),
(
(
{"http_proxy": "http://user:[email protected]/path"},
"http://user:[email protected]/path",
"http",
),
),
indirect=["proxy_env_vars"],
ids=("http",),
)
@pytest.mark.usefixtures("proxy_env_vars")
def test_proxies_from_env_http_with_auth(url_input: str, expected_scheme: str) -> None:
url = URL("http://user:[email protected]/path")
ret = helpers.proxies_from_env()
assert ret.keys() == {expected_scheme}
assert ret[expected_scheme].proxy == url.with_user(None)
proxy_auth = ret[expected_scheme].proxy_auth
assert proxy_auth is not None
assert proxy_auth.login == "user"
assert proxy_auth.password == "pass"
assert proxy_auth.encoding == "latin1"
# --------------------- get_env_proxy_for_url ------------------------------
@pytest.fixture
def proxy_env_vars(
monkeypatch: pytest.MonkeyPatch, request: pytest.FixtureRequest
) -> object:
for schema in getproxies_environment().keys():
monkeypatch.delenv(f"{schema}_proxy", False)
for proxy_type, proxy_list in request.param.items():
monkeypatch.setenv(proxy_type, proxy_list)
return request.param
@pytest.mark.parametrize(
("proxy_env_vars", "url_input", "expected_err_msg"),
(
(
{"no_proxy": "aiohttp.io"},
"http://aiohttp.io/path",
r"Proxying is disallowed for `'aiohttp.io'`",
),
(
{"no_proxy": "aiohttp.io,proxy.com"},
"http://aiohttp.io/path",
r"Proxying is disallowed for `'aiohttp.io'`",
),
(
{"http_proxy": "http://example.com"},
"https://aiohttp.io/path",
r"No proxies found for `https://aiohttp.io/path` in the env",
),
(
{"https_proxy": "https://example.com"},
"http://aiohttp.io/path",
r"No proxies found for `http://aiohttp.io/path` in the env",
),
(
{},
"https://aiohttp.io/path",
r"No proxies found for `https://aiohttp.io/path` in the env",
),
(
{"https_proxy": "https://example.com"},
"",
r"No proxies found for `` in the env",
),
),
indirect=["proxy_env_vars"],
ids=(
"url_matches_the_no_proxy_list",
"url_matches_the_no_proxy_list_multiple",
"url_scheme_does_not_match_http_proxy_list",
"url_scheme_does_not_match_https_proxy_list",
"no_proxies_are_set",
"url_is_empty",
),
)
@pytest.mark.usefixtures("proxy_env_vars")
def test_get_env_proxy_for_url_negative(url_input: str, expected_err_msg: str) -> None:
url = URL(url_input)
with pytest.raises(LookupError, match=expected_err_msg):
helpers.get_env_proxy_for_url(url)
@pytest.mark.parametrize(
("proxy_env_vars", "url_input"),
(
({"http_proxy": "http://example.com"}, "http://aiohttp.io/path"),
({"https_proxy": "http://example.com"}, "https://aiohttp.io/path"),
(
{"http_proxy": "http://example.com,http://proxy.org"},
"http://aiohttp.io/path",
),
),
indirect=["proxy_env_vars"],
ids=(
"url_scheme_match_http_proxy_list",
"url_scheme_match_https_proxy_list",
"url_scheme_match_http_proxy_list_multiple",
),
)
def test_get_env_proxy_for_url(proxy_env_vars: dict[str, str], url_input: str) -> None:
url = URL(url_input)
proxy, proxy_auth = helpers.get_env_proxy_for_url(url)
proxy_list = proxy_env_vars[url.scheme + "_proxy"]
assert proxy == URL(proxy_list)
assert proxy_auth is None
# ------------- set_result / set_exception ----------------------
async def test_set_result(loop: asyncio.AbstractEventLoop) -> None:
fut = loop.create_future()
helpers.set_result(fut, 123)
assert 123 == await fut
async def test_set_result_cancelled(loop: asyncio.AbstractEventLoop) -> None:
fut = loop.create_future()
fut.cancel()
helpers.set_result(fut, 123)
with pytest.raises(asyncio.CancelledError):
await fut
async def test_set_exception(loop: asyncio.AbstractEventLoop) -> None:
fut = loop.create_future()
helpers.set_exception(fut, RuntimeError())
with pytest.raises(RuntimeError):
await fut
async def test_set_exception_cancelled(loop: asyncio.AbstractEventLoop) -> None:
fut = loop.create_future()
fut.cancel()
helpers.set_exception(fut, RuntimeError())
with pytest.raises(asyncio.CancelledError):
await fut
# ----------- ChainMapProxy --------------------------
AppKeyDict = dict[str | web.AppKey[object], object]
class TestChainMapProxy:
def test_inheritance(self) -> None:
with pytest.raises(TypeError):
class A(helpers.ChainMapProxy): # type: ignore[misc]
pass
def test_getitem(self) -> None:
d1: AppKeyDict = {"a": 2, "b": 3}
d2: AppKeyDict = {"a": 1}
cp = helpers.ChainMapProxy([d1, d2])
assert cp["a"] == 2
assert cp["b"] == 3
def test_getitem_not_found(self) -> None:
d: AppKeyDict = {"a": 1}
cp = helpers.ChainMapProxy([d])
with pytest.raises(KeyError):
cp["b"]
def test_get(self) -> None:
d1: AppKeyDict = {"a": 2, "b": 3}
d2: AppKeyDict = {"a": 1}
cp = helpers.ChainMapProxy([d1, d2])
assert cp.get("a") == 2
def test_get_default(self) -> None:
d1: AppKeyDict = {"a": 2, "b": 3}
d2: AppKeyDict = {"a": 1}
cp = helpers.ChainMapProxy([d1, d2])
assert cp.get("c", 4) == 4
def test_get_non_default(self) -> None:
d1: AppKeyDict = {"a": 2, "b": 3}
d2: AppKeyDict = {"a": 1}
cp = helpers.ChainMapProxy([d1, d2])
assert cp.get("a", 4) == 2
def test_len(self) -> None:
d1: AppKeyDict = {"a": 2, "b": 3}
d2: AppKeyDict = {"a": 1}
cp = helpers.ChainMapProxy([d1, d2])
assert len(cp) == 2
def test_iter(self) -> None:
d1: AppKeyDict = {"a": 2, "b": 3}
d2: AppKeyDict = {"a": 1}
cp = helpers.ChainMapProxy([d1, d2])
assert set(cp) == {"a", "b"}
def test_contains(self) -> None:
d1: AppKeyDict = {"a": 2, "b": 3}
d2: AppKeyDict = {"a": 1}
cp = helpers.ChainMapProxy([d1, d2])
assert "a" in cp
assert "b" in cp
assert "c" not in cp
def test_bool(self) -> None:
assert helpers.ChainMapProxy([{"a": 1}])
assert not helpers.ChainMapProxy([{}, {}])
assert not helpers.ChainMapProxy([])
def test_repr(self) -> None:
d1: AppKeyDict = {"a": 2, "b": 3}
d2: AppKeyDict = {"a": 1}
cp = helpers.ChainMapProxy([d1, d2])
expected = f"ChainMapProxy({d1!r}, {d2!r})"
assert expected == repr(cp)
def test_is_expected_content_type_json_match_exact() -> None:
expected_ct = "application/json"
response_ct = "application/json"
assert is_expected_content_type(
response_content_type=response_ct, expected_content_type=expected_ct
)
def test_is_expected_content_type_json_match_partially() -> None:
expected_ct = "application/json"
response_ct = "application/alto-costmap+json" # mime-type from rfc7285
assert is_expected_content_type(
response_content_type=response_ct, expected_content_type=expected_ct
)
def test_is_expected_content_type_non_application_json_suffix() -> None:
expected_ct = "application/json"
response_ct = "model/gltf+json" # rfc 6839
assert is_expected_content_type(
response_content_type=response_ct, expected_content_type=expected_ct
)
def test_is_expected_content_type_non_application_json_private_suffix() -> None:
expected_ct = "application/json"
response_ct = "x-foo/bar+json" # rfc 6839
assert is_expected_content_type(
response_content_type=response_ct, expected_content_type=expected_ct
)
def test_is_expected_content_type_json_non_lowercase() -> None:
"""Per RFC 2045, media type matching is case insensitive."""
expected_ct = "application/json"
response_ct = "Application/JSON"
assert is_expected_content_type(
response_content_type=response_ct, expected_content_type=expected_ct
)
def test_is_expected_content_type_json_trailing_chars() -> None:
expected_ct = "application/json"
response_ct = "application/json-seq"
assert not is_expected_content_type(
response_content_type=response_ct, expected_content_type=expected_ct
)
def test_is_expected_content_type_non_json_match_exact() -> None:
expected_ct = "text/javascript"
response_ct = "text/javascript"
assert is_expected_content_type(
response_content_type=response_ct, expected_content_type=expected_ct
)
def test_is_expected_content_type_non_json_not_match() -> None:
expected_ct = "application/json"
response_ct = "text/plain"
assert not is_expected_content_type(
response_content_type=response_ct, expected_content_type=expected_ct
)
# It's necessary to subclass CookieMixin before using it.
# See the comments on its __slots__.
class CookieImplementation(helpers.CookieMixin):
pass
def test_cookies_mixin() -> None:
sut = CookieImplementation()
assert sut.cookies == {}
assert str(sut.cookies) == ""
sut.set_cookie("name", "value")
assert str(sut.cookies) == "Set-Cookie: name=value; Path=/"
sut.set_cookie("name", "")
assert str(sut.cookies) == 'Set-Cookie: name=""; Path=/'
sut.set_cookie("name", "value")
assert str(sut.cookies) == "Set-Cookie: name=value; Path=/"
sut.set_cookie("name", "other_value")
assert str(sut.cookies) == "Set-Cookie: name=other_value; Path=/"
sut.cookies["name"] = "another_other_value"
sut.cookies["name"]["max-age"] = 10
assert (
str(sut.cookies) == "Set-Cookie: name=another_other_value; Max-Age=10; Path=/"
)
sut.del_cookie("name")
expected = (
'Set-Cookie: name=""; '
"expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/"
)
assert str(sut.cookies) == expected
sut.del_cookie("name")
assert str(sut.cookies) == expected
sut.set_cookie("name", "value", domain="local.host")
expected = "Set-Cookie: name=value; Domain=local.host; Path=/"
assert str(sut.cookies) == expected
def test_cookies_mixin_path() -> None:
sut = CookieImplementation()
assert sut.cookies == {}
sut.set_cookie("name", "value", path="/some/path")
assert str(sut.cookies) == "Set-Cookie: name=value; Path=/some/path"
sut.set_cookie("name", "value", expires="123")
assert str(sut.cookies) == "Set-Cookie: name=value; expires=123; Path=/"
sut.set_cookie(
"name",
"value",
domain="example.com",
path="/home",
expires="123",
max_age="10",
secure=True,
httponly=True,
samesite="lax",
)
assert (
str(sut.cookies).lower() == "set-cookie: name=value; "
"domain=example.com; "
"expires=123; "
"httponly; "
"max-age=10; "
"path=/home; "
"samesite=lax; "
"secure"
)
@pytest.mark.skipif(sys.version_info < (3, 14), reason="No partitioned support")
def test_cookies_mixin_partitioned() -> None:
sut = CookieImplementation()
assert sut.cookies == {}
sut.set_cookie("name", "value", partitioned=False)
assert str(sut.cookies) == "Set-Cookie: name=value; Path=/"
sut.set_cookie("name", "value", partitioned=True)
assert str(sut.cookies) == "Set-Cookie: name=value; Partitioned; Path=/"
def test_cookies_mixin__issue_del_cookie() -> None:
sut = CookieImplementation()
assert sut.cookies == {}
assert str(sut.cookies) == ""
sut.del_cookie("name")
expected = (
'Set-Cookie: name=""; '
"expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/"
)
assert str(sut.cookies) == expected
def test_cookie_set_after_del() -> None:
sut = CookieImplementation()
sut.del_cookie("name")
sut.set_cookie("name", "val")
# check for Max-Age dropped
expected = "Set-Cookie: name=val; Path=/"
assert str(sut.cookies) == expected
def test_populate_with_cookies() -> None:
cookies_mixin = CookieImplementation()
cookies_mixin.set_cookie("name", "value")
headers = CIMultiDict[str]()
helpers.populate_with_cookies(headers, cookies_mixin.cookies)
assert headers == CIMultiDict({"Set-Cookie": "name=value; Path=/"})
@pytest.mark.parametrize(
["value", "expected"],
[
# email.utils.parsedate returns None
pytest.param("xxyyzz", None),
# datetime.datetime fails with ValueError("year 4446413 is out of range")
pytest.param("Tue, 08 Oct 4446413 00:56:40 GMT", None),
# datetime.datetime fails with ValueError("second must be in 0..59")
pytest.param("Tue, 08 Oct 2000 00:56:80 GMT", None),
# OK
pytest.param(
"Tue, 08 Oct 2000 00:56:40 GMT",
datetime.datetime(2000, 10, 8, 0, 56, 40, tzinfo=datetime.timezone.utc),
),
# OK (ignore timezone and overwrite to UTC)
pytest.param(
"Tue, 08 Oct 2000 00:56:40 +0900",
datetime.datetime(2000, 10, 8, 0, 56, 40, tzinfo=datetime.timezone.utc),
),
],
)
def test_parse_http_date(value: str, expected: datetime.datetime | None) -> None:
assert parse_http_date(value) == expected
@pytest.mark.parametrize(
["netrc_contents", "expected_username"],
[
(
"machine example.com login username password pass\n",
"username",
),
],
indirect=("netrc_contents",),
)
@pytest.mark.usefixtures("netrc_contents")
def test_netrc_from_env(expected_username: str) -> None:
"""Test that reading netrc files from env works as expected"""
netrc_obj = helpers.netrc_from_env()
assert netrc_obj is not None
auth = netrc_obj.authenticators("example.com")
assert auth is not None
assert auth[0] == expected_username
@pytest.fixture
def protected_dir(tmp_path: Path) -> Iterator[Path]:
protected_dir = tmp_path / "protected"
protected_dir.mkdir()
try:
protected_dir.chmod(0o600)
yield protected_dir
finally:
protected_dir.rmdir()
def test_netrc_from_home_does_not_raise_if_access_denied(
protected_dir: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
monkeypatch.setattr(Path, "home", lambda: protected_dir)
monkeypatch.delenv("NETRC", raising=False)
helpers.netrc_from_env()
@pytest.mark.parametrize(
["netrc_contents", "expected_auth"],
[
(
"machine example.com login username password pass\n",
helpers.BasicAuth("username", "pass"),
),
(
"machine example.com account username password pass\n",
helpers.BasicAuth("username", "pass"),
),
(
"machine example.com password pass\n",
helpers.BasicAuth("", "pass"),
),
],
indirect=("netrc_contents",),
)
@pytest.mark.usefixtures("netrc_contents")
def test_basicauth_present_in_netrc( # type: ignore[misc]
expected_auth: helpers.BasicAuth,
) -> None:
"""Test that netrc file contents are properly parsed into BasicAuth tuples"""
netrc_obj = helpers.netrc_from_env()
assert expected_auth == helpers.basicauth_from_netrc(netrc_obj, "example.com")
@pytest.mark.parametrize(
["netrc_contents"],
[
("",),
],
indirect=("netrc_contents",),
)
@pytest.mark.usefixtures("netrc_contents")
def test_read_basicauth_from_empty_netrc() -> None:
"""Test that an error is raised if netrc doesn't have an entry for our host"""
netrc_obj = helpers.netrc_from_env()
with pytest.raises(
LookupError, match="No entry for example.com found in the `.netrc` file."
):
helpers.basicauth_from_netrc(netrc_obj, "example.com")
def test_method_must_be_empty_body() -> None:
"""Test that HEAD is the only method that unequivocally must have an empty body."""
assert "HEAD" in EMPTY_BODY_METHODS
# CONNECT is only empty on a successful response
assert "CONNECT" not in EMPTY_BODY_METHODS
def test_should_remove_content_length_is_subset_of_must_be_empty_body() -> None:
"""Test should_remove_content_length is always a subset of must_be_empty_body."""
assert should_remove_content_length("GET", 101) is True
assert must_be_empty_body("GET", 101) is True
assert should_remove_content_length("GET", 102) is True
assert must_be_empty_body("GET", 102) is True
assert should_remove_content_length("GET", 204) is True
assert must_be_empty_body("GET", 204) is True
assert should_remove_content_length("GET", 200) is False
assert must_be_empty_body("GET", 200) is False
assert should_remove_content_length("HEAD", 200) is False
assert must_be_empty_body("HEAD", 200) is True
# CONNECT is only empty on a successful response
assert should_remove_content_length("CONNECT", 200) is True
assert must_be_empty_body("CONNECT", 200) is True
assert should_remove_content_length("CONNECT", 201) is True
assert must_be_empty_body("CONNECT", 201) is True
assert should_remove_content_length("CONNECT", 300) is False
assert must_be_empty_body("CONNECT", 300) is False
|
./temp_repos/aiohttp/aiohttp/helpers.py
|
./temp_repos/aiohttp/tests/test_helpers.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'BasicAuth'.
Context:
- Class Name: BasicAuth
- Dependencies to Mock: t, ceil_threshold, loop, name, timeout, maps
- Key Imports: yarl, collections, email.policy, pathlib, binascii, dataclasses, os, contextlib, math, propcache.api
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
|
BasicAuth
|
python
|
import asyncio
import datetime
import enum
import json
import math
import time
import warnings
from collections.abc import Iterator, MutableMapping
from concurrent.futures import Executor
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union, cast, overload
from multidict import CIMultiDict, istr
from . import hdrs, payload
from .abc import AbstractStreamWriter
from .compression_utils import ZLibCompressor
from .helpers import (
ETAG_ANY,
QUOTED_ETAG_RE,
CookieMixin,
ETag,
HeadersMixin,
ResponseKey,
must_be_empty_body,
parse_http_date,
populate_with_cookies,
rfc822_formatted_time,
sentinel,
should_remove_content_length,
validate_etag_value,
)
from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11
from .payload import Payload
from .typedefs import JSONEncoder, LooseHeaders
REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}
LARGE_BODY_SIZE = 1024**2
__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
if TYPE_CHECKING:
from .web_request import BaseRequest
_T = TypeVar("_T")
# TODO(py311): Convert to StrEnum for wider use
class ContentCoding(enum.Enum):
# The content codings that we have support for.
#
# Additional registered codings are listed at:
# https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
deflate = "deflate"
gzip = "gzip"
identity = "identity"
CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding}
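# Sketch: CONTENT_CODINGS maps Accept-Encoding tokens back to enum members,
# which is how _start_compression() below selects a coding.
assert CONTENT_CODINGS["gzip"] is ContentCoding.gzip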
############################################################
# HTTP Response classes
############################################################
class StreamResponse(
MutableMapping[str | ResponseKey[Any], Any], HeadersMixin, CookieMixin
):
_body: None | bytes | bytearray | Payload
_length_check = True
_body = None
_keep_alive: bool | None = None
_chunked: bool = False
_compression: bool = False
_compression_strategy: int | None = None
_compression_force: ContentCoding | None = None
_req: Optional["BaseRequest"] = None
_payload_writer: AbstractStreamWriter | None = None
_eof_sent: bool = False
_must_be_empty_body: bool | None = None
_body_length = 0
_send_headers_immediately = True
def __init__(
self,
*,
status: int = 200,
reason: str | None = None,
headers: LooseHeaders | None = None,
_real_headers: CIMultiDict[str] | None = None,
) -> None:
"""Initialize a new stream response object.
_real_headers is an internal parameter used to pass a pre-populated
headers object. It is used by the `Response` class to avoid copying
the headers when creating a new response object. It is not intended
to be used by external code.
"""
self._state: dict[str | ResponseKey[Any], Any] = {}
if _real_headers is not None:
self._headers = _real_headers
elif headers is not None:
self._headers: CIMultiDict[str] = CIMultiDict(headers)
else:
self._headers = CIMultiDict()
self._set_status(status, reason)
@property
def prepared(self) -> bool:
return self._eof_sent or self._payload_writer is not None
@property
def task(self) -> "asyncio.Task[None] | None":
if self._req:
return self._req.task
else:
return None
@property
def status(self) -> int:
return self._status
@property
def chunked(self) -> bool:
return self._chunked
@property
def compression(self) -> bool:
return self._compression
@property
def reason(self) -> str:
return self._reason
def set_status(
self,
status: int,
reason: str | None = None,
) -> None:
assert (
not self.prepared
), "Cannot change the response status code after the headers have been sent"
self._set_status(status, reason)
def _set_status(self, status: int, reason: str | None) -> None:
self._status = status
if reason is None:
reason = REASON_PHRASES.get(self._status, "")
elif "\n" in reason:
raise ValueError("Reason cannot contain \\n")
self._reason = reason
@property
def keep_alive(self) -> bool | None:
return self._keep_alive
def force_close(self) -> None:
self._keep_alive = False
@property
def body_length(self) -> int:
return self._body_length
def enable_chunked_encoding(self) -> None:
"""Enables automatic chunked transfer encoding."""
if hdrs.CONTENT_LENGTH in self._headers:
raise RuntimeError(
"You can't enable chunked encoding when a content length is set"
)
self._chunked = True
def enable_compression(
self,
force: ContentCoding | None = None,
strategy: int | None = None,
) -> None:
"""Enables response compression encoding."""
# Don't enable compression if content is already encoded.
# This prevents double compression and provides a safe, predictable behavior
# without breaking existing code that may call enable_compression() on
# responses that already have Content-Encoding set (e.g., FileResponse
# serving pre-compressed files).
if hdrs.CONTENT_ENCODING in self._headers:
return
self._compression = True
self._compression_force = force
self._compression_strategy = strategy
@property
def headers(self) -> "CIMultiDict[str]":
return self._headers
@property
def content_length(self) -> int | None:
# Just a placeholder for adding setter
return super().content_length
@content_length.setter
def content_length(self, value: int | None) -> None:
if value is not None:
value = int(value)
if self._chunked:
raise RuntimeError(
"You can't set content length when chunked encoding is enabled"
)
self._headers[hdrs.CONTENT_LENGTH] = str(value)
else:
self._headers.pop(hdrs.CONTENT_LENGTH, None)
@property
def content_type(self) -> str:
# Just a placeholder for adding setter
return super().content_type
@content_type.setter
def content_type(self, value: str) -> None:
self.content_type # read header values if needed
self._content_type = str(value)
self._generate_content_type_header()
@property
def charset(self) -> str | None:
# Just a placeholder for adding setter
return super().charset
@charset.setter
def charset(self, value: str | None) -> None:
ctype = self.content_type # read header values if needed
if ctype == "application/octet-stream":
raise RuntimeError(
"Setting charset for application/octet-stream "
"doesn't make sense, setup content_type first"
)
assert self._content_dict is not None
if value is None:
self._content_dict.pop("charset", None)
else:
self._content_dict["charset"] = str(value).lower()
self._generate_content_type_header()
@property
def last_modified(self) -> datetime.datetime | None:
"""The value of Last-Modified HTTP header, or None.
This header is represented as a `datetime` object.
"""
return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
@last_modified.setter
def last_modified(
self, value: int | float | datetime.datetime | str | None
) -> None:
if value is None:
self._headers.pop(hdrs.LAST_MODIFIED, None)
elif isinstance(value, (int, float)):
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
)
elif isinstance(value, datetime.datetime):
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
)
elif isinstance(value, str):
self._headers[hdrs.LAST_MODIFIED] = value
else:
msg = f"Unsupported type for last_modified: {type(value).__name__}" # type: ignore[unreachable]
raise TypeError(msg)
@property
def etag(self) -> ETag | None:
quoted_value = self._headers.get(hdrs.ETAG)
if not quoted_value:
return None
elif quoted_value == ETAG_ANY:
return ETag(value=ETAG_ANY)
match = QUOTED_ETAG_RE.fullmatch(quoted_value)
if not match:
return None
is_weak, value = match.group(1, 2)
return ETag(
is_weak=bool(is_weak),
value=value,
)
@etag.setter
def etag(self, value: ETag | str | None) -> None:
if value is None:
self._headers.pop(hdrs.ETAG, None)
elif (isinstance(value, str) and value == ETAG_ANY) or (
isinstance(value, ETag) and value.value == ETAG_ANY
):
self._headers[hdrs.ETAG] = ETAG_ANY
elif isinstance(value, str):
validate_etag_value(value)
self._headers[hdrs.ETAG] = f'"{value}"'
elif isinstance(value, ETag) and isinstance(value.value, str): # type: ignore[redundant-expr]
validate_etag_value(value.value)
hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
self._headers[hdrs.ETAG] = hdr_value
else:
raise ValueError(
f"Unsupported etag type: {type(value)}. "
f"etag must be str, ETag or None"
)
def _generate_content_type_header(
self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
) -> None:
assert self._content_dict is not None
assert self._content_type is not None
params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
if params:
ctype = self._content_type + "; " + params
else:
ctype = self._content_type
self._headers[CONTENT_TYPE] = ctype
async def _do_start_compression(self, coding: ContentCoding) -> None:
if coding is ContentCoding.identity:
return
assert self._payload_writer is not None
self._headers[hdrs.CONTENT_ENCODING] = coding.value
self._payload_writer.enable_compression(
coding.value, self._compression_strategy
)
# Compressed payload may have different content length,
# remove the header
self._headers.popall(hdrs.CONTENT_LENGTH, None)
async def _start_compression(self, request: "BaseRequest") -> None:
if self._compression_force:
await self._do_start_compression(self._compression_force)
return
# Encoding comparisons should be case-insensitive
# https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
for value, coding in CONTENT_CODINGS.items():
if value in accept_encoding:
await self._do_start_compression(coding)
return
async def prepare(self, request: "BaseRequest") -> AbstractStreamWriter | None:
if self._eof_sent:
return None
if self._payload_writer is not None:
return self._payload_writer
self._must_be_empty_body = must_be_empty_body(request.method, self.status)
return await self._start(request)
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
self._req = request
writer = self._payload_writer = request._payload_writer
await self._prepare_headers()
await request._prepare_hook(self)
await self._write_headers()
return writer
async def _prepare_headers(self) -> None:
request = self._req
assert request is not None
writer = self._payload_writer
assert writer is not None
keep_alive = self._keep_alive
if keep_alive is None:
keep_alive = request.keep_alive
self._keep_alive = keep_alive
version = request.version
headers = self._headers
if self._cookies:
populate_with_cookies(headers, self._cookies)
if self._compression:
await self._start_compression(request)
if self._chunked:
if version != HttpVersion11:
raise RuntimeError(
"Using chunked encoding is forbidden "
f"for HTTP/{request.version.major}.{request.version.minor}"
)
if not self._must_be_empty_body:
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = "chunked"
elif self._length_check: # Disabled for WebSockets
writer.length = self.content_length
if writer.length is None:
if version >= HttpVersion11:
if not self._must_be_empty_body:
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = "chunked"
elif not self._must_be_empty_body:
keep_alive = False
# HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
# HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
if self._must_be_empty_body:
if hdrs.CONTENT_LENGTH in headers and should_remove_content_length(
request.method, self.status
):
del headers[hdrs.CONTENT_LENGTH]
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13
if hdrs.TRANSFER_ENCODING in headers:
del headers[hdrs.TRANSFER_ENCODING]
elif (writer.length if self._length_check else self.content_length) != 0:
# https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5
headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
headers.setdefault(hdrs.DATE, rfc822_formatted_time())
headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)
# connection header
if hdrs.CONNECTION not in headers:
if keep_alive:
if version == HttpVersion10:
headers[hdrs.CONNECTION] = "keep-alive"
elif version == HttpVersion11:
headers[hdrs.CONNECTION] = "close"
async def _write_headers(self) -> None:
request = self._req
assert request is not None
writer = self._payload_writer
assert writer is not None
# status line
version = request.version
status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}"
await writer.write_headers(status_line, self._headers)
# Send headers immediately if not opted into buffering
if self._send_headers_immediately:
writer.send_headers()
async def write(
self, data: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]
) -> None:
assert isinstance(
data, (bytes, bytearray, memoryview)
), "data argument must be byte-ish (%r)" % type(data)
if self._eof_sent:
raise RuntimeError("Cannot call write() after write_eof()")
if self._payload_writer is None:
raise RuntimeError("Cannot call write() before prepare()")
await self._payload_writer.write(data)
async def drain(self) -> None:
assert not self._eof_sent, "EOF has already been sent"
assert self._payload_writer is not None, "Response has not been started"
warnings.warn(
"drain method is deprecated, use await resp.write()",
DeprecationWarning,
stacklevel=2,
)
await self._payload_writer.drain()
async def write_eof(self, data: bytes = b"") -> None:
assert isinstance(
data, (bytes, bytearray, memoryview)
), "data argument must be byte-ish (%r)" % type(data)
if self._eof_sent:
return
assert self._payload_writer is not None, "Response has not been started"
await self._payload_writer.write_eof(data)
self._eof_sent = True
self._req = None
self._body_length = self._payload_writer.output_size
self._payload_writer = None
def __repr__(self) -> str:
if self._eof_sent:
info = "eof"
elif self.prepared:
assert self._req is not None
info = f"{self._req.method} {self._req.path} "
else:
info = "not prepared"
return f"<{self.__class__.__name__} {self.reason} {info}>"
@overload # type: ignore[override]
def __getitem__(self, key: ResponseKey[_T]) -> _T: ...
@overload
def __getitem__(self, key: str) -> Any: ...
def __getitem__(self, key: str | ResponseKey[_T]) -> Any:
return self._state[key]
@overload # type: ignore[override]
def __setitem__(self, key: ResponseKey[_T], value: _T) -> None: ...
@overload
def __setitem__(self, key: str, value: Any) -> None: ...
def __setitem__(self, key: str | ResponseKey[_T], value: Any) -> None:
self._state[key] = value
def __delitem__(self, key: str | ResponseKey[_T]) -> None:
del self._state[key]
def __len__(self) -> int:
return len(self._state)
def __iter__(self) -> Iterator[str | ResponseKey[Any]]:
return iter(self._state)
def __hash__(self) -> int:
return hash(id(self))
def __eq__(self, other: object) -> bool:
return self is other
def __bool__(self) -> bool:
return True
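# Illustrative handler sketch (not part of the module; names are hypothetical):
# the usual StreamResponse lifecycle is prepare() -> write() -> write_eof().
async def _streaming_handler(request: "BaseRequest") -> StreamResponse:
    resp = StreamResponse(status=200)
    resp.content_type = "text/plain"
    await resp.prepare(request)   # sends status line and headers
    await resp.write(b"hello, ")  # body chunks are only valid after prepare()
    await resp.write(b"world")
    await resp.write_eof()        # finalizes the body and releases the writer
    return resp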
class Response(StreamResponse):
_compressed_body: bytes | None = None
_send_headers_immediately = False
def __init__(
self,
*,
body: Any = None,
status: int = 200,
reason: str | None = None,
text: str | None = None,
headers: LooseHeaders | None = None,
content_type: str | None = None,
charset: str | None = None,
zlib_executor_size: int | None = None,
zlib_executor: Executor | None = None,
) -> None:
if body is not None and text is not None:
raise ValueError("body and text are not allowed together")
if headers is None:
real_headers: CIMultiDict[str] = CIMultiDict()
else:
real_headers = CIMultiDict(headers)
if content_type is not None and "charset" in content_type:
raise ValueError("charset must not be in content_type argument")
if text is not None:
if hdrs.CONTENT_TYPE in real_headers:
if content_type or charset:
raise ValueError(
"passing both Content-Type header and "
"content_type or charset params "
"is forbidden"
)
else:
# fast path for filling headers
if not isinstance(text, str):
raise TypeError("text argument must be str (%r)" % type(text))
if content_type is None:
content_type = "text/plain"
if charset is None:
charset = "utf-8"
real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
body = text.encode(charset)
text = None
elif hdrs.CONTENT_TYPE in real_headers:
if content_type is not None or charset is not None:
raise ValueError(
"passing both Content-Type header and "
"content_type or charset params "
"is forbidden"
)
elif content_type is not None:
if charset is not None:
content_type += "; charset=" + charset
real_headers[hdrs.CONTENT_TYPE] = content_type
super().__init__(status=status, reason=reason, _real_headers=real_headers)
if text is not None:
self.text = text
else:
self.body = body
self._zlib_executor_size = zlib_executor_size
self._zlib_executor = zlib_executor
@property
def body(self) -> bytes | bytearray | Payload | None:
return self._body
@body.setter
def body(self, body: Any) -> None:
if body is None:
self._body = None
elif isinstance(body, (bytes, bytearray)):
self._body = body
else:
try:
self._body = body = payload.PAYLOAD_REGISTRY.get(body)
except payload.LookupError:
raise ValueError("Unsupported body type %r" % type(body))
headers = self._headers
# set content-type
if hdrs.CONTENT_TYPE not in headers:
headers[hdrs.CONTENT_TYPE] = body.content_type
# copy payload headers
if body.headers:
for key, value in body.headers.items():
if key not in headers:
headers[key] = value
self._compressed_body = None
@property
def text(self) -> str | None:
if self._body is None:
return None
# Note: When _body is a Payload (e.g. FilePayload), this may do blocking I/O
# This is generally safe as most common payloads (BytesPayload, StringPayload)
# don't do blocking I/O, but be careful with file-based payloads
return self._body.decode(self.charset or "utf-8")
@text.setter
def text(self, text: str) -> None:
assert isinstance(text, str), "text argument must be str (%r)" % type(text)
if self.content_type == "application/octet-stream":
self.content_type = "text/plain"
if self.charset is None:
self.charset = "utf-8"
self._body = text.encode(self.charset)
self._compressed_body = None
@property
def content_length(self) -> int | None:
if self._chunked:
return None
if hdrs.CONTENT_LENGTH in self._headers:
return int(self._headers[hdrs.CONTENT_LENGTH])
if self._compressed_body is not None:
# Return length of the compressed body
return len(self._compressed_body)
elif isinstance(self._body, Payload):
# A payload without content length, or a compressed payload
return None
elif self._body is not None:
return len(self._body)
else:
return 0
@content_length.setter
def content_length(self, value: int | None) -> None:
raise RuntimeError("Content length is set automatically")
async def write_eof(self, data: bytes = b"") -> None:
if self._eof_sent:
return
if self._compressed_body is None:
body = self._body
else:
body = self._compressed_body
assert not data, f"data arg is not supported, got {data!r}"
assert self._req is not None
assert self._payload_writer is not None
if body is None or self._must_be_empty_body:
await super().write_eof()
elif isinstance(self._body, Payload):
await self._body.write(self._payload_writer)
await self._body.close()
await super().write_eof()
else:
await super().write_eof(cast(bytes, body))
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
if hdrs.CONTENT_LENGTH in self._headers:
if should_remove_content_length(request.method, self.status):
del self._headers[hdrs.CONTENT_LENGTH]
elif not self._chunked:
if isinstance(self._body, Payload):
if (size := self._body.size) is not None:
self._headers[hdrs.CONTENT_LENGTH] = str(size)
else:
body_len = len(self._body) if self._body else "0"
# https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7
if body_len != "0" or (
self.status != 304 and request.method not in hdrs.METH_HEAD_ALL
):
self._headers[hdrs.CONTENT_LENGTH] = str(body_len)
return await super()._start(request)
async def _do_start_compression(self, coding: ContentCoding) -> None:
if self._chunked or isinstance(self._body, Payload):
return await super()._do_start_compression(coding)
if coding is ContentCoding.identity:
return
# Instead of using _payload_writer.enable_compression,
# compress the whole body
compressor = ZLibCompressor(
encoding=coding.value,
max_sync_chunk_size=self._zlib_executor_size,
executor=self._zlib_executor,
)
assert self._body is not None
if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE:
warnings.warn(
"Synchronous compression of large response bodies "
f"({len(self._body)} bytes) might block the async event loop. "
"Consider providing a custom value to zlib_executor_size/"
"zlib_executor response properties or disabling compression on it."
)
self._compressed_body = (
await compressor.compress(self._body) + compressor.flush()
)
self._headers[hdrs.CONTENT_ENCODING] = coding.value
self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
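# Sketch addressing the warning above (values are hypothetical): hand
# large-body compression off to a thread pool instead of blocking the loop.
from concurrent.futures import ThreadPoolExecutor

_zlib_pool = ThreadPoolExecutor(max_workers=1)

def _compressed_response(raw: bytes) -> Response:
    resp = Response(body=raw, zlib_executor=_zlib_pool, zlib_executor_size=64 * 1024)
    resp.enable_compression(ContentCoding.gzip)  # body is compressed at prepare time
    return resp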
def json_response(
data: Any = sentinel,
*,
text: str | None = None,
body: bytes | None = None,
status: int = 200,
reason: str | None = None,
headers: LooseHeaders | None = None,
content_type: str = "application/json",
dumps: JSONEncoder = json.dumps,
) -> Response:
if data is not sentinel:
if text or body:
raise ValueError("only one of data, text, or body should be specified")
else:
text = dumps(data)
return Response(
text=text,
body=body,
status=status,
reason=reason,
headers=headers,
content_type=content_type,
)
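# Sketch of the common json_response call patterns (payloads illustrative):
# passing `data` serializes it with `dumps`; `text` and `body` are passed
# through, and combining `data` with either of them raises ValueError.
_ok = json_response({"ok": True})          # Content-Type: application/json
_raw = json_response(text='{"ok": true}')  # pre-serialized JSON text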
|
import collections.abc
import datetime
import gzip
import io
import json
import re
import sys
import weakref
from collections.abc import AsyncIterator, Iterator
from concurrent.futures import ThreadPoolExecutor
from unittest import mock
import aiosignal
import pytest
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict
from aiohttp import HttpVersion, HttpVersion10, HttpVersion11, hdrs, web
from aiohttp.abc import AbstractStreamWriter
from aiohttp.helpers import ETag
from aiohttp.http_writer import StreamWriter, _serialize_headers
from aiohttp.multipart import BodyPartReader, MultipartWriter
from aiohttp.payload import BytesPayload, StringPayload
from aiohttp.test_utils import make_mocked_request
from aiohttp.typedefs import LooseHeaders
def make_request(
method: str,
path: str,
headers: LooseHeaders = CIMultiDict(),
version: HttpVersion = HttpVersion11,
*,
app: web.Application | None = None,
writer: AbstractStreamWriter | None = None,
) -> web.Request:
if app is None:
app = mock.create_autospec(
web.Application, spec_set=True, on_response_prepare=aiosignal.Signal(app)
)
app.on_response_prepare.freeze()
return make_mocked_request(
method, path, headers, version=version, app=app, writer=writer
)
@pytest.fixture
def buf() -> bytearray:
return bytearray()
@pytest.fixture
def writer(buf: bytearray) -> AbstractStreamWriter:
writer = mock.create_autospec(AbstractStreamWriter, spec_set=True)
async def write_headers(status_line: str, headers: CIMultiDict[str]) -> None:
b_headers = _serialize_headers(status_line, headers)
buf.extend(b_headers)
async def write_eof(chunk: bytes = b"") -> None:
buf.extend(chunk)
writer.write_eof.side_effect = write_eof
writer.write_headers.side_effect = write_headers
return writer # type: ignore[no-any-return]
def test_stream_response_ctor() -> None:
resp = web.StreamResponse()
assert 200 == resp.status
assert resp.keep_alive is None
assert resp.task is None
req = mock.Mock()
resp._req = req
assert resp.task is req.task
def test_stream_response_hashable() -> None:
# should not raise exception
hash(web.StreamResponse())
def test_stream_response_eq() -> None:
resp1 = web.StreamResponse()
resp2 = web.StreamResponse()
assert resp1 == resp1
assert not resp1 == resp2
def test_stream_response_is_mutable_mapping() -> None:
resp = web.StreamResponse()
assert isinstance(resp, collections.abc.MutableMapping)
assert resp # even when the MutableMapping is empty, response should always be True
resp["key"] = "value"
assert "value" == resp["key"]
def test_stream_response_delitem() -> None:
resp = web.StreamResponse()
resp["key"] = "value"
del resp["key"]
assert "key" not in resp
def test_stream_response_len() -> None:
resp = web.StreamResponse()
assert len(resp) == 0
resp["key"] = "value"
assert len(resp) == 1
def test_response_iter() -> None:
resp = web.StreamResponse()
resp["key"] = "value"
resp["key2"] = "value2"
key3 = web.ResponseKey("key3", str)
resp[key3] = "value3"
assert set(resp) == {"key", "key2", key3}
def test_responsekey() -> None:
resp = web.StreamResponse()
key = web.ResponseKey("key", str)
resp[key] = "value"
assert resp[key] == "value"
assert len(resp) == 1
del resp[key]
assert len(resp) == 0
def test_response_get_responsekey() -> None:
resp = web.StreamResponse()
key = web.ResponseKey("key", int)
assert resp.get(key, "foo") == "foo"
resp[key] = 5
assert resp.get(key, "foo") == 5
def test_responsekey_repr_concrete() -> None:
key = web.ResponseKey("key", int)
assert repr(key) in (
"<ResponseKey(__channelexec__.key, type=int)>", # pytest-xdist
"<ResponseKey(__main__.key, type=int)>",
)
key2 = web.ResponseKey("key", web.Request)
assert repr(key2) in (
# pytest-xdist:
"<ResponseKey(__channelexec__.key, type=aiohttp.web_request.Request)>",
"<ResponseKey(__main__.key, type=aiohttp.web_request.Request)>",
)
def test_responsekey_repr_nonconcrete() -> None:
key = web.ResponseKey("key", Iterator[int])
if sys.version_info < (3, 11):
assert repr(key) in (
# pytest-xdist:
"<ResponseKey(__channelexec__.key, type=collections.abc.Iterator)>",
"<ResponseKey(__main__.key, type=collections.abc.Iterator)>",
)
else:
assert repr(key) in (
# pytest-xdist:
"<ResponseKey(__channelexec__.key, type=collections.abc.Iterator[int])>",
"<ResponseKey(__main__.key, type=collections.abc.Iterator[int])>",
)
def test_responsekey_repr_annotated() -> None:
key = web.ResponseKey[Iterator[int]]("key")
if sys.version_info < (3, 11):
assert repr(key) in (
# pytest-xdist:
"<ResponseKey(__channelexec__.key, type=collections.abc.Iterator)>",
"<ResponseKey(__main__.key, type=collections.abc.Iterator)>",
)
else:
assert repr(key) in (
# pytest-xdist:
"<ResponseKey(__channelexec__.key, type=collections.abc.Iterator[int])>",
"<ResponseKey(__main__.key, type=collections.abc.Iterator[int])>",
)
def test_content_length() -> None:
resp = web.StreamResponse()
assert resp.content_length is None
def test_content_length_setter() -> None:
resp = web.StreamResponse()
resp.content_length = 234
assert 234 == resp.content_length
def test_content_length_setter_with_enable_chunked_encoding() -> None:
resp = web.StreamResponse()
resp.enable_chunked_encoding()
with pytest.raises(RuntimeError):
resp.content_length = 234
def test_drop_content_length_header_on_setting_len_to_None() -> None:
resp = web.StreamResponse()
resp.content_length = 1
assert "1" == resp.headers["Content-Length"]
resp.content_length = None
assert "Content-Length" not in resp.headers
def test_set_content_length_to_None_on_non_set() -> None:
resp = web.StreamResponse()
resp.content_length = None
assert "Content-Length" not in resp.headers
resp.content_length = None
assert "Content-Length" not in resp.headers
def test_setting_content_type() -> None:
resp = web.StreamResponse()
resp.content_type = "text/html"
assert "text/html" == resp.headers["content-type"]
def test_setting_charset() -> None:
resp = web.StreamResponse()
resp.content_type = "text/html"
resp.charset = "koi8-r"
assert "text/html; charset=koi8-r" == resp.headers["content-type"]
def test_default_charset() -> None:
resp = web.StreamResponse()
assert resp.charset is None
def test_reset_charset() -> None:
resp = web.StreamResponse()
resp.content_type = "text/html"
resp.charset = None
assert resp.charset is None
def test_reset_charset_after_setting() -> None:
resp = web.StreamResponse()
resp.content_type = "text/html"
resp.charset = "koi8-r"
resp.charset = None
assert resp.charset is None
def test_charset_without_content_type() -> None:
resp = web.StreamResponse()
with pytest.raises(RuntimeError):
resp.charset = "koi8-r"
def test_last_modified_initial() -> None:
resp = web.StreamResponse()
assert resp.last_modified is None
def test_last_modified_string() -> None:
resp = web.StreamResponse()
dt = datetime.datetime(1990, 1, 2, 3, 4, 5, 0, datetime.timezone.utc)
resp.last_modified = "Mon, 2 Jan 1990 03:04:05 GMT"
assert resp.last_modified == dt
def test_last_modified_timestamp() -> None:
resp = web.StreamResponse()
dt = datetime.datetime(1970, 1, 1, 0, 0, 0, 0, datetime.timezone.utc)
resp.last_modified = 0
assert resp.last_modified == dt
resp.last_modified = 0.0
assert resp.last_modified == dt
def test_last_modified_datetime() -> None:
resp = web.StreamResponse()
dt = datetime.datetime(2001, 2, 3, 4, 5, 6, 0, datetime.timezone.utc)
resp.last_modified = dt
assert resp.last_modified == dt
def test_last_modified_reset() -> None:
resp = web.StreamResponse()
resp.last_modified = 0
resp.last_modified = None
assert resp.last_modified is None
def test_last_modified_invalid_type() -> None:
resp = web.StreamResponse()
with pytest.raises(TypeError, match="Unsupported type for last_modified: object"):
resp.last_modified = object() # type: ignore[assignment]
@pytest.mark.parametrize(
"header_val",
(
"xxyyzz",
"Tue, 08 Oct 4446413 00:56:40 GMT",
"Tue, 08 Oct 2000 00:56:80 GMT",
),
)
def test_last_modified_string_invalid(header_val: str) -> None:
resp = web.StreamResponse(headers={"Last-Modified": header_val})
assert resp.last_modified is None
def test_etag_initial() -> None:
resp = web.StreamResponse()
assert resp.etag is None
def test_etag_string() -> None:
resp = web.StreamResponse()
value = "0123-kotik"
resp.etag = value
assert resp.etag == ETag(value=value)
assert resp.headers[hdrs.ETAG] == f'"{value}"'
@pytest.mark.parametrize(
("etag", "expected_header"),
(
(ETag(value="0123-weak-kotik", is_weak=True), 'W/"0123-weak-kotik"'),
(ETag(value="0123-strong-kotik", is_weak=False), '"0123-strong-kotik"'),
),
)
def test_etag_class(etag: ETag, expected_header: str) -> None:
resp = web.StreamResponse()
resp.etag = etag
assert resp.etag == etag
assert resp.headers[hdrs.ETAG] == expected_header
def test_etag_any() -> None:
resp = web.StreamResponse()
resp.etag = "*"
assert resp.etag == ETag(value="*")
assert resp.headers[hdrs.ETAG] == "*"
@pytest.mark.parametrize(
"invalid_value",
(
'"invalid"',
"повинен бути ascii",
ETag(value='"invalid"', is_weak=True),
ETag(value="bad ©®"),
),
)
def test_etag_invalid_value_set(invalid_value: str | ETag) -> None:
resp = web.StreamResponse()
with pytest.raises(ValueError, match="is not a valid etag"):
resp.etag = invalid_value
@pytest.mark.parametrize(
"header",
(
"forgotten quotes",
'"∀ x ∉ ascii"',
),
)
def test_etag_invalid_value_get(header: str) -> None:
resp = web.StreamResponse()
resp.headers["ETag"] = header
assert resp.etag is None
@pytest.mark.parametrize("invalid", (123, ETag(value=123, is_weak=True))) # type: ignore[arg-type]
def test_etag_invalid_value_class(invalid: int | ETag) -> None:
resp = web.StreamResponse()
with pytest.raises(ValueError, match="Unsupported etag type"):
resp.etag = invalid # type: ignore[assignment]
def test_etag_reset() -> None:
resp = web.StreamResponse()
resp.etag = "*"
resp.etag = None
assert resp.etag is None
async def test_start() -> None:
req = make_request("GET", "/")
resp = web.StreamResponse()
assert resp.keep_alive is None
msg = await resp.prepare(req)
assert msg is not None
assert msg.write_headers.called # type: ignore[attr-defined]
msg2 = await resp.prepare(req)
assert msg is msg2
assert resp.keep_alive
req2 = make_request("GET", "/") # type: ignore[unreachable]
# with pytest.raises(RuntimeError):
msg3 = await resp.prepare(req2)
assert msg is msg3
async def test_chunked_encoding() -> None:
req = make_request("GET", "/")
resp = web.StreamResponse()
assert not resp.chunked
resp.enable_chunked_encoding()
assert resp.chunked
msg = await resp.prepare(req) # type: ignore[unreachable]
assert msg.chunked
def test_enable_chunked_encoding_with_content_length() -> None:
resp = web.StreamResponse()
resp.content_length = 234
with pytest.raises(RuntimeError):
resp.enable_chunked_encoding()
async def test_chunked_encoding_forbidden_for_http_10() -> None:
req = make_request("GET", "/", version=HttpVersion10)
resp = web.StreamResponse()
resp.enable_chunked_encoding()
with pytest.raises(RuntimeError) as ctx:
await resp.prepare(req)
assert str(ctx.value) == "Using chunked encoding is forbidden for HTTP/1.0"
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_compression_no_accept() -> None:
req = make_request("GET", "/")
resp = web.StreamResponse()
assert not resp.chunked
assert not resp.compression
resp.enable_compression()
assert resp.compression
msg = await resp.prepare(req) # type: ignore[unreachable]
assert not msg.enable_compression.called
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_compression_default_coding() -> None:
req = make_request(
"GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"})
)
resp = web.StreamResponse()
assert not resp.chunked
assert not resp.compression
resp.enable_compression()
assert resp.compression
msg = await resp.prepare(req) # type: ignore[unreachable]
msg.enable_compression.assert_called_with("deflate", None)
assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING)
assert msg.filter is not None
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_force_compression_deflate() -> None:
req = make_request(
"GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"})
)
resp = web.StreamResponse()
resp.enable_compression(web.ContentCoding.deflate)
assert resp.compression
msg = await resp.prepare(req)
assert msg is not None
msg.enable_compression.assert_called_with("deflate", None) # type: ignore[attr-defined]
assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING)
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_force_compression_deflate_large_payload() -> None:
"""Make sure a warning is thrown for large payloads compressed in the event loop."""
req = make_request(
"GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"})
)
resp = web.Response(body=b"large")
resp.enable_compression(web.ContentCoding.deflate)
assert resp.compression
with (
pytest.warns(Warning, match="Synchronous compression of large response bodies"),
mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2),
):
msg = await resp.prepare(req)
assert msg is not None
assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING)
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_force_compression_no_accept_deflate() -> None:
req = make_request("GET", "/")
resp = web.StreamResponse()
resp.enable_compression(web.ContentCoding.deflate)
assert resp.compression
msg = await resp.prepare(req)
assert msg is not None
msg.enable_compression.assert_called_with("deflate", None) # type: ignore[attr-defined]
assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING)
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_force_compression_gzip() -> None:
req = make_request(
"GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"})
)
resp = web.StreamResponse()
resp.enable_compression(web.ContentCoding.gzip)
assert resp.compression
msg = await resp.prepare(req)
assert msg is not None
msg.enable_compression.assert_called_with("gzip", None) # type: ignore[attr-defined]
assert "gzip" == resp.headers.get(hdrs.CONTENT_ENCODING)
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_force_compression_no_accept_gzip() -> None:
req = make_request("GET", "/")
resp = web.StreamResponse()
resp.enable_compression(web.ContentCoding.gzip)
assert resp.compression
msg = await resp.prepare(req)
assert msg is not None
msg.enable_compression.assert_called_with("gzip", None) # type: ignore[attr-defined]
assert "gzip" == resp.headers.get(hdrs.CONTENT_ENCODING)
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_change_content_threaded_compression_enabled() -> None:
req = make_request("GET", "/")
body_thread_size = 1024
body = b"answer" * body_thread_size
resp = web.Response(body=body, zlib_executor_size=body_thread_size)
resp.enable_compression(web.ContentCoding.gzip)
await resp.prepare(req)
assert resp._compressed_body is not None
assert gzip.decompress(resp._compressed_body) == body
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_change_content_threaded_compression_enabled_explicit() -> None:
req = make_request("GET", "/")
body_thread_size = 1024
body = b"answer" * body_thread_size
with ThreadPoolExecutor(1) as executor:
resp = web.Response(
body=body, zlib_executor_size=body_thread_size, zlib_executor=executor
)
resp.enable_compression(web.ContentCoding.gzip)
await resp.prepare(req)
assert resp._compressed_body is not None
assert gzip.decompress(resp._compressed_body) == body
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_change_content_length_if_compression_enabled() -> None:
req = make_request("GET", "/")
resp = web.Response(body=b"answer")
resp.enable_compression(web.ContentCoding.gzip)
await resp.prepare(req)
assert resp.content_length is not None and resp.content_length != len(b"answer")
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_set_content_length_if_compression_enabled() -> None:
writer = mock.Mock()
async def write_headers(status_line: str, headers: CIMultiDict[str]) -> None:
assert hdrs.CONTENT_LENGTH in headers
assert headers[hdrs.CONTENT_LENGTH] == "26"
assert hdrs.TRANSFER_ENCODING not in headers
writer.write_headers.side_effect = write_headers
req = make_request("GET", "/", writer=writer)
resp = web.Response(body=b"answer")
resp.enable_compression(web.ContentCoding.gzip)
await resp.prepare(req)
assert resp.content_length == 26
del resp.headers[hdrs.CONTENT_LENGTH]
assert resp.content_length == 26
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_remove_content_length_if_compression_enabled_http11() -> None:
writer = mock.Mock()
async def write_headers(status_line: str, headers: CIMultiDict[str]) -> None:
assert hdrs.CONTENT_LENGTH not in headers
assert headers.get(hdrs.TRANSFER_ENCODING, "") == "chunked"
writer.write_headers.side_effect = write_headers
req = make_request("GET", "/", writer=writer)
resp = web.StreamResponse()
resp.content_length = 123
resp.enable_compression(web.ContentCoding.gzip)
await resp.prepare(req)
assert resp.content_length is None
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_remove_content_length_if_compression_enabled_http10() -> None:
writer = mock.Mock()
async def write_headers(status_line: str, headers: CIMultiDict[str]) -> None:
assert hdrs.CONTENT_LENGTH not in headers
assert hdrs.TRANSFER_ENCODING not in headers
writer.write_headers.side_effect = write_headers
req = make_request("GET", "/", version=HttpVersion10, writer=writer)
resp = web.StreamResponse()
resp.content_length = 123
resp.enable_compression(web.ContentCoding.gzip)
await resp.prepare(req)
assert resp.content_length is None
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_force_compression_identity() -> None:
writer = mock.Mock()
async def write_headers(status_line: str, headers: CIMultiDict[str]) -> None:
assert hdrs.CONTENT_LENGTH in headers
assert hdrs.TRANSFER_ENCODING not in headers
writer.write_headers.side_effect = write_headers
req = make_request("GET", "/", writer=writer)
resp = web.StreamResponse()
resp.content_length = 123
resp.enable_compression(web.ContentCoding.identity)
await resp.prepare(req)
assert resp.content_length == 123
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_force_compression_identity_response() -> None:
writer = mock.Mock()
async def write_headers(status_line: str, headers: CIMultiDict[str]) -> None:
assert headers[hdrs.CONTENT_LENGTH] == "6"
assert hdrs.TRANSFER_ENCODING not in headers
writer.write_headers.side_effect = write_headers
req = make_request("GET", "/", writer=writer)
resp = web.Response(body=b"answer")
resp.enable_compression(web.ContentCoding.identity)
await resp.prepare(req)
assert resp.content_length == 6
async def test_enable_compression_with_existing_encoding() -> None:
"""Test that enable_compression does not override existing content encoding."""
writer = mock.Mock()
async def write_headers(status_line: str, headers: CIMultiDict[str]) -> None:
# Should preserve the existing content encoding
assert headers[hdrs.CONTENT_ENCODING] == "gzip"
# Should not have double encoding
assert headers.get(hdrs.CONTENT_ENCODING) != "gzip, deflate"
writer.write_headers.side_effect = write_headers
req = make_request("GET", "/", writer=writer)
resp = web.Response(body=b"answer")
# Manually set content encoding (simulating FileResponse with pre-compressed file)
resp.headers[hdrs.CONTENT_ENCODING] = "gzip"
# Try to enable compression - should be ignored
resp.enable_compression(web.ContentCoding.deflate)
await resp.prepare(req)
# Verify compression was not enabled due to existing encoding
assert not resp.compression
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_rm_content_length_if_compression_http11() -> None:
writer = mock.Mock()
async def write_headers(status_line: str, headers: CIMultiDict[str]) -> None:
assert hdrs.CONTENT_LENGTH not in headers
assert headers.get(hdrs.TRANSFER_ENCODING, "") == "chunked"
writer.write_headers.side_effect = write_headers
req = make_request("GET", "/", writer=writer)
payload = BytesPayload(b"answer", headers={"X-Test-Header": "test"})
resp = web.Response(body=payload)
resp.body = payload
resp.enable_compression(web.ContentCoding.gzip)
await resp.prepare(req)
assert resp.content_length is None
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_rm_content_length_if_compression_http10() -> None:
writer = mock.Mock()
async def write_headers(status_line: str, headers: CIMultiDict[str]) -> None:
assert hdrs.CONTENT_LENGTH not in headers
assert hdrs.TRANSFER_ENCODING not in headers
writer.write_headers.side_effect = write_headers
req = make_request("GET", "/", version=HttpVersion10, writer=writer)
resp = web.Response(body=BytesPayload(b"answer"))
resp.enable_compression(web.ContentCoding.gzip)
await resp.prepare(req)
assert resp.content_length is None
async def test_rm_content_length_if_204() -> None:
"""Ensure content-length is removed for 204 responses."""
writer = mock.create_autospec(StreamWriter, spec_set=True, instance=True)
async def write_headers(status_line: str, headers: CIMultiDict[str]) -> None:
assert hdrs.CONTENT_LENGTH not in headers
writer.write_headers.side_effect = write_headers
req = make_request("GET", "/", writer=writer)
payload = BytesPayload(b"answer", headers={"Content-Length": "6"})
resp = web.Response(body=payload, status=204)
resp.body = payload
await resp.prepare(req)
assert resp.content_length is None
@pytest.mark.parametrize("status", (100, 101, 204, 304))
async def test_rm_transfer_encoding_rfc_9112_6_3_http_11(status: int) -> None:
"""Remove transfer encoding for RFC 9112 sec 6.3 with HTTP/1.1."""
writer = mock.create_autospec(StreamWriter, spec_set=True, instance=True)
req = make_request("GET", "/", version=HttpVersion11, writer=writer)
resp = web.Response(status=status, headers={hdrs.TRANSFER_ENCODING: "chunked"})
await resp.prepare(req)
assert resp.content_length == 0
assert not resp.chunked
assert hdrs.CONTENT_LENGTH not in resp.headers
assert hdrs.TRANSFER_ENCODING not in resp.headers
@pytest.mark.parametrize("status", (100, 101, 102, 204, 304))
async def test_rm_content_length_1xx_204_304_responses(status: int) -> None:
"""Remove content length for 1xx, 204, and 304 responses.
    Content-Length is forbidden for 1xx and 204:
    https://datatracker.ietf.org/doc/html/rfc7230#section-3.3.2
    Content-Length is discouraged for 304:
    https://datatracker.ietf.org/doc/html/rfc7232#section-4.1
"""
writer = mock.create_autospec(StreamWriter, spec_set=True, instance=True)
req = make_request("GET", "/", version=HttpVersion11, writer=writer)
resp = web.Response(status=status, body="answer")
await resp.prepare(req)
assert not resp.chunked
assert hdrs.CONTENT_LENGTH not in resp.headers
assert hdrs.TRANSFER_ENCODING not in resp.headers
async def test_head_response_keeps_content_length_of_original_body() -> None:
"""Verify HEAD response keeps the content length of the original body HTTP/1.1."""
writer = mock.create_autospec(StreamWriter, spec_set=True, instance=True)
req = make_request("HEAD", "/", version=HttpVersion11, writer=writer)
resp = web.Response(status=200, body=b"answer")
await resp.prepare(req)
assert resp.content_length == 6
assert not resp.chunked
assert resp.headers[hdrs.CONTENT_LENGTH] == "6"
assert hdrs.TRANSFER_ENCODING not in resp.headers
async def test_head_response_omits_content_length_when_body_unset() -> None:
"""Verify HEAD response omits content-length body when its unset."""
writer = mock.create_autospec(StreamWriter, spec_set=True, instance=True)
req = make_request("HEAD", "/", version=HttpVersion11, writer=writer)
resp = web.Response(status=200)
await resp.prepare(req)
assert resp.content_length == 0
assert not resp.chunked
assert hdrs.CONTENT_LENGTH not in resp.headers
assert hdrs.TRANSFER_ENCODING not in resp.headers
async def test_304_response_omits_content_length_when_body_unset() -> None:
"""Verify 304 response omits content-length body when its unset."""
writer = mock.create_autospec(StreamWriter, spec_set=True, instance=True)
req = make_request("GET", "/", version=HttpVersion11, writer=writer)
resp = web.Response(status=304)
await resp.prepare(req)
assert resp.content_length == 0
assert not resp.chunked
assert hdrs.CONTENT_LENGTH not in resp.headers
assert hdrs.TRANSFER_ENCODING not in resp.headers
async def test_content_length_on_chunked() -> None:
req = make_request("GET", "/")
resp = web.Response(body=b"answer")
assert resp.content_length == 6
resp.enable_chunked_encoding()
assert resp.content_length is None
await resp.prepare(req) # type: ignore[unreachable]
async def test_write_non_byteish() -> None:
resp = web.StreamResponse()
await resp.prepare(make_request("GET", "/"))
with pytest.raises(AssertionError):
await resp.write(123) # type: ignore[arg-type]
async def test_write_before_start() -> None:
resp = web.StreamResponse()
with pytest.raises(RuntimeError):
await resp.write(b"data")
async def test_cannot_write_after_eof() -> None:
resp = web.StreamResponse()
req = make_request("GET", "/")
await resp.prepare(req)
await resp.write(b"data")
await resp.write_eof()
req.writer.write.reset_mock() # type: ignore[attr-defined]
with pytest.raises(RuntimeError):
await resp.write(b"next data")
assert not req.writer.write.called # type: ignore[attr-defined]
async def test___repr___after_eof() -> None:
resp = web.StreamResponse()
await resp.prepare(make_request("GET", "/"))
await resp.write(b"data")
await resp.write_eof()
resp_repr = repr(resp)
assert resp_repr == "<StreamResponse OK eof>"
async def test_cannot_write_eof_before_headers() -> None:
resp = web.StreamResponse()
with pytest.raises(AssertionError):
await resp.write_eof()
async def test_cannot_write_eof_twice() -> None:
resp = web.StreamResponse()
writer = mock.create_autospec(AbstractStreamWriter, spec_set=True)
writer.write.return_value = None
writer.write_eof.return_value = None
resp_impl = await resp.prepare(make_request("GET", "/", writer=writer))
await resp.write(b"data")
assert resp_impl is not None
assert resp_impl.write.called # type: ignore[attr-defined]
await resp.write_eof()
resp_impl.write.reset_mock() # type: ignore[attr-defined]
await resp.write_eof()
assert not writer.write.called
def test_force_close() -> None:
resp = web.StreamResponse()
assert resp.keep_alive is None
resp.force_close()
assert resp.keep_alive is False
def test_set_status_with_reason() -> None:
resp = web.StreamResponse()
resp.set_status(200, "Everything is fine!")
assert 200 == resp.status
assert "Everything is fine!" == resp.reason
def test_set_status_with_empty_reason() -> None:
resp = web.StreamResponse()
resp.set_status(200, "")
assert resp.status == 200
assert resp.reason == ""
async def test_start_force_close() -> None:
req = make_request("GET", "/")
resp = web.StreamResponse()
resp.force_close()
assert not resp.keep_alive
await resp.prepare(req)
assert not resp.keep_alive
async def test___repr__() -> None:
req = make_request("GET", "/path/to")
resp = web.StreamResponse(reason="foo")
await resp.prepare(req)
assert "<StreamResponse foo GET /path/to >" == repr(resp)
def test___repr___not_prepared() -> None:
resp = web.StreamResponse(reason="foo")
assert "<StreamResponse foo not prepared>" == repr(resp)
async def test_keep_alive_http10_default() -> None:
req = make_request("GET", "/", version=HttpVersion10)
resp = web.StreamResponse()
await resp.prepare(req)
assert not resp.keep_alive
async def test_keep_alive_http10_switched_on() -> None:
headers = CIMultiDict(Connection="keep-alive")
req = make_request("GET", "/", version=HttpVersion10, headers=headers)
req._message = req._message._replace(should_close=False)
resp = web.StreamResponse()
await resp.prepare(req)
assert resp.keep_alive
async def test_keep_alive_http09() -> None:
headers = CIMultiDict(Connection="keep-alive")
req = make_request("GET", "/", version=HttpVersion(0, 9), headers=headers)
resp = web.StreamResponse()
await resp.prepare(req)
assert not resp.keep_alive
async def test_prepare_twice() -> None:
req = make_request("GET", "/")
resp = web.StreamResponse()
impl1 = await resp.prepare(req)
impl2 = await resp.prepare(req)
assert impl1 is impl2
async def test_prepare_calls_signal() -> None:
app = mock.create_autospec(web.Application, spec_set=True)
sig = mock.AsyncMock()
app.on_response_prepare = aiosignal.Signal(app)
app.on_response_prepare.append(sig)
req = make_request("GET", "/", app=app)
resp = web.StreamResponse()
await resp.prepare(req)
sig.assert_called_with(req, resp)
# Response class
def test_response_ctor() -> None:
resp = web.Response()
assert 200 == resp.status
assert "OK" == resp.reason
assert resp.body is None
assert resp.content_length == 0
assert "CONTENT-LENGTH" not in resp.headers
async def test_ctor_with_headers_and_status() -> None:
resp = web.Response(body=b"body", status=201, headers={"Age": "12", "DATE": "date"})
assert 201 == resp.status
assert b"body" == resp.body
assert resp.headers["AGE"] == "12"
req = make_mocked_request("GET", "/")
await resp._start(req)
assert 4 == resp.content_length
assert resp.headers["CONTENT-LENGTH"] == "4"
def test_ctor_content_type() -> None:
resp = web.Response(content_type="application/json")
assert 200 == resp.status
assert "OK" == resp.reason
assert 0 == resp.content_length
assert CIMultiDict([("CONTENT-TYPE", "application/json")]) == resp.headers
def test_ctor_text_body_combined() -> None:
with pytest.raises(ValueError):
web.Response(body=b"123", text="test text")
async def test_ctor_text() -> None:
resp = web.Response(text="test text")
assert 200 == resp.status
assert "OK" == resp.reason
assert 9 == resp.content_length
assert CIMultiDict([("CONTENT-TYPE", "text/plain; charset=utf-8")]) == resp.headers
assert resp.body == b"test text"
assert resp.text == "test text"
resp.headers["DATE"] = "date"
req = make_mocked_request("GET", "/", version=HttpVersion11)
await resp._start(req)
assert resp.headers["CONTENT-LENGTH"] == "9"
def test_ctor_charset() -> None:
resp = web.Response(text="текст", charset="koi8-r")
assert "текст".encode("koi8-r") == resp.body
assert "koi8-r" == resp.charset
def test_ctor_charset_default_utf8() -> None:
resp = web.Response(text="test test", charset=None)
assert "utf-8" == resp.charset
def test_ctor_charset_in_content_type() -> None:
with pytest.raises(ValueError):
web.Response(text="test test", content_type="text/plain; charset=utf-8")
def test_ctor_charset_without_text() -> None:
resp = web.Response(content_type="text/plain", charset="koi8-r")
assert "koi8-r" == resp.charset
def test_ctor_content_type_with_extra() -> None:
resp = web.Response(text="test test", content_type="text/plain; version=0.0.4")
assert resp.content_type == "text/plain"
assert resp.headers["content-type"] == "text/plain; version=0.0.4; charset=utf-8"
def test_invalid_content_type_parses_to_application_octet_stream() -> None:
resp = web.Response(text="test test", content_type="jpeg")
assert resp.content_type == "application/octet-stream"
assert resp.headers["content-type"] == "jpeg; charset=utf-8"
def test_ctor_both_content_type_param_and_header_with_text() -> None:
with pytest.raises(ValueError):
web.Response(
headers={"Content-Type": "application/json"},
content_type="text/html",
text="text",
)
def test_ctor_both_charset_param_and_header_with_text() -> None:
with pytest.raises(ValueError):
web.Response(
headers={"Content-Type": "application/json"}, charset="koi8-r", text="text"
)
def test_ctor_both_content_type_param_and_header() -> None:
with pytest.raises(ValueError):
web.Response(
headers={"Content-Type": "application/json"}, content_type="text/html"
)
def test_ctor_both_charset_param_and_header() -> None:
with pytest.raises(ValueError):
web.Response(headers={"Content-Type": "application/json"}, charset="koi8-r")
async def test_assign_nonbyteish_body() -> None:
resp = web.Response(body=b"data")
with pytest.raises(ValueError):
resp.body = 123
assert b"data" == resp.body
assert 4 == resp.content_length
resp.headers["DATE"] = "date"
req = make_mocked_request("GET", "/", version=HttpVersion11)
await resp._start(req)
assert resp.headers["CONTENT-LENGTH"] == "4"
assert 4 == resp.content_length
def test_assign_nonstr_text() -> None:
resp = web.Response(text="test")
with pytest.raises(AssertionError):
resp.text = b"123" # type: ignore[assignment]
assert b"test" == resp.body
assert 4 == resp.content_length
mpwriter = MultipartWriter(boundary="x")
mpwriter.append_payload(StringPayload("test"))
async def async_iter() -> AsyncIterator[str]:
yield "foo" # pragma: no cover
class CustomIO(io.IOBase):
def __init__(self) -> None:
self._lines = [b"", b"", b"test"]
def read(self, size: int = -1) -> bytes:
return self._lines.pop()
@pytest.mark.parametrize(
"payload,expected",
(
("test", "test"),
(CustomIO(), "test"),
(io.StringIO("test"), "test"),
(io.TextIOWrapper(io.BytesIO(b"test")), "test"),
(io.BytesIO(b"test"), "test"),
(io.BufferedReader(io.BytesIO(b"test")), "test"),
(async_iter(), None),
(BodyPartReader(b"x", CIMultiDictProxy(CIMultiDict()), mock.Mock()), None),
(
mpwriter,
"--x\r\nContent-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\ntest",
),
),
)
def test_payload_body_get_text(payload: object, expected: str | None) -> None:
resp = web.Response(body=payload)
if expected is None:
with pytest.raises(TypeError):
resp.text
else:
assert resp.text == expected
def test_response_set_content_length() -> None:
resp = web.Response()
with pytest.raises(RuntimeError):
resp.content_length = 1
async def test_send_headers_for_empty_body(
buf: bytearray, writer: AbstractStreamWriter
) -> None:
req = make_request("GET", "/", writer=writer)
resp = web.Response()
await resp.prepare(req)
await resp.write_eof()
txt = buf.decode("utf8")
lines = txt.split("\r\n")
assert len(lines) == 6
assert lines[0] == "HTTP/1.1 200 OK"
assert lines[1] == "Content-Length: 0"
assert lines[2].startswith("Date: ")
assert lines[3].startswith("Server: ")
assert lines[4] == lines[5] == ""
async def test_render_with_body(buf: bytearray, writer: AbstractStreamWriter) -> None:
req = make_request("GET", "/", writer=writer)
resp = web.Response(body=b"data")
await resp.prepare(req)
await resp.write_eof()
txt = buf.decode("utf8")
lines = txt.split("\r\n")
assert len(lines) == 7
assert lines[0] == "HTTP/1.1 200 OK"
assert lines[1] == "Content-Length: 4"
assert lines[2] == "Content-Type: application/octet-stream"
assert lines[3].startswith("Date: ")
assert lines[4].startswith("Server: ")
assert lines[5] == ""
assert lines[6] == "data"
async def test_multiline_reason(buf: bytearray, writer: AbstractStreamWriter) -> None:
with pytest.raises(ValueError, match=r"Reason cannot contain \\n"):
web.Response(reason="Bad\r\nInjected-header: foo")
async def test_send_set_cookie_header(
buf: bytearray, writer: AbstractStreamWriter
) -> None:
resp = web.Response()
resp.cookies["name"] = "value"
req = make_request("GET", "/", writer=writer)
await resp.prepare(req)
await resp.write_eof()
txt = buf.decode("utf8")
lines = txt.split("\r\n")
assert len(lines) == 7
assert lines[0] == "HTTP/1.1 200 OK"
assert lines[1] == "Content-Length: 0"
assert lines[2] == "Set-Cookie: name=value"
assert lines[3].startswith("Date: ")
assert lines[4].startswith("Server: ")
assert lines[5] == lines[6] == ""
async def test_consecutive_write_eof() -> None:
writer = mock.Mock()
writer.write_eof = mock.AsyncMock()
writer.write_headers = mock.AsyncMock()
req = make_request("GET", "/", writer=writer)
data = b"data"
resp = web.Response(body=data)
await resp.prepare(req)
await resp.write_eof()
await resp.write_eof()
writer.write_eof.assert_called_once_with(data)
def test_set_text_with_content_type() -> None:
resp = web.Response()
resp.content_type = "text/html"
resp.text = "text"
assert "text" == resp.text
assert b"text" == resp.body
assert "text/html" == resp.content_type
def test_set_text_with_charset() -> None:
resp = web.Response()
resp.content_type = "text/plain"
resp.charset = "KOI8-R"
resp.text = "текст"
assert "текст" == resp.text
assert "текст".encode("koi8-r") == resp.body
assert "koi8-r" == resp.charset
def test_default_content_type_in_stream_response() -> None:
resp = web.StreamResponse()
assert resp.content_type == "application/octet-stream"
def test_default_content_type_in_response() -> None:
resp = web.Response()
assert resp.content_type == "application/octet-stream"
def test_content_type_with_set_text() -> None:
resp = web.Response(text="text")
assert resp.content_type == "text/plain"
def test_content_type_with_set_body() -> None:
resp = web.Response(body=b"body")
assert resp.content_type == "application/octet-stream"
def test_prepared_when_not_started() -> None:
resp = web.StreamResponse()
assert not resp.prepared
async def test_prepared_when_started() -> None:
resp = web.StreamResponse()
await resp.prepare(make_request("GET", "/"))
assert resp.prepared
async def test_prepared_after_eof() -> None:
resp = web.StreamResponse()
await resp.prepare(make_request("GET", "/"))
await resp.write(b"data")
await resp.write_eof()
assert resp.prepared
async def test_drain_before_start() -> None:
resp = web.StreamResponse()
with pytest.raises(AssertionError):
await resp.drain()
async def test_changing_status_after_prepare_raises() -> None:
resp = web.StreamResponse()
await resp.prepare(make_request("GET", "/"))
with pytest.raises(AssertionError):
resp.set_status(400)
def test_nonstr_text_in_ctor() -> None:
with pytest.raises(TypeError):
web.Response(text=b"data") # type: ignore[arg-type]
def test_text_in_ctor_with_content_type() -> None:
resp = web.Response(text="data", content_type="text/html")
assert "data" == resp.text
assert "text/html" == resp.content_type
def test_text_in_ctor_with_content_type_header() -> None:
resp = web.Response(
text="текст", headers={"Content-Type": "text/html; charset=koi8-r"}
)
assert "текст".encode("koi8-r") == resp.body
assert "text/html" == resp.content_type
assert "koi8-r" == resp.charset
def test_text_in_ctor_with_content_type_header_multidict() -> None:
headers = CIMultiDict({"Content-Type": "text/html; charset=koi8-r"})
resp = web.Response(text="текст", headers=headers)
assert "текст".encode("koi8-r") == resp.body
assert "text/html" == resp.content_type
assert "koi8-r" == resp.charset
def test_body_in_ctor_with_content_type_header_multidict() -> None:
headers = CIMultiDict({"Content-Type": "text/html; charset=koi8-r"})
resp = web.Response(body="текст".encode("koi8-r"), headers=headers)
assert "текст".encode("koi8-r") == resp.body
assert "text/html" == resp.content_type
assert "koi8-r" == resp.charset
def test_text_with_empty_payload() -> None:
resp = web.Response(status=200)
assert resp.body is None
assert resp.text is None
def test_response_with_content_length_header_without_body() -> None:
resp = web.Response(headers={"Content-Length": "123"})
assert resp.content_length == 123
def test_response_with_immutable_headers() -> None:
resp = web.Response(
text="text", headers=CIMultiDictProxy(CIMultiDict({"Header": "Value"}))
)
assert resp.headers == {
"Header": "Value",
"Content-Type": "text/plain; charset=utf-8",
}
async def test_response_prepared_after_header_preparation() -> None:
req = make_request("GET", "/")
resp = web.StreamResponse()
await resp.prepare(req)
assert type(resp.headers["Server"]) is str
async def _strip_server(req: web.Request, res: web.Response) -> None:
assert "Server" in res.headers
if "Server" in res.headers:
del res.headers["Server"]
app = mock.create_autospec(web.Application, spec_set=True)
app.on_response_prepare = aiosignal.Signal(app)
app.on_response_prepare.append(_strip_server)
req = make_request("GET", "/", app=app)
resp = web.StreamResponse()
await resp.prepare(req)
assert "Server" not in resp.headers
def test_weakref_creation() -> None:
resp = web.Response()
weakref.ref(resp)
class TestJSONResponse:
def test_content_type_is_application_json_by_default(self) -> None:
resp = web.json_response("")
assert "application/json" == resp.content_type
def test_passing_text_only(self) -> None:
resp = web.json_response(text=json.dumps("jaysawn"))
assert resp.text == json.dumps("jaysawn")
def test_data_and_text_raises_value_error(self) -> None:
with pytest.raises(ValueError) as excinfo:
web.json_response(data="foo", text="bar")
expected_message = "only one of data, text, or body should be specified"
assert expected_message == excinfo.value.args[0]
def test_data_and_body_raises_value_error(self) -> None:
with pytest.raises(ValueError) as excinfo:
web.json_response(data="foo", body=b"bar")
expected_message = "only one of data, text, or body should be specified"
assert expected_message == excinfo.value.args[0]
def test_text_is_json_encoded(self) -> None:
resp = web.json_response({"foo": 42})
assert json.dumps({"foo": 42}) == resp.text
def test_content_type_is_overrideable(self) -> None:
resp = web.json_response({"foo": 42}, content_type="application/vnd.json+api")
assert "application/vnd.json+api" == resp.content_type
@pytest.mark.dev_mode
async def test_no_warn_small_cookie(
buf: bytearray, writer: AbstractStreamWriter
) -> None:
resp = web.Response()
resp.set_cookie("foo", "ÿ" + "8" * 4064, max_age=2600) # No warning
req = make_request("GET", "/", writer=writer)
await resp.prepare(req)
await resp.write_eof()
match = re.search(b"Set-Cookie: (.*?)\r\n", buf)
assert match is not None
cookie = match.group(1)
assert len(cookie) == 4096
@pytest.mark.dev_mode
async def test_warn_large_cookie(buf: bytearray, writer: AbstractStreamWriter) -> None:
resp = web.Response()
with pytest.warns(
UserWarning,
match="The size of is too large, it might get ignored by the client.",
):
resp.set_cookie("foo", "ÿ" + "8" * 4065, max_age=2600)
req = make_request("GET", "/", writer=writer)
await resp.prepare(req)
await resp.write_eof()
match = re.search(b"Set-Cookie: (.*?)\r\n", buf)
assert match is not None
cookie = match.group(1)
assert len(cookie) == 4097
@pytest.mark.parametrize("loose_header_type", (MultiDict, CIMultiDict, dict))
async def test_passing_cimultidict_to_web_response_not_mutated(
loose_header_type: type,
) -> None:
req = make_request("GET", "/")
headers = loose_header_type({})
resp = web.Response(body=b"answer", headers=headers)
await resp.prepare(req)
assert resp.content_length == 6
assert not headers
async def test_stream_response_sends_headers_immediately() -> None:
"""Test that StreamResponse sends headers immediately."""
writer = mock.create_autospec(StreamWriter, spec_set=True)
writer.write_headers = mock.AsyncMock()
writer.send_headers = mock.Mock()
writer.write_eof = mock.AsyncMock()
req = make_request("GET", "/", writer=writer)
resp = web.StreamResponse()
# StreamResponse should have _send_headers_immediately = True
assert resp._send_headers_immediately is True
# Prepare the response
await resp.prepare(req)
# Headers should be sent immediately
writer.send_headers.assert_called_once()
async def test_response_buffers_headers() -> None:
"""Test that Response buffers headers for packet coalescing."""
writer = mock.create_autospec(StreamWriter, spec_set=True)
writer.write_headers = mock.AsyncMock()
writer.send_headers = mock.Mock()
writer.write_eof = mock.AsyncMock()
req = make_request("GET", "/", writer=writer)
resp = web.Response(body=b"hello")
# Response should have _send_headers_immediately = False
assert resp._send_headers_immediately is False
# Prepare the response
await resp.prepare(req)
# Headers should NOT be sent immediately
writer.send_headers.assert_not_called()
# But write_headers should have been called
writer.write_headers.assert_called_once()
|
./temp_repos/aiohttp/aiohttp/web_response.py
|
./temp_repos/aiohttp/tests/test_web_response.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'ContentCoding'.
Context:
- Class Name: ContentCoding
- Dependencies to Mock: None detected
- Key Imports: time, web_request, helpers, enum, math, warnings, payload, typedefs, typing, asyncio
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
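A minimal sketch of such a test is shown below. It is illustrative only and rests on two assumptions: that ContentCoding is the enum exported from aiohttp.web_response, and that its members are the gzip, deflate, and identity codings exercised by the response tests above. No external dependencies were detected, so unittest.mock only appears to show where patching would go.
import unittest
from unittest import mock
from aiohttp.web_response import ContentCoding
class TestContentCoding(unittest.TestCase):
    def test_members_have_expected_values(self) -> None:
        # Arrange: the codings used throughout the web_response tests.
        expected = {"gzip", "deflate", "identity"}
        # Act: collect the string value of every enum member.
        values = {coding.value for coding in ContentCoding}
        # Assert
        self.assertEqual(values, expected)
    def test_lookup_by_value_returns_member(self) -> None:
        # Arrange / Act
        coding = ContentCoding("gzip")
        # Assert: value lookup returns the canonical enum member.
        self.assertIs(coding, ContentCoding.gzip)
    def test_unknown_coding_raises(self) -> None:
        # Arrange / Act / Assert: unsupported codings fail the enum lookup.
        with self.assertRaises(ValueError):
            ContentCoding("br")
    def test_patching_demonstration(self) -> None:
        # ContentCoding has no external dependencies to mock; this merely
        # shows where mock.patch would go ('time' is among the key imports).
        with mock.patch("time.time", return_value=0.0):
            self.assertEqual(ContentCoding.gzip.value, "gzip")
if __name__ == "__main__":
    unittest.main()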
|
ContentCoding
|
python
|
import asyncio
import socket
import weakref
from typing import Any, Optional
from .abc import AbstractResolver, ResolveResult
__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
try:
import aiodns
aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo")
except ImportError:
aiodns = None # type: ignore[assignment]
aiodns_default = False
_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV
_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
_AI_ADDRCONFIG = socket.AI_ADDRCONFIG
if hasattr(socket, "AI_MASK"):
_AI_ADDRCONFIG &= socket.AI_MASK
class ThreadedResolver(AbstractResolver):
"""Threaded resolver.
Uses an Executor for synchronous getaddrinfo() calls.
concurrent.futures.ThreadPoolExecutor is used by default.
"""
def __init__(self) -> None:
self._loop = asyncio.get_running_loop()
async def resolve(
self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
) -> list[ResolveResult]:
infos = await self._loop.getaddrinfo(
host,
port,
type=socket.SOCK_STREAM,
family=family,
flags=_AI_ADDRCONFIG,
)
hosts: list[ResolveResult] = []
for family, _, proto, _, address in infos:
if family == socket.AF_INET6:
if len(address) < 3:
                    # IPv6 is not supported by the Python build,
                    # or IPv6 is not enabled on the host.
continue
if address[3]:
                    # This is essential for link-local IPv6 addresses.
                    # Link-local IPv6 is a very rare case. Strictly speaking, we
                    # should use getnameinfo() unconditionally, but we skip it
                    # here for performance.
resolved_host, _port = await self._loop.getnameinfo(
address, _NAME_SOCKET_FLAGS
)
port = int(_port)
else:
resolved_host, port = address[:2]
else: # IPv4
assert family == socket.AF_INET
resolved_host, port = address # type: ignore[misc]
hosts.append(
ResolveResult(
hostname=host,
host=resolved_host,
port=port,
family=family,
proto=proto,
flags=_NUMERIC_SOCKET_FLAGS,
)
)
return hosts
async def close(self) -> None:
pass
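# Usage sketch (illustrative, not part of the original module): both resolver
# classes must be instantiated inside a running event loop, e.g.
#
#     resolver = ThreadedResolver()
#     hosts = await resolver.resolve("example.org", 443)
#     await resolver.close()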
class AsyncResolver(AbstractResolver):
"""Use the `aiodns` package to make asynchronous DNS lookups"""
def __init__(self, *args: Any, **kwargs: Any) -> None:
if aiodns is None:
raise RuntimeError("Resolver requires aiodns library")
self._loop = asyncio.get_running_loop()
self._manager: _DNSResolverManager | None = None
# If custom args are provided, create a dedicated resolver instance
# This means each AsyncResolver with custom args gets its own
# aiodns.DNSResolver instance
if args or kwargs:
self._resolver = aiodns.DNSResolver(*args, **kwargs)
return
# Use the shared resolver from the manager for default arguments
self._manager = _DNSResolverManager()
self._resolver = self._manager.get_resolver(self, self._loop)
async def resolve(
self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
) -> list[ResolveResult]:
try:
resp = await self._resolver.getaddrinfo(
host,
port=port,
type=socket.SOCK_STREAM,
family=family,
flags=_AI_ADDRCONFIG,
)
except aiodns.error.DNSError as exc:
            # aiodns raises DNSError(errno, message); fall back to a generic
            # message when none is attached to the exception.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
raise OSError(None, msg) from exc
hosts: list[ResolveResult] = []
for node in resp.nodes:
address: tuple[bytes, int] | tuple[bytes, int, int, int] = node.addr
family = node.family
if family == socket.AF_INET6:
if len(address) > 3 and address[3]:
                    # This is essential for link-local IPv6 addresses.
                    # Link-local IPv6 is a very rare case. Strictly speaking, we
                    # should use getnameinfo() unconditionally, but we skip it
                    # here for performance.
result = await self._resolver.getnameinfo(
(address[0].decode("ascii"), *address[1:]),
_NAME_SOCKET_FLAGS,
)
resolved_host = result.node
else:
resolved_host = address[0].decode("ascii")
port = address[1]
else: # IPv4
assert family == socket.AF_INET
resolved_host = address[0].decode("ascii")
port = address[1]
hosts.append(
ResolveResult(
hostname=host,
host=resolved_host,
port=port,
family=family,
proto=0,
flags=_NUMERIC_SOCKET_FLAGS,
)
)
if not hosts:
raise OSError(None, "DNS lookup failed")
return hosts
async def close(self) -> None:
if self._manager:
# Release the resolver from the manager if using the shared resolver
self._manager.release_resolver(self, self._loop)
self._manager = None # Clear reference to manager
self._resolver = None # type: ignore[assignment] # Clear reference to resolver
return
# Otherwise cancel our dedicated resolver
if self._resolver is not None:
self._resolver.cancel()
self._resolver = None # type: ignore[assignment] # Clear reference
class _DNSResolverManager:
"""Manager for aiodns.DNSResolver objects.
This class manages shared aiodns.DNSResolver instances
with no custom arguments across different event loops.
"""
_instance: Optional["_DNSResolverManager"] = None
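    # Classic singleton: the first _DNSResolverManager() call creates and
    # initializes the shared instance; every later call returns the same object.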
def __new__(cls) -> "_DNSResolverManager":
if cls._instance is None:
cls._instance = super().__new__(cls)
cls._instance._init()
return cls._instance
def _init(self) -> None:
# Use WeakKeyDictionary to allow event loops to be garbage collected
self._loop_data: weakref.WeakKeyDictionary[
asyncio.AbstractEventLoop,
tuple[aiodns.DNSResolver, weakref.WeakSet[AsyncResolver]],
] = weakref.WeakKeyDictionary()
def get_resolver(
self, client: "AsyncResolver", loop: asyncio.AbstractEventLoop
) -> "aiodns.DNSResolver":
"""Get or create the shared aiodns.DNSResolver instance for a specific event loop.
Args:
client: The AsyncResolver instance requesting the resolver.
This is required to track resolver usage.
loop: The event loop to use for the resolver.
"""
# Create a new resolver and client set for this loop if it doesn't exist
if loop not in self._loop_data:
resolver = aiodns.DNSResolver(loop=loop)
client_set: weakref.WeakSet[AsyncResolver] = weakref.WeakSet()
self._loop_data[loop] = (resolver, client_set)
else:
# Get the existing resolver and client set
resolver, client_set = self._loop_data[loop]
# Register this client with the loop
client_set.add(client)
return resolver
def release_resolver(
self, client: "AsyncResolver", loop: asyncio.AbstractEventLoop
) -> None:
"""Release the resolver for an AsyncResolver client when it's closed.
Args:
client: The AsyncResolver instance to release.
loop: The event loop the resolver was using.
"""
# Remove client from its loop's tracking
current_loop_data = self._loop_data.get(loop)
if current_loop_data is None:
return
resolver, client_set = current_loop_data
client_set.discard(client)
# If no more clients for this loop, cancel and remove its resolver
if not client_set:
if resolver is not None:
resolver.cancel()
del self._loop_data[loop]
_DefaultType = type[AsyncResolver | ThreadedResolver]
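# aiodns_default is True only when aiodns.DNSResolver exposes getaddrinfo()
# (aiodns >= 3.2.0), so older aiodns installs fall back to ThreadedResolver.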
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
|
import asyncio
import gc
import ipaddress
import socket
from collections.abc import Awaitable, Callable, Collection, Generator, Iterable
from ipaddress import ip_address
from typing import Any, NamedTuple
from unittest.mock import Mock, create_autospec, patch
import pytest
from aiohttp.resolver import (
_NAME_SOCKET_FLAGS,
AsyncResolver,
DefaultResolver,
ThreadedResolver,
_DNSResolverManager,
)
try:
import aiodns
getaddrinfo = hasattr(aiodns.DNSResolver, "getaddrinfo")
except ImportError:
aiodns = None # type: ignore[assignment]
getaddrinfo = False
_AddrInfo4 = list[
tuple[socket.AddressFamily, None, socket.SocketKind, None, tuple[str, int]]
]
_AddrInfo6 = list[
tuple[
socket.AddressFamily, None, socket.SocketKind, None, tuple[str, int, int, int]
]
]
_UnknownAddrInfo = list[
tuple[socket.AddressFamily, socket.SocketKind, int, str, tuple[int, bytes]]
]
@pytest.fixture()
def check_no_lingering_resolvers() -> Generator[None, None, None]:
"""Verify no resolvers remain after the test.
This fixture should be used in any test that creates instances of
AsyncResolver or directly uses _DNSResolverManager.
"""
manager = _DNSResolverManager()
before = len(manager._loop_data)
yield
after = len(manager._loop_data)
if after > before: # pragma: no branch
# Force garbage collection to ensure weak references are updated
gc.collect() # pragma: no cover
after = len(manager._loop_data) # pragma: no cover
if after > before: # pragma: no cover
pytest.fail( # pragma: no cover
f"Lingering resolvers found: {(after - before)} "
"new AsyncResolver instances were not properly closed."
)
@pytest.fixture()
def dns_resolver_manager() -> Generator[_DNSResolverManager, None, None]:
"""Create a fresh _DNSResolverManager instance for testing.
Saves and restores the singleton state to avoid affecting other tests.
"""
# Save the original instance
original_instance = _DNSResolverManager._instance
# Reset the singleton
_DNSResolverManager._instance = None
# Create and yield a fresh instance
try:
yield _DNSResolverManager()
finally:
# Clean up and restore the original instance
_DNSResolverManager._instance = original_instance
class FakeAIODNSAddrInfoNode(NamedTuple):
family: int
addr: tuple[bytes, int] | tuple[bytes, int, int, int]
class FakeAIODNSAddrInfoIPv4Result:
def __init__(self, hosts: Collection[str]) -> None:
self.nodes = [
FakeAIODNSAddrInfoNode(socket.AF_INET, (h.encode(), 0)) for h in hosts
]
class FakeAIODNSAddrInfoIPv6Result:
def __init__(self, hosts: Collection[str]) -> None:
self.nodes = [
FakeAIODNSAddrInfoNode(
socket.AF_INET6,
(h.encode(), 0, 0, 3 if ip_address(h).is_link_local else 0),
)
for h in hosts
]
class FakeAIODNSNameInfoIPv6Result:
def __init__(self, host: str) -> None:
self.node = host
self.service = None
class FakeQueryResult:
def __init__(self, host: str) -> None:
self.host = host
async def fake_aiodns_getaddrinfo_ipv4_result(
hosts: Collection[str],
) -> FakeAIODNSAddrInfoIPv4Result:
return FakeAIODNSAddrInfoIPv4Result(hosts=hosts)
async def fake_aiodns_getaddrinfo_ipv6_result(
hosts: Collection[str],
) -> FakeAIODNSAddrInfoIPv6Result:
return FakeAIODNSAddrInfoIPv6Result(hosts=hosts)
async def fake_aiodns_getnameinfo_ipv6_result(
host: str,
) -> FakeAIODNSNameInfoIPv6Result:
return FakeAIODNSNameInfoIPv6Result(host)
async def fake_query_result(result: Iterable[str]) -> list[FakeQueryResult]:
return [FakeQueryResult(host=h) for h in result]
def fake_addrinfo(hosts: Collection[str]) -> Callable[..., Awaitable[_AddrInfo4]]:
async def fake(*args: Any, **kwargs: Any) -> _AddrInfo4:
if not hosts:
raise socket.gaierror
return [(socket.AF_INET, None, socket.SOCK_STREAM, None, (h, 0)) for h in hosts]
return fake
def fake_ipv6_addrinfo(hosts: Collection[str]) -> Callable[..., Awaitable[_AddrInfo6]]:
async def fake(*args: Any, **kwargs: Any) -> _AddrInfo6:
if not hosts:
raise socket.gaierror
return [
(
socket.AF_INET6,
None,
socket.SOCK_STREAM,
None,
(h, 0, 0, 3 if ip_address(h).is_link_local else 0),
)
for h in hosts
]
return fake
def fake_ipv6_nameinfo(host: str) -> Callable[..., Awaitable[tuple[str, int]]]:
async def fake(*args: Any, **kwargs: Any) -> tuple[str, int]:
return host, 0
return fake
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_positive_ipv4_lookup(
loop: asyncio.AbstractEventLoop,
) -> None:
with patch("aiodns.DNSResolver") as mock:
mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result(
["127.0.0.1"]
)
resolver = AsyncResolver()
real = await resolver.resolve("www.python.org")
ipaddress.ip_address(real[0]["host"])
mock().getaddrinfo.assert_called_with(
"www.python.org",
family=socket.AF_INET,
flags=socket.AI_ADDRCONFIG,
port=0,
type=socket.SOCK_STREAM,
)
await resolver.close()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_positive_link_local_ipv6_lookup(
loop: asyncio.AbstractEventLoop,
) -> None:
with patch("aiodns.DNSResolver") as mock:
mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result(
["fe80::1"]
)
mock().getnameinfo.return_value = fake_aiodns_getnameinfo_ipv6_result(
"fe80::1%eth0"
)
resolver = AsyncResolver()
real = await resolver.resolve("www.python.org")
ipaddress.ip_address(real[0]["host"])
mock().getaddrinfo.assert_called_with(
"www.python.org",
family=socket.AF_INET,
flags=socket.AI_ADDRCONFIG,
port=0,
type=socket.SOCK_STREAM,
)
mock().getnameinfo.assert_called_with(("fe80::1", 0, 0, 3), _NAME_SOCKET_FLAGS)
await resolver.close()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_multiple_replies(loop: asyncio.AbstractEventLoop) -> None:
with patch("aiodns.DNSResolver") as mock:
ips = ["127.0.0.1", "127.0.0.2", "127.0.0.3", "127.0.0.4"]
mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result(ips)
resolver = AsyncResolver()
real = await resolver.resolve("www.google.com")
ipaddrs = [ipaddress.ip_address(x["host"]) for x in real]
assert len(ipaddrs) > 3, "Expecting multiple addresses"
await resolver.close()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_negative_lookup(loop: asyncio.AbstractEventLoop) -> None:
with patch("aiodns.DNSResolver") as mock:
mock().getaddrinfo.side_effect = aiodns.error.DNSError()
resolver = AsyncResolver()
with pytest.raises(OSError):
await resolver.resolve("doesnotexist.bla")
await resolver.close()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_no_hosts_in_getaddrinfo(
loop: asyncio.AbstractEventLoop,
) -> None:
with patch("aiodns.DNSResolver") as mock:
mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result([])
resolver = AsyncResolver()
with pytest.raises(OSError):
await resolver.resolve("doesnotexist.bla")
await resolver.close()
async def test_threaded_resolver_positive_lookup() -> None:
loop = Mock()
loop.getaddrinfo = fake_addrinfo(["127.0.0.1"])
resolver = ThreadedResolver()
resolver._loop = loop
real = await resolver.resolve("www.python.org")
assert real[0]["hostname"] == "www.python.org"
ipaddress.ip_address(real[0]["host"])
async def test_threaded_resolver_positive_ipv6_link_local_lookup() -> None:
loop = Mock()
loop.getaddrinfo = fake_ipv6_addrinfo(["fe80::1"])
loop.getnameinfo = fake_ipv6_nameinfo("fe80::1%eth0")
    # Replace the fake helper functions with autospec mocks so the calls
    # can be asserted below
loop.getaddrinfo = create_autospec(loop.getaddrinfo)
loop.getnameinfo = create_autospec(loop.getnameinfo)
    # Set the return values the mocked functions should produce
loop.getaddrinfo.return_value = await fake_ipv6_addrinfo(["fe80::1"])()
loop.getnameinfo.return_value = await fake_ipv6_nameinfo("fe80::1%eth0")()
resolver = ThreadedResolver()
resolver._loop = loop
real = await resolver.resolve("www.python.org")
assert real[0]["hostname"] == "www.python.org"
ipaddress.ip_address(real[0]["host"])
loop.getaddrinfo.assert_called_with(
"www.python.org",
0,
type=socket.SOCK_STREAM,
family=socket.AF_INET,
flags=socket.AI_ADDRCONFIG,
)
loop.getnameinfo.assert_called_with(("fe80::1", 0, 0, 3), _NAME_SOCKET_FLAGS)
async def test_threaded_resolver_multiple_replies() -> None:
loop = Mock()
ips = ["127.0.0.1", "127.0.0.2", "127.0.0.3", "127.0.0.4"]
loop.getaddrinfo = fake_addrinfo(ips)
resolver = ThreadedResolver()
resolver._loop = loop
real = await resolver.resolve("www.google.com")
ipaddrs = [ipaddress.ip_address(x["host"]) for x in real]
assert len(ipaddrs) > 3, "Expecting multiple addresses"
async def test_threaded_negative_lookup() -> None:
loop = Mock()
ips: list[str] = []
loop.getaddrinfo = fake_addrinfo(ips)
resolver = ThreadedResolver()
resolver._loop = loop
with pytest.raises(socket.gaierror):
await resolver.resolve("doesnotexist.bla")
async def test_threaded_negative_ipv6_lookup() -> None:
loop = Mock()
ips: list[str] = []
loop.getaddrinfo = fake_ipv6_addrinfo(ips)
resolver = ThreadedResolver()
resolver._loop = loop
with pytest.raises(socket.gaierror):
await resolver.resolve("doesnotexist.bla")
async def test_threaded_negative_lookup_with_unknown_result() -> None:
loop = Mock()
    # If CPython is compiled with the --disable-ipv6 option,
    # we get an (int, bytes) tuple instead of an exception.
async def unknown_addrinfo(*args: Any, **kwargs: Any) -> _UnknownAddrInfo:
return [
(
socket.AF_INET6,
socket.SOCK_STREAM,
6,
"",
(10, b"\x01\xbb\x00\x00\x00\x00*\x04NB\x00\x1a\x00\x00"),
)
]
loop.getaddrinfo = unknown_addrinfo
resolver = ThreadedResolver()
resolver._loop = loop
with patch("socket.has_ipv6", False):
res = await resolver.resolve("www.python.org")
assert len(res) == 0
async def test_close_for_threaded_resolver(loop: asyncio.AbstractEventLoop) -> None:
resolver = ThreadedResolver()
await resolver.close()
@pytest.mark.skipif(aiodns is None, reason="aiodns required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_close_for_async_resolver(loop: asyncio.AbstractEventLoop) -> None:
resolver = AsyncResolver()
await resolver.close()
async def test_default_loop_for_threaded_resolver(
loop: asyncio.AbstractEventLoop,
) -> None:
asyncio.set_event_loop(loop)
resolver = ThreadedResolver()
assert resolver._loop is loop
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_ipv6_positive_lookup(
loop: asyncio.AbstractEventLoop,
) -> None:
with patch("aiodns.DNSResolver") as mock:
mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result(["::1"])
resolver = AsyncResolver()
real = await resolver.resolve("www.python.org")
ipaddress.ip_address(real[0]["host"])
mock().getaddrinfo.assert_called_with(
"www.python.org",
family=socket.AF_INET,
flags=socket.AI_ADDRCONFIG,
port=0,
type=socket.SOCK_STREAM,
)
await resolver.close()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_error_messages_passed(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Ensure error messages are passed through from aiodns."""
with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock:
mock().getaddrinfo.side_effect = aiodns.error.DNSError(1, "Test error message")
resolver = AsyncResolver()
with pytest.raises(OSError, match="Test error message") as excinfo:
await resolver.resolve("x.org")
assert excinfo.value.strerror == "Test error message"
await resolver.close()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_error_messages_passed_no_hosts(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Ensure error messages are passed through from aiodns."""
with patch("aiodns.DNSResolver", autospec=True, spec_set=True) as mock:
mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result([])
resolver = AsyncResolver()
with pytest.raises(OSError, match="DNS lookup failed") as excinfo:
await resolver.resolve("x.org")
assert excinfo.value.strerror == "DNS lookup failed"
await resolver.close()
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_aiodns_not_present(
loop: asyncio.AbstractEventLoop, monkeypatch: pytest.MonkeyPatch
) -> None:
monkeypatch.setattr("aiohttp.resolver.aiodns", None)
with pytest.raises(RuntimeError):
AsyncResolver()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
def test_aio_dns_is_default() -> None:
assert DefaultResolver is AsyncResolver
@pytest.mark.skipif(getaddrinfo, reason="aiodns <3.2.0 required")
def test_threaded_resolver_is_default() -> None:
assert DefaultResolver is ThreadedResolver
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
async def test_dns_resolver_manager_sharing(
dns_resolver_manager: _DNSResolverManager,
) -> None:
"""Test that the DNSResolverManager shares a resolver among AsyncResolver instances."""
# Create two default AsyncResolver instances
resolver1 = AsyncResolver()
resolver2 = AsyncResolver()
# Check that they share the same underlying resolver
assert resolver1._resolver is resolver2._resolver
# Create an AsyncResolver with custom args
resolver3 = AsyncResolver(nameservers=["8.8.8.8"])
# Check that it has its own resolver
assert resolver1._resolver is not resolver3._resolver
# Cleanup
await resolver1.close()
await resolver2.close()
await resolver3.close()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
async def test_dns_resolver_manager_singleton(
dns_resolver_manager: _DNSResolverManager,
) -> None:
"""Test that DNSResolverManager is a singleton."""
# Create a second manager and check it's the same instance
manager1 = dns_resolver_manager
manager2 = _DNSResolverManager()
assert manager1 is manager2
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
async def test_dns_resolver_manager_resolver_lifecycle(
dns_resolver_manager: _DNSResolverManager,
) -> None:
"""Test that DNSResolverManager creates and destroys resolver correctly."""
manager = dns_resolver_manager
# Initially there should be no resolvers
assert not manager._loop_data
# Create a mock AsyncResolver for testing
mock_client = Mock(spec=AsyncResolver)
mock_client._loop = asyncio.get_running_loop()
# Getting resolver should create one
mock_loop = mock_client._loop
resolver = manager.get_resolver(mock_client, mock_loop)
assert resolver is not None
assert manager._loop_data[mock_loop][0] is resolver
# Getting it again should return the same instance
assert manager.get_resolver(mock_client, mock_loop) is resolver
# Clean up
manager.release_resolver(mock_client, mock_loop)
assert not manager._loop_data
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
async def test_dns_resolver_manager_client_registration(
dns_resolver_manager: _DNSResolverManager,
) -> None:
"""Test client registration and resolver release logic."""
with patch("aiodns.DNSResolver") as mock:
# Create resolver instances
resolver1 = AsyncResolver()
resolver2 = AsyncResolver()
# Both should use the same resolver from the manager
assert resolver1._resolver is resolver2._resolver
# The manager should be tracking both clients
assert resolver1._manager is resolver2._manager
manager = resolver1._manager
assert manager is not None
loop = asyncio.get_running_loop()
_, client_set = manager._loop_data[loop]
assert len(client_set) == 2
# Close one resolver
await resolver1.close()
_, client_set = manager._loop_data[loop]
assert len(client_set) == 1
# Resolver should still exist
assert manager._loop_data # Not empty
# Close the second resolver
await resolver2.close()
assert not manager._loop_data # Should be empty after closing all clients
# Now all resolvers should be canceled and removed
assert not manager._loop_data # Should be empty
mock().cancel.assert_called_once()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
async def test_dns_resolver_manager_multiple_event_loops(
dns_resolver_manager: _DNSResolverManager,
) -> None:
"""Test that DNSResolverManager correctly manages resolvers across different event loops."""
# Create separate resolvers for each loop
resolver1 = Mock(name="resolver1")
resolver2 = Mock(name="resolver2")
# Create a patch that returns different resolvers based on the loop argument
mock_resolver = Mock()
mock_resolver.side_effect = lambda loop=None, **kwargs: (
resolver1 if loop is asyncio.get_running_loop() else resolver2
)
with patch("aiodns.DNSResolver", mock_resolver):
manager = dns_resolver_manager
# Create two mock clients on different loops
mock_client1 = Mock(spec=AsyncResolver)
mock_client1._loop = asyncio.get_running_loop()
# Create a second event loop
loop2 = Mock(spec=asyncio.AbstractEventLoop)
mock_client2 = Mock(spec=AsyncResolver)
mock_client2._loop = loop2
# Get resolvers for both clients
loop1 = mock_client1._loop
loop2 = mock_client2._loop
# Get the resolvers through the manager
manager_resolver1 = manager.get_resolver(mock_client1, loop1)
manager_resolver2 = manager.get_resolver(mock_client2, loop2)
# Should be different resolvers for different loops
assert manager_resolver1 is resolver1
assert manager_resolver2 is resolver2
assert manager._loop_data[loop1][0] is resolver1
assert manager._loop_data[loop2][0] is resolver2
# Release the first resolver
manager.release_resolver(mock_client1, loop1)
# First loop's resolver should be gone, but second should remain
assert loop1 not in manager._loop_data
assert loop2 in manager._loop_data
# Release the second resolver
manager.release_resolver(mock_client2, loop2)
# Both resolvers should be gone
assert not manager._loop_data
# Verify resolver cleanup
resolver1.cancel.assert_called_once()
resolver2.cancel.assert_called_once()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
async def test_dns_resolver_manager_weakref_garbage_collection() -> None:
"""Test that release_resolver handles None resolver due to weakref garbage collection."""
manager = _DNSResolverManager()
# Create a mock resolver that will be None when accessed
mock_resolver = Mock()
mock_resolver.cancel = Mock()
with patch("aiodns.DNSResolver", return_value=mock_resolver):
# Create an AsyncResolver to get a resolver from the manager
resolver = AsyncResolver()
loop = asyncio.get_running_loop()
# Manually corrupt the data to simulate garbage collection
# by setting the resolver to None
manager._loop_data[loop] = (None, manager._loop_data[loop][1]) # type: ignore[assignment]
# This should not raise an AttributeError: 'NoneType' object has no attribute 'cancel'
await resolver.close()
# Verify no exception was raised and the loop data was cleaned up properly
# Since we set resolver to None and there was one client, the entry should be removed
assert loop not in manager._loop_data
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
async def test_dns_resolver_manager_missing_loop_data() -> None:
"""Test that release_resolver handles missing loop data gracefully."""
manager = _DNSResolverManager()
with patch("aiodns.DNSResolver"):
# Create an AsyncResolver
resolver = AsyncResolver()
loop = asyncio.get_running_loop()
# Manually remove the loop data to simulate race condition
manager._loop_data.clear()
# This should not raise a KeyError
await resolver.close()
# Verify no exception was raised
assert loop not in manager._loop_data
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_close_multiple_times() -> None:
"""Test that AsyncResolver.close() can be called multiple times without error."""
with patch("aiodns.DNSResolver") as mock_dns_resolver:
mock_resolver = Mock()
mock_resolver.cancel = Mock()
mock_dns_resolver.return_value = mock_resolver
# Create a resolver with custom args (dedicated resolver)
resolver = AsyncResolver(nameservers=["8.8.8.8"])
# Close it once
await resolver.close()
mock_resolver.cancel.assert_called_once()
# Close it again - should not raise AttributeError
await resolver.close()
# cancel should still only be called once
mock_resolver.cancel.assert_called_once()
@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required")
@pytest.mark.usefixtures("check_no_lingering_resolvers")
async def test_async_resolver_close_with_none_resolver() -> None:
"""Test that AsyncResolver.close() handles None resolver gracefully."""
with patch("aiodns.DNSResolver"):
# Create a resolver with custom args (dedicated resolver)
resolver = AsyncResolver(nameservers=["8.8.8.8"])
# Manually set resolver to None to simulate edge case
resolver._resolver = None # type: ignore[assignment]
# This should not raise AttributeError
await resolver.close()
|
./temp_repos/aiohttp/aiohttp/resolver.py
|
./temp_repos/aiohttp/tests/test_resolver.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'ThreadedResolver'.
Context:
- Class Name: ThreadedResolver
- Dependencies to Mock: None detected
- Key Imports: aiodns, typing, weakref, asyncio, abc, socket
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern; a minimal sketch follows below.
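A minimal AAA-style sketch satisfying these requirements (the loop stub
mirrors the fake_addrinfo pattern used elsewhere in this file; host names
are illustrative, not part of the original task):
import socket
import unittest
from unittest.mock import MagicMock

from aiohttp.resolver import ThreadedResolver


class ThreadedResolverTest(unittest.IsolatedAsyncioTestCase):
    async def test_resolve_success(self) -> None:
        # Arrange: stub the loop's getaddrinfo with a single IPv4 answer.
        loop = MagicMock()

        async def fake_getaddrinfo(*args: object, **kwargs: object) -> list:
            return [
                (socket.AF_INET, socket.SOCK_STREAM, 6, "", ("127.0.0.1", 0))
            ]

        loop.getaddrinfo = fake_getaddrinfo
        resolver = ThreadedResolver()
        resolver._loop = loop
        # Act
        hosts = await resolver.resolve("www.example.com")
        # Assert
        self.assertEqual(hosts[0]["host"], "127.0.0.1")

    async def test_resolve_failure(self) -> None:
        # Arrange: a failing getaddrinfo should propagate as gaierror.
        loop = MagicMock()

        async def fake_getaddrinfo(*args: object, **kwargs: object) -> list:
            raise socket.gaierror

        loop.getaddrinfo = fake_getaddrinfo
        resolver = ThreadedResolver()
        resolver._loop = loop
        # Act / Assert
        with self.assertRaises(socket.gaierror):
            await resolver.resolve("doesnotexist.bla")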
|
ThreadedResolver
|
python
|
import asyncio
import datetime
import io
import re
import socket
import string
import sys
import tempfile
import types
from collections.abc import Iterator, Mapping, MutableMapping
from re import Pattern
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, Final, Optional, TypeVar, cast, overload
from urllib.parse import parse_qsl
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
from yarl import URL
from . import hdrs
from ._cookie_helpers import parse_cookie_header
from .abc import AbstractStreamWriter
from .helpers import (
_SENTINEL,
ETAG_ANY,
LIST_QUOTED_ETAG_RE,
ChainMapProxy,
ETag,
HeadersMixin,
RequestKey,
frozen_dataclass_decorator,
is_expected_content_type,
parse_http_date,
reify,
sentinel,
set_exception,
)
from .http_parser import RawRequestMessage
from .http_writer import HttpVersion
from .multipart import BodyPartReader, MultipartReader
from .streams import EmptyStreamReader, StreamReader
from .typedefs import (
DEFAULT_JSON_DECODER,
JSONDecoder,
LooseHeaders,
RawHeaders,
StrOrURL,
)
from .web_exceptions import (
HTTPBadRequest,
HTTPRequestEntityTooLarge,
HTTPUnsupportedMediaType,
)
from .web_response import StreamResponse
if sys.version_info >= (3, 11):
from typing import Self
else:
Self = Any
__all__ = ("BaseRequest", "FileField", "Request")
if TYPE_CHECKING:
from .web_app import Application
from .web_protocol import RequestHandler
from .web_urldispatcher import UrlMappingMatchInfo
_T = TypeVar("_T")
@frozen_dataclass_decorator
class FileField:
name: str
filename: str
file: io.BufferedReader
content_type: str
headers: CIMultiDictProxy[str]
_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
# '-' at the end to prevent interpretation as range in a char class
_TOKEN: Final[str] = rf"[{_TCHAR}]+"
_QDTEXT: Final[str] = r"[{}]".format(
r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
)
# qdtext includes 0x5C to escape 0x5D ('\]')
# qdtext excludes obs-text (because obsoleted, and encoding not specified)
_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"
_QUOTED_STRING: Final[str] = rf'"(?:{_QUOTED_PAIR}|{_QDTEXT})*"'
_FORWARDED_PAIR: Final[str] = rf"({_TOKEN})=({_TOKEN}|{_QUOTED_STRING})(:\d{{1,4}})?"
_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")
# same pattern as _QUOTED_PAIR but contains a capture group
_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)
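# Illustrative matches for _FORWARDED_PAIR_RE (inputs assumed):
#   'for=1.2.3.4:1234' -> name 'for', token value '1.2.3.4', port ':1234'
#                         (re-joined to '1.2.3.4:1234' by .forwarded below)
#   'proto="https"'    -> quoted-string value, unescaped during parsing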
############################################################
# HTTP Request
############################################################
class BaseRequest(MutableMapping[str | RequestKey[Any], Any], HeadersMixin):
POST_METHODS = {
hdrs.METH_PATCH,
hdrs.METH_POST,
hdrs.METH_PUT,
hdrs.METH_TRACE,
hdrs.METH_DELETE,
}
_post: MultiDictProxy[str | bytes | FileField] | None = None
_read_bytes: bytes | None = None
def __init__(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: "RequestHandler[Self]",
payload_writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
loop: asyncio.AbstractEventLoop,
*,
client_max_size: int = 1024**2,
state: dict[RequestKey[Any] | str, Any] | None = None,
scheme: str | None = None,
host: str | None = None,
remote: str | None = None,
) -> None:
self._message = message
self._protocol = protocol
self._payload_writer = payload_writer
self._payload = payload
self._headers: CIMultiDictProxy[str] = message.headers
self._method = message.method
self._version = message.version
self._cache: dict[str, Any] = {}
url = message.url
if url.absolute:
if scheme is not None:
url = url.with_scheme(scheme)
if host is not None:
url = url.with_host(host)
            # an absolute URL was given:
            # override the auto-calculated url, host, and scheme;
            # all other properties should be good
self._cache["url"] = url
self._cache["host"] = url.host
self._cache["scheme"] = url.scheme
self._rel_url = url.relative()
else:
self._rel_url = url
if scheme is not None:
self._cache["scheme"] = scheme
if host is not None:
self._cache["host"] = host
self._state = {} if state is None else state
self._task = task
self._client_max_size = client_max_size
self._loop = loop
self._transport_sslcontext = protocol.ssl_context
self._transport_peername = protocol.peername
if remote is not None:
self._cache["remote"] = remote
def clone(
self,
*,
method: str | _SENTINEL = sentinel,
rel_url: StrOrURL | _SENTINEL = sentinel,
headers: LooseHeaders | _SENTINEL = sentinel,
scheme: str | _SENTINEL = sentinel,
host: str | _SENTINEL = sentinel,
remote: str | _SENTINEL = sentinel,
client_max_size: int | _SENTINEL = sentinel,
) -> "BaseRequest":
"""Clone itself with replacement some attributes.
Creates and returns a new instance of Request object. If no parameters
are given, an exact copy is returned. If a parameter is not passed, it
will reuse the one from the current request object.
"""
if self._read_bytes:
raise RuntimeError("Cannot clone request after reading its content")
dct: dict[str, Any] = {}
if method is not sentinel:
dct["method"] = method
if rel_url is not sentinel:
new_url: URL = URL(rel_url)
dct["url"] = new_url
dct["path"] = str(new_url)
if headers is not sentinel:
# a copy semantic
new_headers = CIMultiDictProxy(CIMultiDict(headers))
dct["headers"] = new_headers
dct["raw_headers"] = tuple(
(k.encode("utf-8"), v.encode("utf-8")) for k, v in new_headers.items()
)
message = self._message._replace(**dct)
kwargs: dict[str, str] = {}
if scheme is not sentinel:
kwargs["scheme"] = scheme
if host is not sentinel:
kwargs["host"] = host
if remote is not sentinel:
kwargs["remote"] = remote
if client_max_size is sentinel:
client_max_size = self._client_max_size
return self.__class__(
message,
self._payload,
self._protocol, # type: ignore[arg-type]
self._payload_writer,
self._task,
self._loop,
client_max_size=client_max_size,
state=self._state.copy(),
**kwargs,
)
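    # Illustrative sketch (values assumed): clone() supports middleware-style
    # rewrites without mutating the original request, e.g.
    #   req2 = request.clone(scheme="https", host="proxy.example")
    #   assert req2.url.scheme == "https"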
@property
def task(self) -> "asyncio.Task[None]":
return self._task
@property
def protocol(self) -> "RequestHandler[Self]":
return self._protocol
@property
def transport(self) -> asyncio.Transport | None:
return self._protocol.transport
@property
def writer(self) -> AbstractStreamWriter:
return self._payload_writer
@property
def client_max_size(self) -> int:
return self._client_max_size
@reify
def rel_url(self) -> URL:
return self._rel_url
# MutableMapping API
@overload # type: ignore[override]
def __getitem__(self, key: RequestKey[_T]) -> _T: ...
@overload
def __getitem__(self, key: str) -> Any: ...
def __getitem__(self, key: str | RequestKey[_T]) -> Any:
return self._state[key]
@overload # type: ignore[override]
def __setitem__(self, key: RequestKey[_T], value: _T) -> None: ...
@overload
def __setitem__(self, key: str, value: Any) -> None: ...
def __setitem__(self, key: str | RequestKey[_T], value: Any) -> None:
self._state[key] = value
def __delitem__(self, key: str | RequestKey[_T]) -> None:
del self._state[key]
def __len__(self) -> int:
return len(self._state)
def __iter__(self) -> Iterator[str | RequestKey[Any]]:
return iter(self._state)
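    # Illustrative sketch (key name assumed): typed per-request state can be
    # stored via RequestKey alongside plain str keys, e.g.
    #   USER_KEY = RequestKey("user", str)
    #   request[USER_KEY] = "alice"   # typed as str for static checkers
    #   request["legacy"] = object()  # plain str keys still work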
########
@reify
def secure(self) -> bool:
"""A bool indicating if the request is handled with SSL."""
return self.scheme == "https"
@reify
def forwarded(self) -> tuple[Mapping[str, str], ...]:
"""A tuple containing all parsed Forwarded header(s).
Makes an effort to parse Forwarded headers as specified by RFC 7239:
        - It adds one (immutable) dictionary per Forwarded 'field-value', i.e.
per proxy. The element corresponds to the data in the Forwarded
field-value added by the first proxy encountered by the client. Each
subsequent item corresponds to those added by later proxies.
- It checks that every value has valid syntax in general as specified
in section 4: either a 'token' or a 'quoted-string'.
- It un-escapes found escape sequences.
- It does NOT validate 'by' and 'for' contents as specified in section
6.
- It does NOT validate 'host' contents (Host ABNF).
- It does NOT validate 'proto' contents for valid URI scheme names.
Returns a tuple containing one or more immutable dicts
"""
elems = []
for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
length = len(field_value)
pos = 0
need_separator = False
elem: dict[str, str] = {}
elems.append(types.MappingProxyType(elem))
while 0 <= pos < length:
match = _FORWARDED_PAIR_RE.match(field_value, pos)
if match is not None: # got a valid forwarded-pair
if need_separator:
# bad syntax here, skip to next comma
pos = field_value.find(",", pos)
else:
name, value, port = match.groups()
if value[0] == '"':
# quoted string: remove quotes and unescape
value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
if port:
value += port
elem[name.lower()] = value
pos += len(match.group(0))
need_separator = True
elif field_value[pos] == ",": # next forwarded-element
need_separator = False
elem = {}
elems.append(types.MappingProxyType(elem))
pos += 1
elif field_value[pos] == ";": # next forwarded-pair
need_separator = False
pos += 1
elif field_value[pos] in " \t":
# Allow whitespace even between forwarded-pairs, though
# RFC 7239 doesn't. This simplifies code and is in line
# with Postel's law.
pos += 1
else:
# bad syntax here, skip to next comma
pos = field_value.find(",", pos)
return tuple(elems)
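    # Illustrative sketch (header value assumed): a header such as
    #   Forwarded: for=192.0.2.60;proto=http, for="[2001:db8::1]"
    # parses into an immutable tuple of mapping proxies:
    #   ({'for': '192.0.2.60', 'proto': 'http'}, {'for': '[2001:db8::1]'})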
@reify
def scheme(self) -> str:
"""A string representing the scheme of the request.
        The scheme is resolved in this order:
- overridden value by .clone(scheme=new_scheme) call.
- type of connection to peer: HTTPS if socket is SSL, HTTP otherwise.
'http' or 'https'.
"""
if self._transport_sslcontext:
return "https"
else:
return "http"
@reify
def method(self) -> str:
"""Read only property for getting HTTP method.
The value is upper-cased str like 'GET', 'POST', 'PUT' etc.
"""
return self._method
@reify
def version(self) -> HttpVersion:
"""Read only property for getting HTTP version of request.
Returns aiohttp.protocol.HttpVersion instance.
"""
return self._version
@reify
def host(self) -> str:
"""Hostname of the request.
Hostname is resolved in this order:
- overridden value by .clone(host=new_host) call.
- HOST HTTP header
- socket.getfqdn() value
For example, 'example.com' or 'localhost:8080'.
For historical reasons, the port number may be included.
"""
host = self._message.headers.get(hdrs.HOST)
if host is not None:
return host
return socket.getfqdn()
@reify
def remote(self) -> str | None:
"""Remote IP of client initiated HTTP request.
The IP is resolved in this order:
- overridden value by .clone(remote=new_remote) call.
- peername of opened socket
"""
if self._transport_peername is None:
return None
if isinstance(self._transport_peername, (list, tuple)):
return str(self._transport_peername[0])
return str(self._transport_peername)
@reify
def url(self) -> URL:
"""The full URL of the request."""
# authority is used here because it may include the port number
# and we want yarl to parse it correctly
return URL.build(scheme=self.scheme, authority=self.host).join(self._rel_url)
@reify
def path(self) -> str:
"""The URL including *PATH INFO* without the host or scheme.
E.g., ``/app/blog``
"""
return self._rel_url.path
@reify
def path_qs(self) -> str:
"""The URL including PATH_INFO and the query string.
        E.g., ``/app/blog?id=10``
"""
return str(self._rel_url)
@reify
def raw_path(self) -> str:
"""The URL including raw *PATH INFO* without the host or scheme.
        Warning: the path is unquoted and may contain invalid URL characters.
E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
"""
return self._message.path
@reify
def query(self) -> MultiDictProxy[str]:
"""A multidict with all the variables in the query string."""
return self._rel_url.query
@reify
def query_string(self) -> str:
"""The query string in the URL.
E.g., id=10
"""
return self._rel_url.query_string
@reify
def headers(self) -> CIMultiDictProxy[str]:
"""A case-insensitive multidict proxy with all headers."""
return self._headers
@reify
def raw_headers(self) -> RawHeaders:
"""A sequence of pairs for all headers."""
return self._message.raw_headers
@reify
def if_modified_since(self) -> datetime.datetime | None:
"""The value of If-Modified-Since HTTP header, or None.
This header is represented as a `datetime` object.
"""
return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
@reify
def if_unmodified_since(self) -> datetime.datetime | None:
"""The value of If-Unmodified-Since HTTP header, or None.
This header is represented as a `datetime` object.
"""
return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
@staticmethod
def _etag_values(etag_header: str) -> Iterator[ETag]:
"""Extract `ETag` objects from raw header."""
if etag_header == ETAG_ANY:
yield ETag(
is_weak=False,
value=ETAG_ANY,
)
else:
for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
is_weak, value, garbage = match.group(2, 3, 4)
# Any symbol captured by 4th group means
# that the following sequence is invalid.
if garbage:
break
yield ETag(
is_weak=bool(is_weak),
value=value,
)
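    # Illustrative sketch (header value assumed): an If-None-Match value of
    #   W/"67ab43", "54ed21"
    # yields ETag(is_weak=True, value="67ab43") and then
    # ETag(is_weak=False, value="54ed21") from the generator above.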
@classmethod
def _if_match_or_none_impl(
cls, header_value: str | None
) -> tuple[ETag, ...] | None:
if not header_value:
return None
return tuple(cls._etag_values(header_value))
@reify
def if_match(self) -> tuple[ETag, ...] | None:
"""The value of If-Match HTTP header, or None.
This header is represented as a `tuple` of `ETag` objects.
"""
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))
@reify
def if_none_match(self) -> tuple[ETag, ...] | None:
"""The value of If-None-Match HTTP header, or None.
This header is represented as a `tuple` of `ETag` objects.
"""
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))
@reify
def if_range(self) -> datetime.datetime | None:
"""The value of If-Range HTTP header, or None.
This header is represented as a `datetime` object.
"""
return parse_http_date(self.headers.get(hdrs.IF_RANGE))
@reify
def keep_alive(self) -> bool:
"""Is keepalive enabled by client?"""
return not self._message.should_close
@reify
def cookies(self) -> Mapping[str, str]:
"""Return request cookies.
A read-only dictionary-like object.
"""
# Use parse_cookie_header for RFC 6265 compliant Cookie header parsing
# that accepts special characters in cookie names (fixes #2683)
parsed = parse_cookie_header(self.headers.get(hdrs.COOKIE, ""))
# Extract values from Morsel objects
return MappingProxyType({name: morsel.value for name, morsel in parsed})
@reify
def http_range(self) -> "slice[int, int, int]":
"""The content of Range HTTP header.
Return a slice instance.
"""
rng = self._headers.get(hdrs.RANGE)
start, end = None, None
if rng is not None:
try:
pattern = r"^bytes=(\d*)-(\d*)$"
start, end = re.findall(pattern, rng)[0]
except IndexError: # pattern was not found in header
raise ValueError("range not in acceptable format")
end = int(end) if end else None
start = int(start) if start else None
if start is None and end is not None:
# end with no start is to return tail of content
start = -end
end = None
if start is not None and end is not None:
# end is inclusive in range header, exclusive for slice
end += 1
if start >= end:
raise ValueError("start cannot be after end")
if start is end is None: # No valid range supplied
raise ValueError("No start or end of range specified")
return slice(start, end, 1)
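    # Illustrative mapping (header values assumed), following the logic
    # above where the inclusive Range end becomes an exclusive slice stop:
    #   "bytes=0-499" -> slice(0, 500, 1)
    #   "bytes=9500-" -> slice(9500, None, 1)
    #   "bytes=-500"  -> slice(-500, None, 1)  # tail: last 500 bytes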
@reify
def content(self) -> StreamReader:
"""Return raw payload stream."""
return self._payload
@property
def can_read_body(self) -> bool:
"""Return True if request's HTTP BODY can be read, False otherwise."""
return not self._payload.at_eof()
@reify
def body_exists(self) -> bool:
"""Return True if request has HTTP BODY, False otherwise."""
return type(self._payload) is not EmptyStreamReader
async def release(self) -> None:
"""Release request.
Eat unread part of HTTP BODY if present.
"""
while not self._payload.at_eof():
await self._payload.readany()
async def read(self) -> bytes:
"""Read request body if present.
Returns bytes object with full request content.
"""
if self._read_bytes is None:
body = bytearray()
while True:
chunk = await self._payload.readany()
body.extend(chunk)
if self._client_max_size:
body_size = len(body)
if body_size > self._client_max_size:
raise HTTPRequestEntityTooLarge(
max_size=self._client_max_size, actual_size=body_size
)
if not chunk:
break
self._read_bytes = bytes(body)
return self._read_bytes
async def text(self) -> str:
"""Return BODY as text using encoding from .charset."""
bytes_body = await self.read()
encoding = self.charset or "utf-8"
try:
return bytes_body.decode(encoding)
except LookupError:
raise HTTPUnsupportedMediaType()
async def json(
self,
*,
loads: JSONDecoder = DEFAULT_JSON_DECODER,
content_type: str | None = "application/json",
) -> Any:
"""Return BODY as JSON."""
body = await self.text()
if content_type:
if not is_expected_content_type(self.content_type, content_type):
raise HTTPBadRequest(
text=(
"Attempt to decode JSON with "
"unexpected mimetype: %s" % self.content_type
)
)
return loads(body)
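    # Hedged usage sketch (handler name assumed):
    #   async def handle(request: web.Request) -> web.Response:
    #       data = await request.json()  # HTTPBadRequest on wrong mimetype
    #       return web.json_response(data)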
async def multipart(self) -> MultipartReader:
"""Return async iterator to process BODY as multipart."""
return MultipartReader(self._headers, self._payload)
async def post(self) -> "MultiDictProxy[str | bytes | FileField]":
"""Return POST parameters."""
if self._post is not None:
return self._post
if self._method not in self.POST_METHODS:
self._post = MultiDictProxy(MultiDict())
return self._post
content_type = self.content_type
if content_type not in (
"",
"application/x-www-form-urlencoded",
"multipart/form-data",
):
self._post = MultiDictProxy(MultiDict())
return self._post
out: MultiDict[str | bytes | FileField] = MultiDict()
if content_type == "multipart/form-data":
multipart = await self.multipart()
max_size = self._client_max_size
field = await multipart.next()
while field is not None:
size = 0
field_ct = field.headers.get(hdrs.CONTENT_TYPE)
if isinstance(field, BodyPartReader):
assert field.name is not None
# Note that according to RFC 7578, the Content-Type header
# is optional, even for files, so we can't assume it's
# present.
# https://tools.ietf.org/html/rfc7578#section-4.4
if field.filename:
# store file in temp file
tmp = await self._loop.run_in_executor(
None, tempfile.TemporaryFile
)
chunk = await field.read_chunk(size=2**16)
while chunk:
chunk = field.decode(chunk)
await self._loop.run_in_executor(None, tmp.write, chunk)
size += len(chunk)
if 0 < max_size < size:
await self._loop.run_in_executor(None, tmp.close)
raise HTTPRequestEntityTooLarge(
max_size=max_size, actual_size=size
)
chunk = await field.read_chunk(size=2**16)
await self._loop.run_in_executor(None, tmp.seek, 0)
if field_ct is None:
field_ct = "application/octet-stream"
ff = FileField(
field.name,
field.filename,
cast(io.BufferedReader, tmp),
field_ct,
field.headers,
)
out.add(field.name, ff)
else:
# deal with ordinary data
value = await field.read(decode=True)
if field_ct is None or field_ct.startswith("text/"):
charset = field.get_charset(default="utf-8")
out.add(field.name, value.decode(charset))
else:
out.add(field.name, value)
size += len(value)
if 0 < max_size < size:
raise HTTPRequestEntityTooLarge(
max_size=max_size, actual_size=size
)
else:
raise ValueError(
"To decode nested multipart you need to use custom reader",
)
field = await multipart.next()
else:
data = await self.read()
if data:
charset = self.charset or "utf-8"
bytes_query = data.rstrip()
try:
query = bytes_query.decode(charset)
except LookupError:
raise HTTPUnsupportedMediaType()
out.extend(
parse_qsl(qs=query, keep_blank_values=True, encoding=charset)
)
self._post = MultiDictProxy(out)
return self._post
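    # Hedged usage sketch (field name assumed): file parts arrive as
    # FileField instances backed by temp files, other fields as str/bytes:
    #   form = await request.post()
    #   field = form["upload"]
    #   if isinstance(field, FileField):
    #       data = field.file.read()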
def get_extra_info(self, name: str, default: Any = None) -> Any:
"""Extra info from protocol transport"""
transport = self._protocol.transport
if transport is None:
return default
return transport.get_extra_info(name, default)
def __repr__(self) -> str:
ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
"ascii"
)
return f"<{self.__class__.__name__} {self._method} {ascii_encodable_path} >"
def __eq__(self, other: object) -> bool:
return id(self) == id(other)
def __bool__(self) -> bool:
return True
async def _prepare_hook(self, response: StreamResponse) -> None:
return
def _cancel(self, exc: BaseException) -> None:
set_exception(self._payload, exc)
def _finish(self) -> None:
if self._post is None or self.content_type != "multipart/form-data":
return
# NOTE: Release file descriptors for the
# NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom`
# NOTE: instances of files sent within multipart request body
# NOTE: via HTTP POST request.
for file_name, file_field_object in self._post.items():
if isinstance(file_field_object, FileField):
file_field_object.file.close()
class Request(BaseRequest):
_match_info: Optional["UrlMappingMatchInfo"] = None
def clone(
self,
*,
method: str | _SENTINEL = sentinel,
rel_url: StrOrURL | _SENTINEL = sentinel,
headers: LooseHeaders | _SENTINEL = sentinel,
scheme: str | _SENTINEL = sentinel,
host: str | _SENTINEL = sentinel,
remote: str | _SENTINEL = sentinel,
client_max_size: int | _SENTINEL = sentinel,
) -> "Request":
ret = super().clone(
method=method,
rel_url=rel_url,
headers=headers,
scheme=scheme,
host=host,
remote=remote,
client_max_size=client_max_size,
)
new_ret = cast(Request, ret)
new_ret._match_info = self._match_info
return new_ret
@reify
def match_info(self) -> "UrlMappingMatchInfo":
"""Result of route resolving."""
match_info = self._match_info
assert match_info is not None
return match_info
@property
def app(self) -> "Application":
"""Application instance."""
match_info = self._match_info
assert match_info is not None
return match_info.current_app
@property
def config_dict(self) -> ChainMapProxy:
match_info = self._match_info
assert match_info is not None
lst = match_info.apps
app = self.app
idx = lst.index(app)
sublist = list(reversed(lst[: idx + 1]))
return ChainMapProxy(sublist)
async def _prepare_hook(self, response: StreamResponse) -> None:
match_info = self._match_info
if match_info is None:
return
for app in match_info._apps:
if on_response_prepare := app.on_response_prepare:
await on_response_prepare.send(self, response)
|
import asyncio
import datetime
import socket
import ssl
import sys
import weakref
from collections.abc import Iterator, MutableMapping
from typing import NoReturn
from unittest import mock
import pytest
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict
from yarl import URL
from aiohttp import ETag, HttpVersion, web
from aiohttp.base_protocol import BaseProtocol
from aiohttp.http_parser import RawRequestMessage
from aiohttp.pytest_plugin import AiohttpClient
from aiohttp.streams import StreamReader
from aiohttp.test_utils import make_mocked_request
@pytest.fixture
def protocol() -> mock.Mock:
return mock.Mock(_reading_paused=False)
def test_base_ctor() -> None:
message = RawRequestMessage(
"GET",
"/path/to?a=1&b=2",
HttpVersion(1, 1),
CIMultiDictProxy(CIMultiDict()),
(),
False,
None,
False,
False,
URL("/path/to?a=1&b=2"),
)
req = web.BaseRequest(
message, mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock()
)
assert "GET" == req.method
assert HttpVersion(1, 1) == req.version
# MacOS may return CamelCased host name, need .lower()
# FQDN can be wider than host, e.g.
# 'fv-az397-495' in 'fv-az397-495.internal.cloudapp.net'
assert req.host.lower() in socket.getfqdn().lower()
assert "/path/to?a=1&b=2" == req.path_qs
assert "/path/to" == req.path
assert "a=1&b=2" == req.query_string
assert CIMultiDict() == req.headers
assert () == req.raw_headers
get = req.query
assert MultiDict([("a", "1"), ("b", "2")]) == get
# second call should return the same object
assert get is req.query
assert req.keep_alive
assert req
def test_ctor() -> None:
req = make_mocked_request("GET", "/path/to?a=1&b=2")
assert "GET" == req.method
assert HttpVersion(1, 1) == req.version
# MacOS may return CamelCased host name, need .lower()
# FQDN can be wider than host, e.g.
# 'fv-az397-495' in 'fv-az397-495.internal.cloudapp.net'
assert req.host.lower() in socket.getfqdn().lower()
assert "/path/to?a=1&b=2" == req.path_qs
assert "/path/to" == req.path
assert "a=1&b=2" == req.query_string
assert CIMultiDict() == req.headers
assert () == req.raw_headers
get = req.query
assert MultiDict([("a", "1"), ("b", "2")]) == get
# second call should return the same object
assert get is req.query
assert req.keep_alive
# just make sure that all lines of make_mocked_request covered
headers = CIMultiDict(FOO="bar")
payload = mock.Mock()
protocol = mock.Mock()
app = mock.Mock()
req = make_mocked_request(
"GET",
"/path/to?a=1&b=2",
headers=headers,
protocol=protocol,
payload=payload,
app=app,
)
assert req.app is app
assert req.content is payload
assert req.protocol is protocol
assert req.transport is protocol.transport
assert req.headers == headers
assert req.raw_headers == ((b"FOO", b"bar"),)
assert req.task is req._task
def test_doubleslashes() -> None:
# NB: //foo/bar is an absolute URL with foo netloc and /bar path
req = make_mocked_request("GET", "/bar//foo/")
assert "/bar//foo/" == req.path
def test_content_type_not_specified() -> None:
req = make_mocked_request("Get", "/")
assert "application/octet-stream" == req.content_type
def test_content_type_from_spec() -> None:
req = make_mocked_request(
"Get", "/", CIMultiDict([("CONTENT-TYPE", "application/json")])
)
assert "application/json" == req.content_type
def test_content_type_from_spec_with_charset() -> None:
req = make_mocked_request(
"Get", "/", CIMultiDict([("CONTENT-TYPE", "text/html; charset=UTF-8")])
)
assert "text/html" == req.content_type
assert "UTF-8" == req.charset
def test_calc_content_type_on_getting_charset() -> None:
req = make_mocked_request(
"Get", "/", CIMultiDict([("CONTENT-TYPE", "text/html; charset=UTF-8")])
)
assert "UTF-8" == req.charset
assert "text/html" == req.content_type
def test_urlencoded_querystring() -> None:
req = make_mocked_request("GET", "/yandsearch?text=%D1%82%D0%B5%D0%BA%D1%81%D1%82")
assert {"text": "текст"} == req.query
def test_non_ascii_path() -> None:
req = make_mocked_request("GET", "/путь")
assert "/путь" == req.path
def test_non_ascii_raw_path() -> None:
req = make_mocked_request("GET", "/путь")
assert "/путь" == req.raw_path
def test_absolute_url() -> None:
req = make_mocked_request("GET", "https://example.com/path/to?a=1")
assert req.url == URL("https://example.com/path/to?a=1")
assert req.scheme == "https"
assert req.host == "example.com"
assert req.rel_url == URL.build(path="/path/to", query={"a": "1"})
def test_clone_absolute_scheme() -> None:
req = make_mocked_request("GET", "https://example.com/path/to?a=1")
assert req.scheme == "https"
req2 = req.clone(scheme="http")
assert req2.scheme == "http"
assert req2.url.scheme == "http"
def test_clone_absolute_host() -> None:
req = make_mocked_request("GET", "https://example.com/path/to?a=1")
assert req.host == "example.com"
req2 = req.clone(host="foo.test")
assert req2.host == "foo.test"
assert req2.url.host == "foo.test"
def test_content_length() -> None:
req = make_mocked_request("Get", "/", CIMultiDict([("CONTENT-LENGTH", "123")]))
assert 123 == req.content_length
def test_range_to_slice_head() -> None:
req = make_mocked_request(
"GET", "/", headers=CIMultiDict([("RANGE", "bytes=0-499")])
)
assert isinstance(req.http_range, slice)
assert req.http_range.start == 0 and req.http_range.stop == 500
def test_range_to_slice_mid() -> None:
req = make_mocked_request(
"GET", "/", headers=CIMultiDict([("RANGE", "bytes=500-999")])
)
assert isinstance(req.http_range, slice)
assert req.http_range.start == 500 and req.http_range.stop == 1000
def test_range_to_slice_tail_start() -> None:
req = make_mocked_request(
"GET", "/", headers=CIMultiDict([("RANGE", "bytes=9500-")])
)
assert isinstance(req.http_range, slice)
assert req.http_range.start == 9500 and req.http_range.stop is None
def test_range_to_slice_tail_stop() -> None:
req = make_mocked_request(
"GET", "/", headers=CIMultiDict([("RANGE", "bytes=-500")])
)
assert isinstance(req.http_range, slice)
assert req.http_range.start == -500 and req.http_range.stop is None
def test_non_keepalive_on_http10() -> None:
req = make_mocked_request("GET", "/", version=HttpVersion(1, 0))
assert not req.keep_alive
def test_non_keepalive_on_closing() -> None:
req = make_mocked_request("GET", "/", closing=True)
assert not req.keep_alive
async def test_call_POST_on_GET_request() -> None:
req = make_mocked_request("GET", "/")
ret = await req.post()
assert CIMultiDict() == ret
async def test_call_POST_on_weird_content_type() -> None:
req = make_mocked_request(
"POST", "/", headers=CIMultiDict({"CONTENT-TYPE": "something/weird"})
)
ret = await req.post()
assert CIMultiDict() == ret
async def test_call_POST_twice() -> None:
req = make_mocked_request("GET", "/")
ret1 = await req.post()
ret2 = await req.post()
assert ret1 is ret2
def test_no_request_cookies() -> None:
req = make_mocked_request("GET", "/")
assert req.cookies == {}
cookies = req.cookies
assert cookies is req.cookies
def test_request_cookie() -> None:
headers = CIMultiDict(COOKIE="cookie1=value1; cookie2=value2")
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {"cookie1": "value1", "cookie2": "value2"}
def test_request_cookie__set_item() -> None:
headers = CIMultiDict(COOKIE="name=value")
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {"name": "value"}
with pytest.raises(TypeError):
req.cookies["my"] = "value" # type: ignore[index]
def test_request_cookies_with_special_characters() -> None:
"""Test that cookies with special characters in names are accepted.
This tests the fix for issue #2683 where cookies with special characters
like {, }, / in their names would cause a 500 error. The fix makes the
    cookie parser more tolerant so that it can handle real-world cookies.
"""
# Test cookie names with curly braces (e.g., ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E})
headers = CIMultiDict(COOKIE="{test}=value1; normal=value2")
req = make_mocked_request("GET", "/", headers=headers)
# Both cookies should be parsed successfully
assert req.cookies == {"{test}": "value1", "normal": "value2"}
# Test cookie names with forward slash
headers = CIMultiDict(COOKIE="test/name=value1; valid=value2")
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {"test/name": "value1", "valid": "value2"}
# Test cookie names with various special characters
headers = CIMultiDict(
COOKIE="test{foo}bar=value1; test/path=value2; normal_cookie=value3"
)
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {
"test{foo}bar": "value1",
"test/path": "value2",
"normal_cookie": "value3",
}
def test_request_cookies_real_world_examples() -> None:
"""Test handling of real-world cookie examples from issue #2683."""
# Example from the issue: ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}
headers = CIMultiDict(
COOKIE="ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}=val1; normal_cookie=val2"
)
req = make_mocked_request("GET", "/", headers=headers)
# All cookies should be parsed successfully
assert req.cookies == {
"ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}": "val1",
"normal_cookie": "val2",
}
# Multiple cookies with special characters
headers = CIMultiDict(
COOKIE="{cookie1}=val1; cookie/2=val2; cookie[3]=val3; cookie(4)=val4"
)
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {
"{cookie1}": "val1",
"cookie/2": "val2",
"cookie[3]": "val3",
"cookie(4)": "val4",
}
def test_request_cookies_edge_cases() -> None:
"""Test edge cases for cookie parsing."""
# Empty cookie value
headers = CIMultiDict(COOKIE="test=; normal=value")
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {"test": "", "normal": "value"}
# Cookie with quoted value
headers = CIMultiDict(COOKIE='test="quoted value"; normal=unquoted')
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {"test": "quoted value", "normal": "unquoted"}
def test_request_cookies_no_500_error() -> None:
"""Test that cookies with special characters don't cause 500 errors.
This specifically tests that issue #2683 is fixed - previously cookies
with characters like { } would cause CookieError and 500 responses.
"""
# This cookie format previously caused 500 errors
headers = CIMultiDict(COOKIE="ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}=test")
# Should not raise any exception when accessing cookies
req = make_mocked_request("GET", "/", headers=headers)
cookies = req.cookies # This used to raise CookieError
# Verify the cookie was parsed successfully
assert "ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}" in cookies
assert cookies["ISAWPLB{DB45DF86-F806-407C-932C-D52A60E4019E}"] == "test"
def test_request_cookies_quoted_values() -> None:
"""Test that quoted cookie values are handled consistently.
This tests the fix for issue #5397 where quoted cookie values were
handled inconsistently based on whether domain attributes were present.
The new parser should always unquote cookie values consistently.
"""
# Test simple quoted cookie value
headers = CIMultiDict(COOKIE='sess="quoted_value"')
req = make_mocked_request("GET", "/", headers=headers)
# Quotes should be removed consistently
assert req.cookies == {"sess": "quoted_value"}
# Test quoted cookie with semicolon in value
headers = CIMultiDict(COOKIE='data="value;with;semicolons"')
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {"data": "value;with;semicolons"}
# Test mixed quoted and unquoted cookies
headers = CIMultiDict(
COOKIE='quoted="value1"; unquoted=value2; also_quoted="value3"'
)
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {
"quoted": "value1",
"unquoted": "value2",
"also_quoted": "value3",
}
# Test escaped quotes in cookie value
headers = CIMultiDict(COOKIE=r'escaped="value with \" quote"')
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {"escaped": 'value with " quote'}
# Test empty quoted value
headers = CIMultiDict(COOKIE='empty=""')
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {"empty": ""}
def test_request_cookies_with_attributes() -> None:
"""Test that cookie attributes are parsed as cookies per RFC 6265.
Per RFC 6265 Section 5.4, Cookie headers contain only name-value pairs.
Names that match attribute names (Domain, Path, etc.) should be treated
as regular cookies, not as attributes.
"""
# Cookie with domain - both should be parsed as cookies
headers = CIMultiDict(COOKIE='sess="quoted_value"; Domain=.example.com')
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {"sess": "quoted_value", "Domain": ".example.com"}
# Cookie with multiple attribute names - all parsed as cookies
headers = CIMultiDict(COOKIE='token="abc123"; Path=/; Secure; HttpOnly')
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {"token": "abc123", "Path": "/", "Secure": "", "HttpOnly": ""}
# Multiple cookies with attribute names mixed in
headers = CIMultiDict(
COOKIE='c1="v1"; Domain=.example.com; c2="v2"; Path=/api; c3=v3; Secure'
)
req = make_mocked_request("GET", "/", headers=headers)
assert req.cookies == {
"c1": "v1",
"Domain": ".example.com",
"c2": "v2",
"Path": "/api",
"c3": "v3",
"Secure": "",
}
def test_match_info() -> None:
req = make_mocked_request("GET", "/")
assert req._match_info is req.match_info
def test_request_is_mutable_mapping() -> None:
req = make_mocked_request("GET", "/")
assert isinstance(req, MutableMapping)
assert req # even when the MutableMapping is empty, request should always be True
req["key"] = "value"
assert "value" == req["key"]
def test_request_delitem() -> None:
req = make_mocked_request("GET", "/")
req["key"] = "value"
assert "value" == req["key"]
del req["key"]
assert "key" not in req
def test_request_len() -> None:
req = make_mocked_request("GET", "/")
assert len(req) == 0
req["key"] = "value"
assert len(req) == 1
def test_request_iter() -> None:
req = make_mocked_request("GET", "/")
req["key"] = "value"
req["key2"] = "value2"
key3 = web.RequestKey("key3", str)
req[key3] = "value3"
assert set(req) == {"key", "key2", key3}
def test_requestkey() -> None:
req = make_mocked_request("GET", "/")
key = web.RequestKey("key", str)
req[key] = "value"
assert req[key] == "value"
assert len(req) == 1
del req[key]
assert len(req) == 0
def test_request_get_requestkey() -> None:
req = make_mocked_request("GET", "/")
key = web.RequestKey("key", int)
assert req.get(key, "foo") == "foo"
req[key] = 5
assert req.get(key, "foo") == 5
def test_requestkey_repr_concrete() -> None:
key = web.RequestKey("key", int)
assert repr(key) in (
"<RequestKey(__channelexec__.key, type=int)>", # pytest-xdist
"<RequestKey(__main__.key, type=int)>",
)
key2 = web.RequestKey("key", web.Request)
assert repr(key2) in (
# pytest-xdist:
"<RequestKey(__channelexec__.key, type=aiohttp.web_request.Request)>",
"<RequestKey(__main__.key, type=aiohttp.web_request.Request)>",
)
def test_requestkey_repr_nonconcrete() -> None:
key = web.RequestKey("key", Iterator[int])
if sys.version_info < (3, 11):
assert repr(key) in (
# pytest-xdist:
"<RequestKey(__channelexec__.key, type=collections.abc.Iterator)>",
"<RequestKey(__main__.key, type=collections.abc.Iterator)>",
)
else:
assert repr(key) in (
# pytest-xdist:
"<RequestKey(__channelexec__.key, type=collections.abc.Iterator[int])>",
"<RequestKey(__main__.key, type=collections.abc.Iterator[int])>",
)
def test_requestkey_repr_annotated() -> None:
key = web.RequestKey[Iterator[int]]("key")
if sys.version_info < (3, 11):
assert repr(key) in (
# pytest-xdist:
"<RequestKey(__channelexec__.key, type=collections.abc.Iterator)>",
"<RequestKey(__main__.key, type=collections.abc.Iterator)>",
)
else:
assert repr(key) in (
# pytest-xdist:
"<RequestKey(__channelexec__.key, type=collections.abc.Iterator[int])>",
"<RequestKey(__main__.key, type=collections.abc.Iterator[int])>",
)
def test___repr__() -> None:
req = make_mocked_request("GET", "/path/to")
assert "<Request GET /path/to >" == repr(req)
def test___repr___non_ascii_path() -> None:
req = make_mocked_request("GET", "/path/\U0001f415\U0001f308")
assert "<Request GET /path/\\U0001f415\\U0001f308 >" == repr(req)
def test_http_scheme() -> None:
req = make_mocked_request("GET", "/", headers={"Host": "example.com"})
assert "http" == req.scheme
assert req.secure is False
def test_https_scheme_by_ssl_transport() -> None:
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
req = make_mocked_request(
"GET", "/", headers={"Host": "example.com"}, sslcontext=context
)
assert "https" == req.scheme
assert req.secure is True
def test_single_forwarded_header() -> None:
header = "by=identifier;for=identifier;host=identifier;proto=identifier"
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert req.forwarded[0]["by"] == "identifier"
assert req.forwarded[0]["for"] == "identifier"
assert req.forwarded[0]["host"] == "identifier"
assert req.forwarded[0]["proto"] == "identifier"
@pytest.mark.parametrize(
"forward_for_in, forward_for_out",
[
("1.2.3.4:1234", "1.2.3.4:1234"),
("1.2.3.4", "1.2.3.4"),
('"[2001:db8:cafe::17]:1234"', "[2001:db8:cafe::17]:1234"),
('"[2001:db8:cafe::17]"', "[2001:db8:cafe::17]"),
],
)
def test_forwarded_node_identifier(forward_for_in: str, forward_for_out: str) -> None:
header = f"for={forward_for_in}"
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert req.forwarded == ({"for": forward_for_out},)
def test_single_forwarded_header_camelcase() -> None:
header = "bY=identifier;fOr=identifier;HOst=identifier;pRoTO=identifier"
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert req.forwarded[0]["by"] == "identifier"
assert req.forwarded[0]["for"] == "identifier"
assert req.forwarded[0]["host"] == "identifier"
assert req.forwarded[0]["proto"] == "identifier"
def test_single_forwarded_header_single_param() -> None:
header = "BY=identifier"
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert req.forwarded[0]["by"] == "identifier"
def test_single_forwarded_header_multiple_param() -> None:
header = "By=identifier1,BY=identifier2, By=identifier3 , BY=identifier4"
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert len(req.forwarded) == 4
assert req.forwarded[0]["by"] == "identifier1"
assert req.forwarded[1]["by"] == "identifier2"
assert req.forwarded[2]["by"] == "identifier3"
assert req.forwarded[3]["by"] == "identifier4"
def test_single_forwarded_header_quoted_escaped() -> None:
header = r'BY=identifier;pROTO="\lala lan\d\~ 123\!&"'
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert req.forwarded[0]["by"] == "identifier"
assert req.forwarded[0]["proto"] == "lala land~ 123!&"
def test_single_forwarded_header_custom_param() -> None:
header = r'BY=identifier;PROTO=https;SOME="other, \"value\""'
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert len(req.forwarded) == 1
assert req.forwarded[0]["by"] == "identifier"
assert req.forwarded[0]["proto"] == "https"
assert req.forwarded[0]["some"] == 'other, "value"'
def test_single_forwarded_header_empty_params() -> None:
# This is allowed by the grammar given in RFC 7239
header = ";For=identifier;;PROTO=https;;;"
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert req.forwarded[0]["for"] == "identifier"
assert req.forwarded[0]["proto"] == "https"
def test_single_forwarded_header_bad_separator() -> None:
header = "BY=identifier PROTO=https"
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert "proto" not in req.forwarded[0]
def test_single_forwarded_header_injection1() -> None:
# We might receive a header like this if we're sitting behind a reverse
# proxy that blindly appends a forwarded-element without checking
# the syntax of existing field-values. We should be able to recover
# the appended element anyway.
header = 'for=_injected;by=", for=_real'
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert len(req.forwarded) == 2
assert "by" not in req.forwarded[0]
assert req.forwarded[1]["for"] == "_real"
def test_single_forwarded_header_injection2() -> None:
header = "very bad syntax, for=_real"
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert len(req.forwarded) == 2
assert "for" not in req.forwarded[0]
assert req.forwarded[1]["for"] == "_real"
def test_single_forwarded_header_long_quoted_string() -> None:
header = 'for="' + "\\\\" * 5000 + '"'
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Forwarded": header}))
assert req.forwarded[0]["for"] == "\\" * 5000
def test_multiple_forwarded_headers() -> None:
headers = CIMultiDict[str]()
headers.add("Forwarded", "By=identifier1;for=identifier2, BY=identifier3")
headers.add("Forwarded", "By=identifier4;fOr=identifier5")
req = make_mocked_request("GET", "/", headers=headers)
assert len(req.forwarded) == 3
assert req.forwarded[0]["by"] == "identifier1"
assert req.forwarded[0]["for"] == "identifier2"
assert req.forwarded[1]["by"] == "identifier3"
assert req.forwarded[2]["by"] == "identifier4"
assert req.forwarded[2]["for"] == "identifier5"
def test_multiple_forwarded_headers_bad_syntax() -> None:
headers = CIMultiDict[str]()
headers.add("Forwarded", "for=_1;by=_2")
headers.add("Forwarded", "invalid value")
headers.add("Forwarded", "")
headers.add("Forwarded", "for=_3;by=_4")
req = make_mocked_request("GET", "/", headers=headers)
assert len(req.forwarded) == 4
assert req.forwarded[0]["for"] == "_1"
assert "for" not in req.forwarded[1]
assert "for" not in req.forwarded[2]
assert req.forwarded[3]["by"] == "_4"
def test_multiple_forwarded_headers_injection() -> None:
headers = CIMultiDict[str]()
# This could be sent by an attacker, hoping to "shadow" the second header.
headers.add("Forwarded", 'for=_injected;by="')
# This is added by our trusted reverse proxy.
headers.add("Forwarded", "for=_real;by=_actual_proxy")
req = make_mocked_request("GET", "/", headers=headers)
assert len(req.forwarded) == 2
assert "by" not in req.forwarded[0]
assert req.forwarded[1]["for"] == "_real"
assert req.forwarded[1]["by"] == "_actual_proxy"
def test_host_by_host_header() -> None:
req = make_mocked_request("GET", "/", headers=CIMultiDict({"Host": "example.com"}))
assert req.host == "example.com"
def test_raw_headers() -> None:
req = make_mocked_request("GET", "/", headers=CIMultiDict({"X-HEADER": "aaa"}))
assert req.raw_headers == ((b"X-HEADER", b"aaa"),)
def test_rel_url() -> None:
req = make_mocked_request("GET", "/path")
assert URL("/path") == req.rel_url
def test_url_url() -> None:
req = make_mocked_request("GET", "/path", headers={"HOST": "example.com"})
assert URL("http://example.com/path") == req.url
def test_url_non_default_port() -> None:
req = make_mocked_request("GET", "/path", headers={"HOST": "example.com:8123"})
assert req.url == URL("http://example.com:8123/path")
def test_url_ipv6() -> None:
req = make_mocked_request("GET", "/path", headers={"HOST": "[::1]:8123"})
assert req.url == URL("http://[::1]:8123/path")
def test_clone() -> None:
req = make_mocked_request("GET", "/path")
req2 = req.clone()
assert req2.method == "GET"
assert req2.rel_url == URL("/path")
def test_clone_client_max_size() -> None:
req = make_mocked_request("GET", "/path", client_max_size=1024)
req2 = req.clone()
assert req._client_max_size == req2._client_max_size
assert req2._client_max_size == 1024
def test_clone_override_client_max_size() -> None:
req = make_mocked_request("GET", "/path", client_max_size=1024)
req2 = req.clone(client_max_size=2048)
assert req2.client_max_size == 2048
def test_clone_method() -> None:
req = make_mocked_request("GET", "/path")
req2 = req.clone(method="POST")
assert req2.method == "POST"
assert req2.rel_url == URL("/path")
def test_clone_rel_url() -> None:
req = make_mocked_request("GET", "/path")
req2 = req.clone(rel_url=URL("/path2"))
assert req2.rel_url == URL("/path2")
def test_clone_rel_url_str() -> None:
req = make_mocked_request("GET", "/path")
req2 = req.clone(rel_url="/path2")
assert req2.rel_url == URL("/path2")
def test_clone_headers() -> None:
req = make_mocked_request("GET", "/path", headers={"A": "B"})
req2 = req.clone(headers=CIMultiDict({"B": "C"}))
assert req2.headers == CIMultiDict({"B": "C"})
assert req2.raw_headers == ((b"B", b"C"),)
def test_clone_headers_dict() -> None:
req = make_mocked_request("GET", "/path", headers={"A": "B"})
req2 = req.clone(headers={"B": "C"})
assert req2.headers == CIMultiDict({"B": "C"})
assert req2.raw_headers == ((b"B", b"C"),)
async def test_cannot_clone_after_read(protocol: BaseProtocol) -> None:
payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
payload.feed_data(b"data")
payload.feed_eof()
req = make_mocked_request("GET", "/path", payload=payload)
await req.read()
with pytest.raises(RuntimeError):
req.clone()
async def test_make_too_big_request(protocol: BaseProtocol) -> None:
payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
large_file = 1024**2 * b"x"
too_large_file = large_file + b"x"
payload.feed_data(too_large_file)
payload.feed_eof()
req = make_mocked_request("POST", "/", payload=payload)
with pytest.raises(web.HTTPRequestEntityTooLarge) as err:
await req.read()
assert err.value.status_code == 413
async def test_request_with_wrong_content_type_encoding(protocol: BaseProtocol) -> None:
payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
payload.feed_data(b"{}")
payload.feed_eof()
headers = {"Content-Type": "text/html; charset=test"}
req = make_mocked_request("POST", "/", payload=payload, headers=headers)
with pytest.raises(web.HTTPUnsupportedMediaType) as err:
await req.text()
assert err.value.status_code == 415
async def test_make_too_big_request_same_size_to_max(protocol: BaseProtocol) -> None:
payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
large_file = 1024**2 * b"x"
payload.feed_data(large_file)
payload.feed_eof()
req = make_mocked_request("POST", "/", payload=payload)
resp_text = await req.read()
assert resp_text == large_file
async def test_make_too_big_request_adjust_limit(protocol: BaseProtocol) -> None:
payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
large_file = 1024**2 * b"x"
too_large_file = large_file + b"x"
payload.feed_data(too_large_file)
payload.feed_eof()
max_size = 1024**2 + 2
req = make_mocked_request("POST", "/", payload=payload, client_max_size=max_size)
txt = await req.read()
assert len(txt) == 1024**2 + 1
async def test_multipart_formdata(protocol: BaseProtocol) -> None:
payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
payload.feed_data(
b"-----------------------------326931944431359\r\n"
b'Content-Disposition: form-data; name="a"\r\n'
b"\r\n"
b"b\r\n"
b"-----------------------------326931944431359\r\n"
b'Content-Disposition: form-data; name="c"\r\n'
b"\r\n"
b"d\r\n"
b"-----------------------------326931944431359--\r\n"
)
content_type = (
"multipart/form-data; boundary=---------------------------326931944431359"
)
payload.feed_eof()
req = make_mocked_request(
"POST", "/", headers={"CONTENT-TYPE": content_type}, payload=payload
)
result = await req.post()
assert dict(result) == {"a": "b", "c": "d"}
async def test_multipart_formdata_file(protocol: BaseProtocol) -> None:
# Make sure file uploads work, even without a content type
payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
payload.feed_data(
b"-----------------------------326931944431359\r\n"
b'Content-Disposition: form-data; name="a_file"; filename="binary"\r\n'
b"\r\n"
b"\ff\r\n"
b"-----------------------------326931944431359--\r\n"
)
content_type = (
"multipart/form-data; boundary=---------------------------326931944431359"
)
payload.feed_eof()
req = make_mocked_request(
"POST", "/", headers={"CONTENT-TYPE": content_type}, payload=payload
)
result = await req.post()
assert hasattr(result["a_file"], "file")
content = result["a_file"].file.read()
assert content == b"\ff"
req._finish()
async def test_make_too_big_request_limit_None(protocol: BaseProtocol) -> None:
payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
large_file = 1024**2 * b"x"
too_large_file = large_file + b"x"
payload.feed_data(too_large_file)
payload.feed_eof()
req = make_mocked_request("POST", "/", payload=payload, client_max_size=0)
txt = await req.read()
assert len(txt) == 1024**2 + 1
def test_remote_peername_tcp() -> None:
transp = mock.Mock()
transp.get_extra_info.return_value = ("10.10.10.10", 1234)
req = make_mocked_request("GET", "/", transport=transp)
assert req.remote == "10.10.10.10"
def test_remote_peername_unix() -> None:
transp = mock.Mock()
transp.get_extra_info.return_value = "/path/to/sock"
req = make_mocked_request("GET", "/", transport=transp)
assert req.remote == "/path/to/sock"
def test_save_state_on_clone() -> None:
req = make_mocked_request("GET", "/")
req["key"] = "val"
req2 = req.clone()
req2["key"] = "val2"
assert req["key"] == "val"
assert req2["key"] == "val2"
def test_clone_scheme() -> None:
req = make_mocked_request("GET", "/")
assert req.scheme == "http"
req2 = req.clone(scheme="https")
assert req2.scheme == "https"
assert req2.url.scheme == "https"
def test_clone_host() -> None:
req = make_mocked_request("GET", "/")
assert req.host != "example.com"
req2 = req.clone(host="example.com")
assert req2.host == "example.com"
assert req2.url.host == "example.com"
def test_clone_remote() -> None:
req = make_mocked_request("GET", "/")
assert req.remote != "11.11.11.11"
req2 = req.clone(remote="11.11.11.11")
assert req2.remote == "11.11.11.11"
def test_remote_with_closed_transport() -> None:
transp = mock.Mock()
transp.get_extra_info.return_value = ("10.10.10.10", 1234)
req = make_mocked_request("GET", "/", transport=transp)
req._protocol = None # type: ignore[assignment]
assert req.remote == "10.10.10.10"
def test_url_http_with_closed_transport() -> None:
req = make_mocked_request("GET", "/")
req._protocol = None # type: ignore[assignment]
assert str(req.url).startswith("http://")
def test_url_https_with_closed_transport() -> None:
c = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
req = make_mocked_request("GET", "/", sslcontext=c)
req._protocol = None # type: ignore[assignment]
assert str(req.url).startswith("https://")
async def test_get_extra_info() -> None:
valid_key = "test"
valid_value = "existent"
default_value = "default"
def get_extra_info(name: str, default: object = None) -> object:
return {valid_key: valid_value}.get(name, default)
transp = mock.Mock()
transp.get_extra_info.side_effect = get_extra_info
req = make_mocked_request("GET", "/", transport=transp)
assert req is not None
req_extra_info = req.get_extra_info(valid_key, default_value)
assert req._protocol.transport is not None
transp_extra_info = req._protocol.transport.get_extra_info(valid_key, default_value)
assert req_extra_info == transp_extra_info
req._protocol.transport = None
extra_info = req.get_extra_info(valid_key, default_value)
assert extra_info == default_value
def test_eq() -> None:
req1 = make_mocked_request("GET", "/path/to?a=1&b=2")
req2 = make_mocked_request("GET", "/path/to?a=1&b=2")
assert req1 != req2
assert req1 == req1
async def test_json(aiohttp_client: AiohttpClient) -> None:
async def handler(request: web.Request) -> web.Response:
body_text = await request.text()
assert body_text == '{"some": "data"}'
assert request.headers["Content-Type"] == "application/json"
body_json = await request.json()
assert body_json == {"some": "data"}
return web.Response()
app = web.Application()
app.router.add_post("/", handler)
client = await aiohttp_client(app)
json_data = {"some": "data"}
async with client.post("/", json=json_data) as resp:
assert 200 == resp.status
async def test_json_invalid_content_type(aiohttp_client: AiohttpClient) -> None:
async def handler(request: web.Request) -> NoReturn:
body_text = await request.text()
assert body_text == '{"some": "data"}'
assert request.headers["Content-Type"] == "text/plain"
await request.json() # raises HTTP 400
assert False
app = web.Application()
app.router.add_post("/", handler)
client = await aiohttp_client(app)
json_data = {"some": "data"}
headers = {"Content-Type": "text/plain"}
async with client.post("/", json=json_data, headers=headers) as resp:
assert 400 == resp.status
resp_text = await resp.text()
assert resp_text == (
"Attempt to decode JSON with unexpected mimetype: text/plain"
)
def test_weakref_creation() -> None:
req = make_mocked_request("GET", "/")
weakref.ref(req)
@pytest.mark.parametrize(
("header", "header_attr"),
(
pytest.param("If-Match", "if_match"),
pytest.param("If-None-Match", "if_none_match"),
),
)
@pytest.mark.parametrize(
("header_val", "expected"),
(
pytest.param(
'"67ab43", W/"54ed21", "7892,dd"',
(
ETag(is_weak=False, value="67ab43"),
ETag(is_weak=True, value="54ed21"),
ETag(is_weak=False, value="7892,dd"),
),
),
pytest.param(
'"bfc1ef-5b2c2730249c88ca92d82d"',
(ETag(is_weak=False, value="bfc1ef-5b2c2730249c88ca92d82d"),),
),
pytest.param(
'"valid-tag", "also-valid-tag",somegarbage"last-tag"',
(
ETag(is_weak=False, value="valid-tag"),
ETag(is_weak=False, value="also-valid-tag"),
),
),
pytest.param(
'"ascii", "это точно не ascii", "ascii again"',
(ETag(is_weak=False, value="ascii"),),
),
pytest.param(
"*",
(ETag(is_weak=False, value="*"),),
),
),
)
def test_etag_headers(
header: str, header_attr: str, header_val: str, expected: tuple[ETag, ...]
) -> None:
req = make_mocked_request("GET", "/", headers={header: header_val})
assert getattr(req, header_attr) == expected
@pytest.mark.parametrize(
("header", "header_attr"),
(
pytest.param("If-Modified-Since", "if_modified_since"),
pytest.param("If-Unmodified-Since", "if_unmodified_since"),
pytest.param("If-Range", "if_range"),
),
)
@pytest.mark.parametrize(
("header_val", "expected"),
(
pytest.param("xxyyzz", None),
pytest.param("Tue, 08 Oct 4446413 00:56:40 GMT", None),
pytest.param("Tue, 08 Oct 2000 00:56:80 GMT", None),
pytest.param(
"Tue, 08 Oct 2000 00:56:40 GMT",
datetime.datetime(2000, 10, 8, 0, 56, 40, tzinfo=datetime.timezone.utc),
),
),
)
def test_datetime_headers(
header: str,
header_attr: str,
header_val: str,
expected: datetime.datetime | None,
) -> None:
req = make_mocked_request("GET", "/", headers={header: header_val})
assert getattr(req, header_attr) == expected
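# Hedged addition (not in the original file): combining several clone()
# overrides in a single call, mirroring the single-field tests above.
def test_clone_scheme_and_host_combined() -> None:
    req = make_mocked_request("GET", "/")
    req2 = req.clone(scheme="https", host="example.com")
    assert req2.scheme == "https"
    assert req2.host == "example.com"
    assert req2.url == URL("https://example.com/")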
|
./temp_repos/aiohttp/aiohttp/web_request.py
|
./temp_repos/aiohttp/tests/test_web_request.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'FileField'.
Context:
- Class Name: FileField
- Dependencies to Mock: payload_writer, message, task, loop, protocol, payload
- Key Imports: yarl, web_response, web_app, web_urldispatcher, web_protocol, tempfile, http_parser, io, helpers, typing
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
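Below is a hedged sketch meeting these requirements. It assumes FileField is the plain frozen container exposed by aiohttp.web_request (fields: name, filename, file, content_type, headers); since FileField performs no I/O itself, the mocked collaborators are placeholders wired in only to satisfy the dependency list, and patch() is not needed because nothing external is invoked.
import io
import unittest
from unittest import mock
from multidict import CIMultiDict
from aiohttp.web_request import FileField
class TestFileField(unittest.TestCase):
    def setUp(self) -> None:
        # Arrange: collaborators mocked per the requirements, even though
        # FileField never calls them directly.
        self.payload_writer = mock.MagicMock()
        self.message = mock.MagicMock()
        self.task = mock.MagicMock()
        self.loop = mock.MagicMock()
        self.protocol = mock.MagicMock()
        self.payload = mock.MagicMock()
    def test_file_field_success(self) -> None:
        # Arrange
        body = io.BytesIO(b"\xff\xd8 fake image bytes")
        headers = CIMultiDict({"Content-Type": "image/jpeg"})
        # Act
        field = FileField(
            name="upload",
            filename="photo.jpg",
            file=body,  # type: ignore[arg-type]
            content_type="image/jpeg",
            headers=headers,  # type: ignore[arg-type]
        )
        # Assert
        self.assertEqual(field.name, "upload")
        self.assertEqual(field.filename, "photo.jpg")
        self.assertEqual(field.file.read(), b"\xff\xd8 fake image bytes")
        self.assertEqual(field.content_type, "image/jpeg")
    def test_file_field_failure_closed_file(self) -> None:
        # Arrange: a handle that was closed before the field is consumed
        body = io.BytesIO(b"data")
        body.close()
        field = FileField(
            "f", "f.bin", body, "application/octet-stream", CIMultiDict()  # type: ignore[arg-type]
        )
        # Act / Assert: reading through the field surfaces the error
        with self.assertRaises(ValueError):
            field.file.read()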
|
FileField
|
python
|
import asyncio
import enum
import io
import json
import mimetypes
import os
import sys
import warnings
from abc import ABC, abstractmethod
from collections.abc import AsyncIterable, AsyncIterator, Iterable
from itertools import chain
from typing import IO, Any, Final, TextIO
from multidict import CIMultiDict
from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import (
_SENTINEL,
content_disposition_header,
guess_filename,
parse_mimetype,
sentinel,
)
from .streams import StreamReader
from .typedefs import JSONEncoder
__all__ = (
"PAYLOAD_REGISTRY",
"get_payload",
"payload_type",
"Payload",
"BytesPayload",
"StringPayload",
"IOBasePayload",
"BytesIOPayload",
"BufferedReaderPayload",
"TextIOPayload",
"StringIOPayload",
"JsonPayload",
"AsyncIterablePayload",
)
TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB
READ_SIZE: Final[int] = 2**16 # 64 KB
_CLOSE_FUTURES: set[asyncio.Future[None]] = set()
class LookupError(Exception):
"""Raised when no payload factory is found for the given data type."""
class Order(str, enum.Enum):
normal = "normal"
try_first = "try_first"
try_last = "try_last"
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
def register_payload(
factory: type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
PAYLOAD_REGISTRY.register(factory, type, order=order)
class payload_type:
def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
self.type = type
self.order = order
def __call__(self, factory: type["Payload"]) -> type["Payload"]:
register_payload(factory, self.type, order=self.order)
return factory
PayloadType = type["Payload"]
_PayloadRegistryItem = tuple[PayloadType, Any]
class PayloadRegistry:
"""Payload registry.
    note: we need zope.interface for a more efficient adapter search
"""
__slots__ = ("_first", "_normal", "_last", "_normal_lookup")
def __init__(self) -> None:
self._first: list[_PayloadRegistryItem] = []
self._normal: list[_PayloadRegistryItem] = []
self._last: list[_PayloadRegistryItem] = []
self._normal_lookup: dict[Any, PayloadType] = {}
def get(
self,
data: Any,
*args: Any,
_CHAIN: "type[chain[_PayloadRegistryItem]]" = chain,
**kwargs: Any,
) -> "Payload":
if self._first:
for factory, type_ in self._first:
if isinstance(data, type_):
return factory(data, *args, **kwargs)
# Try the fast lookup first
if lookup_factory := self._normal_lookup.get(type(data)):
return lookup_factory(data, *args, **kwargs)
        # Bail early if it's already a Payload
if isinstance(data, Payload):
return data
# Fallback to the slower linear search
for factory, type_ in _CHAIN(self._normal, self._last):
if isinstance(data, type_):
return factory(data, *args, **kwargs)
raise LookupError()
def register(
self, factory: PayloadType, type: Any, *, order: Order = Order.normal
) -> None:
if order is Order.try_first:
self._first.append((factory, type))
elif order is Order.normal:
self._normal.append((factory, type))
if isinstance(type, Iterable):
for t in type:
self._normal_lookup[t] = factory
else:
self._normal_lookup[type] = factory
elif order is Order.try_last:
self._last.append((factory, type))
else:
raise ValueError(f"Unsupported order {order!r}")
class Payload(ABC):
_default_content_type: str = "application/octet-stream"
_size: int | None = None
_consumed: bool = False # Default: payload has not been consumed yet
_autoclose: bool = False # Default: assume resource needs explicit closing
def __init__(
self,
value: Any,
headers: (
CIMultiDict[str] | dict[str, str] | Iterable[tuple[str, str]] | None
) = None,
content_type: None | str | _SENTINEL = sentinel,
filename: str | None = None,
encoding: str | None = None,
**kwargs: Any,
) -> None:
self._encoding = encoding
self._filename = filename
self._headers = CIMultiDict[str]()
self._value = value
if content_type is not sentinel and content_type is not None:
assert isinstance(content_type, str)
self._headers[hdrs.CONTENT_TYPE] = content_type
elif self._filename is not None:
if sys.version_info >= (3, 13):
guesser = mimetypes.guess_file_type
else:
guesser = mimetypes.guess_type
content_type = guesser(self._filename)[0]
if content_type is None:
content_type = self._default_content_type
self._headers[hdrs.CONTENT_TYPE] = content_type
else:
self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
if headers:
self._headers.update(headers)
@property
def size(self) -> int | None:
"""Size of the payload in bytes.
Returns the number of bytes that will be transmitted when the payload
is written. For string payloads, this is the size after encoding to bytes,
not the length of the string.
"""
return self._size
@property
def filename(self) -> str | None:
"""Filename of the payload."""
return self._filename
@property
def headers(self) -> CIMultiDict[str]:
"""Custom item headers"""
return self._headers
@property
def _binary_headers(self) -> bytes:
return (
"".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
"utf-8"
)
+ b"\r\n"
)
@property
def encoding(self) -> str | None:
"""Payload encoding"""
return self._encoding
@property
def content_type(self) -> str:
"""Content type"""
return self._headers[hdrs.CONTENT_TYPE]
@property
def consumed(self) -> bool:
"""Whether the payload has been consumed and cannot be reused."""
return self._consumed
@property
def autoclose(self) -> bool:
"""
Whether the payload can close itself automatically.
Returns True if the payload has no file handles or resources that need
explicit closing. If False, callers must await close() to release resources.
"""
return self._autoclose
def set_content_disposition(
self,
disptype: str,
quote_fields: bool = True,
_charset: str = "utf-8",
**params: str,
) -> None:
"""Sets ``Content-Disposition`` header."""
self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
disptype, quote_fields=quote_fields, _charset=_charset, params=params
)
@abstractmethod
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
"""
Return string representation of the value.
This is named decode() to allow compatibility with bytes objects.
"""
@abstractmethod
async def write(self, writer: AbstractStreamWriter) -> None:
"""
Write payload to the writer stream.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
This is a legacy method that writes the entire payload without length constraints.
Important:
For new implementations, use write_with_length() instead of this method.
This method is maintained for backwards compatibility and will eventually
delegate to write_with_length(writer, None) in all implementations.
All payload subclasses must override this method for backwards compatibility,
but new code should use write_with_length for more flexibility and control.
"""
# write_with_length is new in aiohttp 3.12
# it should be overridden by subclasses
async def write_with_length(
self, writer: AbstractStreamWriter, content_length: int | None
) -> None:
"""
Write payload with a specific content length constraint.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
content_length: Maximum number of bytes to write (None for unlimited)
This method allows writing payload content with a specific length constraint,
which is particularly useful for HTTP responses with Content-Length header.
Note:
This is the base implementation that provides backwards compatibility
for subclasses that don't override this method. Specific payload types
should override this method to implement proper length-constrained writing.
"""
# Backwards compatibility for subclasses that don't override this method
# and for the default implementation
await self.write(writer)
async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
"""
Return bytes representation of the value.
This is a convenience method that calls decode() and encodes the result
to bytes using the specified encoding.
"""
# Use instance encoding if available, otherwise use parameter
actual_encoding = self._encoding or encoding
return self.decode(actual_encoding, errors).encode(actual_encoding)
def _close(self) -> None:
"""
Async safe synchronous close operations for backwards compatibility.
This method exists only for backwards compatibility with code that
needs to clean up payloads synchronously. In the future, we will
drop this method and only support the async close() method.
WARNING: This method must be safe to call from within the event loop
without blocking. Subclasses should not perform any blocking I/O here.
WARNING: This method must be called from within an event loop for
certain payload types (e.g., IOBasePayload). Calling it outside an
event loop may raise RuntimeError.
"""
# This is a no-op by default, but subclasses can override it
# for non-blocking cleanup operations.
async def close(self) -> None:
"""
Close the payload if it holds any resources.
IMPORTANT: This method must not await anything that might not finish
immediately, as it may be called during cleanup/cancellation. Schedule
any long-running operations without awaiting them.
In the future, this will be the only close method supported.
"""
self._close()
class BytesPayload(Payload):
_value: bytes
# _consumed = False (inherited) - Bytes are immutable and can be reused
_autoclose = True # No file handle, just bytes in memory
def __init__(
self, value: bytes | bytearray | memoryview, *args: Any, **kwargs: Any
) -> None:
if "content_type" not in kwargs:
kwargs["content_type"] = "application/octet-stream"
super().__init__(value, *args, **kwargs)
if isinstance(value, memoryview):
self._size = value.nbytes
elif isinstance(value, (bytes, bytearray)):
self._size = len(value)
else:
raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")
if self._size > TOO_LARGE_BYTES_BODY:
warnings.warn(
"Sending a large body directly with raw bytes might"
" lock the event loop. You should probably pass an "
"io.BytesIO object instead",
ResourceWarning,
source=self,
)
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
return self._value.decode(encoding, errors)
async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
"""
Return bytes representation of the value.
This method returns the raw bytes content of the payload.
It is equivalent to accessing the _value attribute directly.
"""
return self._value
async def write(self, writer: AbstractStreamWriter) -> None:
"""
Write the entire bytes payload to the writer stream.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
This method writes the entire bytes content without any length constraint.
Note:
For new implementations that need length control, use write_with_length().
This method is maintained for backwards compatibility and is equivalent
to write_with_length(writer, None).
"""
await writer.write(self._value)
async def write_with_length(
self, writer: AbstractStreamWriter, content_length: int | None
) -> None:
"""
Write bytes payload with a specific content length constraint.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
content_length: Maximum number of bytes to write (None for unlimited)
This method writes either the entire byte sequence or a slice of it
up to the specified content_length. For BytesPayload, this operation
is performed efficiently using array slicing.
"""
if content_length is not None:
await writer.write(self._value[:content_length])
else:
await writer.write(self._value)
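# Illustrative sketch (not aiohttp code): exercises the slicing contract
# of write_with_length() above with a minimal duck-typed collector in
# place of a real AbstractStreamWriter.
async def _bytes_payload_truncation_sketch() -> bytes:
    chunks: list[bytes] = []
    class _Collector:
        async def write(self, chunk: bytes) -> None:
            chunks.append(bytes(chunk))
    p = BytesPayload(b"0123456789")
    await p.write_with_length(_Collector(), 4)  # type: ignore[arg-type]
    return b"".join(chunks)  # -> b"0123"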
class StringPayload(BytesPayload):
def __init__(
self,
value: str,
*args: Any,
encoding: str | None = None,
content_type: str | None = None,
**kwargs: Any,
) -> None:
if encoding is None:
if content_type is None:
real_encoding = "utf-8"
content_type = "text/plain; charset=utf-8"
else:
mimetype = parse_mimetype(content_type)
real_encoding = mimetype.parameters.get("charset", "utf-8")
else:
if content_type is None:
content_type = "text/plain; charset=%s" % encoding
real_encoding = encoding
super().__init__(
value.encode(real_encoding),
encoding=real_encoding,
content_type=content_type,
*args,
**kwargs,
)
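# Illustrative sketch: when only a charset-bearing content_type is
# supplied, that charset drives the byte encoding; with neither given,
# utf-8 is the default.  Not part of the public API.
def _string_payload_charset_sketch() -> bytes:
    p = StringPayload("test", content_type="text/plain; charset=koi8-r")
    assert p.encoding == "koi8-r"
    assert p.content_type == "text/plain; charset=koi8-r"
    return p._value  # "test" encoded with koi8-r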
class StringIOPayload(StringPayload):
def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
super().__init__(value.read(), *args, **kwargs)
class IOBasePayload(Payload):
_value: io.IOBase
# _consumed = False (inherited) - File can be re-read from the same position
_start_position: int | None = None
# _autoclose = False (inherited) - Has file handle that needs explicit closing
def __init__(
self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
) -> None:
if "filename" not in kwargs:
kwargs["filename"] = guess_filename(value)
super().__init__(value, *args, **kwargs)
if self._filename is not None and disposition is not None:
if hdrs.CONTENT_DISPOSITION not in self.headers:
self.set_content_disposition(disposition, filename=self._filename)
def _set_or_restore_start_position(self) -> None:
"""Set or restore the start position of the file-like object."""
if self._start_position is None:
try:
self._start_position = self._value.tell()
except (OSError, AttributeError):
self._consumed = True # Cannot seek, mark as consumed
return
try:
self._value.seek(self._start_position)
except (OSError, AttributeError):
# Failed to seek back - mark as consumed since we've already read
self._consumed = True
def _read_and_available_len(
self, remaining_content_len: int | None
) -> tuple[int | None, bytes]:
"""
Read the file-like object and return both its total size and the first chunk.
Args:
remaining_content_len: Optional limit on how many bytes to read in this operation.
If None, READ_SIZE will be used as the default chunk size.
Returns:
A tuple containing:
- The total size of the remaining unread content (None if size cannot be determined)
- The first chunk of bytes read from the file object
This method is optimized to perform both size calculation and initial read
in a single operation, which is executed in a single executor job to minimize
context switches and file operations when streaming content.
"""
self._set_or_restore_start_position()
size = self.size # Call size only once since it does I/O
return size, self._value.read(
min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE)
)
def _read(self, remaining_content_len: int | None) -> bytes:
"""
Read a chunk of data from the file-like object.
Args:
remaining_content_len: Optional maximum number of bytes to read.
If None, READ_SIZE will be used as the default chunk size.
Returns:
A chunk of bytes read from the file object, respecting the
remaining_content_len limit if specified.
This method is used for subsequent reads during streaming after
the initial _read_and_available_len call has been made.
"""
return self._value.read(remaining_content_len or READ_SIZE) # type: ignore[no-any-return]
@property
def size(self) -> int | None:
"""
Size of the payload in bytes.
Returns the total size of the payload content from the initial position.
This ensures consistent Content-Length for requests, including 307/308 redirects
where the same payload instance is reused.
Returns None if the size cannot be determined (e.g., for unseekable streams).
"""
try:
# Store the start position on first access.
# This is critical when the same payload instance is reused (e.g., 307/308
# redirects). Without storing the initial position, after the payload is
# read once, the file position would be at EOF, which would cause the
# size calculation to return 0 (file_size - EOF position).
# By storing the start position, we ensure the size calculation always
# returns the correct total size for any subsequent use.
if self._start_position is None:
self._start_position = self._value.tell()
# Return the total size from the start position
# This ensures Content-Length is correct even after reading
return os.fstat(self._value.fileno()).st_size - self._start_position
except (AttributeError, OSError):
return None
async def write(self, writer: AbstractStreamWriter) -> None:
"""
Write the entire file-like payload to the writer stream.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
This method writes the entire file content without any length constraint.
It delegates to write_with_length() with no length limit for implementation
consistency.
Note:
For new implementations that need length control, use write_with_length() directly.
This method is maintained for backwards compatibility with existing code.
"""
await self.write_with_length(writer, None)
async def write_with_length(
self, writer: AbstractStreamWriter, content_length: int | None
) -> None:
"""
Write file-like payload with a specific content length constraint.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
content_length: Maximum number of bytes to write (None for unlimited)
This method implements optimized streaming of file content with length constraints:
1. File reading is performed in a thread pool to avoid blocking the event loop
2. Content is read and written in chunks to maintain memory efficiency
3. Writing stops when either:
- All available file content has been written (when size is known)
- The specified content_length has been reached
4. File resources are properly closed even if the operation is cancelled
The implementation carefully handles both known-size and unknown-size payloads,
as well as constrained and unconstrained content lengths.
"""
loop = asyncio.get_running_loop()
total_written_len = 0
remaining_content_len = content_length
# Get initial data and available length
available_len, chunk = await loop.run_in_executor(
None, self._read_and_available_len, remaining_content_len
)
# Process data chunks until done
while chunk:
chunk_len = len(chunk)
# Write data with or without length constraint
if remaining_content_len is None:
await writer.write(chunk)
else:
await writer.write(chunk[:remaining_content_len])
remaining_content_len -= chunk_len
total_written_len += chunk_len
# Check if we're done writing
if self._should_stop_writing(
available_len, total_written_len, remaining_content_len
):
return
# Read next chunk
chunk = await loop.run_in_executor(
None,
self._read,
(
min(READ_SIZE, remaining_content_len)
if remaining_content_len is not None
else READ_SIZE
),
)
def _should_stop_writing(
self,
available_len: int | None,
total_written_len: int,
remaining_content_len: int | None,
) -> bool:
"""
Determine if we should stop writing data.
Args:
available_len: Known size of the payload if available (None if unknown)
total_written_len: Number of bytes already written
remaining_content_len: Remaining bytes to be written for content-length limited responses
Returns:
True if we should stop writing data, based on either:
- Having written all available data (when size is known)
- Having written all requested content (when content-length is specified)
"""
return (available_len is not None and total_written_len >= available_len) or (
remaining_content_len is not None and remaining_content_len <= 0
)
def _close(self) -> None:
"""
Async safe synchronous close operations for backwards compatibility.
This method exists only for backwards
compatibility. Use the async close() method instead.
WARNING: This method MUST be called from within an event loop.
Calling it outside an event loop will raise RuntimeError.
"""
# Skip if already consumed
if self._consumed:
return
self._consumed = True # Mark as consumed to prevent further writes
# Schedule file closing without awaiting to prevent cancellation issues
loop = asyncio.get_running_loop()
close_future = loop.run_in_executor(None, self._value.close)
# Hold a strong reference to the future to prevent it from being
# garbage collected before it completes.
_CLOSE_FUTURES.add(close_future)
close_future.add_done_callback(_CLOSE_FUTURES.remove)
async def close(self) -> None:
"""
Close the payload if it holds any resources.
IMPORTANT: This method must not await anything that might not finish
immediately, as it may be called during cleanup/cancellation. Schedule
any long-running operations without awaiting them.
"""
self._close()
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
"""
Return string representation of the value.
WARNING: This method does blocking I/O and should not be called in the event loop.
"""
return self._read_all().decode(encoding, errors)
def _read_all(self) -> bytes:
"""Read the entire file-like object and return its content as bytes."""
self._set_or_restore_start_position()
# Use readlines() to ensure we get all content
return b"".join(self._value.readlines())
async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
"""
Return bytes representation of the value.
This method reads the entire file content and returns it as bytes.
It is equivalent to reading the file-like object directly.
The file reading is performed in an executor to avoid blocking the event loop.
"""
loop = asyncio.get_running_loop()
return await loop.run_in_executor(None, self._read_all)
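# Illustrative sketch: the start position recorded by
# _set_or_restore_start_position() lets the same payload instance be
# written twice (e.g. when a request body is replayed on a 307/308
# redirect) and produce identical bytes both times.  Not aiohttp code.
async def _iobase_reuse_sketch() -> bytes:
    p = IOBasePayload(io.BytesIO(b"0123456789"))
    first = await p.as_bytes()
    second = await p.as_bytes()  # seeks back to the start and re-reads
    assert first == second == b"0123456789"
    return first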
class TextIOPayload(IOBasePayload):
_value: io.TextIOBase
# _autoclose = False (inherited) - Has text file handle that needs explicit closing
def __init__(
self,
value: TextIO,
*args: Any,
encoding: str | None = None,
content_type: str | None = None,
**kwargs: Any,
) -> None:
if encoding is None:
if content_type is None:
encoding = "utf-8"
content_type = "text/plain; charset=utf-8"
else:
mimetype = parse_mimetype(content_type)
encoding = mimetype.parameters.get("charset", "utf-8")
else:
if content_type is None:
content_type = "text/plain; charset=%s" % encoding
super().__init__(
value,
content_type=content_type,
encoding=encoding,
*args,
**kwargs,
)
def _read_and_available_len(
self, remaining_content_len: int | None
) -> tuple[int | None, bytes]:
"""
Read the text file-like object and return both its total size and the first chunk.
Args:
remaining_content_len: Optional limit on how many bytes to read in this operation.
If None, READ_SIZE will be used as the default chunk size.
Returns:
A tuple containing:
- The total size of the remaining unread content (None if size cannot be determined)
- The first chunk of bytes read from the file object, encoded using the payload's encoding
This method is optimized to perform both size calculation and initial read
in a single operation, which is executed in a single executor job to minimize
context switches and file operations when streaming content.
Note:
TextIOPayload handles encoding of the text content before writing it
to the stream. If no encoding is specified, UTF-8 is used as the default.
"""
self._set_or_restore_start_position()
size = self.size
chunk = self._value.read(
min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE)
)
return size, chunk.encode(self._encoding) if self._encoding else chunk.encode()
def _read(self, remaining_content_len: int | None) -> bytes:
"""
Read a chunk of data from the text file-like object.
Args:
remaining_content_len: Optional maximum number of bytes to read.
If None, READ_SIZE will be used as the default chunk size.
Returns:
A chunk of bytes read from the file object and encoded using the payload's
encoding. The data is automatically converted from text to bytes.
This method is used for subsequent reads during streaming after
the initial _read_and_available_len call has been made. It properly
handles text encoding, converting the text content to bytes using
the specified encoding (or UTF-8 if none was provided).
"""
chunk = self._value.read(remaining_content_len or READ_SIZE)
return chunk.encode(self._encoding) if self._encoding else chunk.encode()
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
"""
Return string representation of the value.
WARNING: This method does blocking I/O and should not be called in the event loop.
"""
self._set_or_restore_start_position()
return self._value.read()
async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
"""
Return bytes representation of the value.
This method reads the entire text file content and returns it as bytes.
It encodes the text content using the specified encoding.
The file reading is performed in an executor to avoid blocking the event loop.
"""
loop = asyncio.get_running_loop()
# Use instance encoding if available, otherwise use parameter
actual_encoding = self._encoding or encoding
def _read_and_encode() -> bytes:
self._set_or_restore_start_position()
# TextIO read() always returns the full content
return self._value.read().encode(actual_encoding, errors)
return await loop.run_in_executor(None, _read_and_encode)
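# Illustrative sketch: TextIOPayload encodes text with the payload's
# charset before writing; here koi8-r is taken from the explicit
# content_type.  Not part of the public API.
async def _textio_charset_sketch() -> bytes:
    p = TextIOPayload(
        io.StringIO("test"), content_type="text/plain; charset=koi8-r"
    )
    return await p.as_bytes()  # b"test" encoded as koi8-r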
class BytesIOPayload(IOBasePayload):
_value: io.BytesIO
_size: int # Always initialized in __init__
_autoclose = True # BytesIO is in-memory, safe to auto-close
def __init__(self, value: io.BytesIO, *args: Any, **kwargs: Any) -> None:
super().__init__(value, *args, **kwargs)
# Calculate size once during initialization
self._size = len(self._value.getbuffer()) - self._value.tell()
@property
def size(self) -> int:
"""Size of the payload in bytes.
Returns the number of bytes in the BytesIO buffer that will be transmitted.
This is calculated once during initialization for efficiency.
"""
return self._size
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
self._set_or_restore_start_position()
return self._value.read().decode(encoding, errors)
async def write(self, writer: AbstractStreamWriter) -> None:
return await self.write_with_length(writer, None)
async def write_with_length(
self, writer: AbstractStreamWriter, content_length: int | None
) -> None:
"""
Write BytesIO payload with a specific content length constraint.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
content_length: Maximum number of bytes to write (None for unlimited)
This implementation is specifically optimized for BytesIO objects:
1. Reads content in chunks to maintain memory efficiency
2. Yields control back to the event loop periodically to prevent blocking
when dealing with large BytesIO objects
3. Respects content_length constraints when specified
4. Properly cleans up by closing the BytesIO object when done or on error
The periodic yielding to the event loop is important for maintaining
responsiveness when processing large in-memory buffers.
"""
self._set_or_restore_start_position()
loop_count = 0
remaining_bytes = content_length
while chunk := self._value.read(READ_SIZE):
if loop_count > 0:
                # Yield to the event loop periodically so that a large
                # BytesIO object cannot starve other tasks.
await asyncio.sleep(0)
if remaining_bytes is None:
await writer.write(chunk)
else:
await writer.write(chunk[:remaining_bytes])
remaining_bytes -= len(chunk)
if remaining_bytes <= 0:
return
loop_count += 1
async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
"""
Return bytes representation of the value.
This method reads the entire BytesIO content and returns it as bytes.
It is equivalent to accessing the _value attribute directly.
"""
self._set_or_restore_start_position()
return self._value.read()
async def close(self) -> None:
"""
Close the BytesIO payload.
This does nothing since BytesIO is in-memory and does not require explicit closing.
"""
class BufferedReaderPayload(IOBasePayload):
_value: io.BufferedIOBase
# _autoclose = False (inherited) - Has buffered file handle that needs explicit closing
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
self._set_or_restore_start_position()
return self._value.read().decode(encoding, errors)
class JsonPayload(BytesPayload):
def __init__(
self,
value: Any,
encoding: str = "utf-8",
content_type: str = "application/json",
dumps: JSONEncoder = json.dumps,
*args: Any,
**kwargs: Any,
) -> None:
super().__init__(
dumps(value).encode(encoding),
content_type=content_type,
encoding=encoding,
*args,
**kwargs,
)
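# Illustrative sketch: JsonPayload serialises eagerly at construction
# time, so the size and the encoded bytes are fixed before any write
# happens.  Not part of the public API.
def _json_payload_sketch() -> bytes:
    p = JsonPayload({"some": "data"})
    assert p.content_type == "application/json"
    assert p.size == len(b'{"some": "data"}')
    return p._value  # the pre-encoded JSON body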
class AsyncIterablePayload(Payload):
_iter: AsyncIterator[bytes] | None = None
_value: AsyncIterable[bytes]
_cached_chunks: list[bytes] | None = None
# _consumed stays False to allow reuse with cached content
_autoclose = True # Iterator doesn't need explicit closing
def __init__(self, value: AsyncIterable[bytes], *args: Any, **kwargs: Any) -> None:
if not isinstance(value, AsyncIterable):
raise TypeError(
"value argument must support "
"collections.abc.AsyncIterable interface, "
f"got {type(value)!r}"
)
if "content_type" not in kwargs:
kwargs["content_type"] = "application/octet-stream"
super().__init__(value, *args, **kwargs)
self._iter = value.__aiter__()
async def write(self, writer: AbstractStreamWriter) -> None:
"""
Write the entire async iterable payload to the writer stream.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
This method iterates through the async iterable and writes each chunk
to the writer without any length constraint.
Note:
For new implementations that need length control, use write_with_length() directly.
This method is maintained for backwards compatibility with existing code.
"""
await self.write_with_length(writer, None)
async def write_with_length(
self, writer: AbstractStreamWriter, content_length: int | None
) -> None:
"""
Write async iterable payload with a specific content length constraint.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
content_length: Maximum number of bytes to write (None for unlimited)
This implementation handles streaming of async iterable content with length constraints:
1. If cached chunks are available, writes from them
2. Otherwise iterates through the async iterable one chunk at a time
3. Respects content_length constraints when specified
4. Does NOT generate cache - that's done by as_bytes()
"""
# If we have cached chunks, use them
if self._cached_chunks is not None:
remaining_bytes = content_length
for chunk in self._cached_chunks:
if remaining_bytes is None:
await writer.write(chunk)
elif remaining_bytes > 0:
await writer.write(chunk[:remaining_bytes])
remaining_bytes -= len(chunk)
else:
break
return
# If iterator is exhausted and we don't have cached chunks, nothing to write
if self._iter is None:
return
# Stream from the iterator
remaining_bytes = content_length
try:
while True:
chunk = await anext(self._iter)
if remaining_bytes is None:
await writer.write(chunk)
# If we have a content length limit
elif remaining_bytes > 0:
await writer.write(chunk[:remaining_bytes])
remaining_bytes -= len(chunk)
# We still want to exhaust the iterator even
# if we have reached the content length limit
# since the file handle may not get closed by
# the iterator if we don't do this
except StopAsyncIteration:
# Iterator is exhausted
self._iter = None
self._consumed = True # Mark as consumed when streamed without caching
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
"""Decode the payload content as a string if cached chunks are available."""
if self._cached_chunks is not None:
return b"".join(self._cached_chunks).decode(encoding, errors)
raise TypeError("Unable to decode - content not cached. Call as_bytes() first.")
async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
"""
Return bytes representation of the value.
This method reads the entire async iterable content and returns it as bytes.
It generates and caches the chunks for future reuse.
"""
# If we have cached chunks, return them joined
if self._cached_chunks is not None:
return b"".join(self._cached_chunks)
# If iterator is exhausted and no cache, return empty
if self._iter is None:
return b""
# Read all chunks and cache them
chunks: list[bytes] = []
async for chunk in self._iter:
chunks.append(chunk)
# Iterator is exhausted, cache the chunks
self._iter = None
self._cached_chunks = chunks
# Keep _consumed as False to allow reuse with cached chunks
return b"".join(chunks)
class StreamReaderPayload(AsyncIterablePayload):
def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
super().__init__(value.iter_any(), *args, **kwargs)
PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives more specialized async iterables, like
# multipart.BodyPartReaderPayload, a chance to override the default
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
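# Illustrative sketch of the resulting dispatch table: exact types hit
# the fast lookup, while io subclasses resolve through the registered
# hierarchy.  Not part of the public API.
def _registry_table_sketch() -> tuple[type, type]:
    return (
        type(get_payload(b"x")),              # BytesPayload (fast path)
        type(get_payload(io.BytesIO(b"x"))),  # BytesIOPayload
    )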
|
import array
import asyncio
import io
import json
import unittest.mock
from collections.abc import AsyncIterator, Iterator
from io import StringIO
from pathlib import Path
from typing import TextIO, Union
import pytest
from multidict import CIMultiDict
from aiohttp import payload
from aiohttp.abc import AbstractStreamWriter
from aiohttp.payload import READ_SIZE
class BufferWriter(AbstractStreamWriter):
"""Test writer that captures written bytes in a buffer."""
def __init__(self) -> None:
self.buffer = bytearray()
async def write(
self, chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]
) -> None:
self.buffer.extend(bytes(chunk))
async def write_eof(self, chunk: bytes = b"") -> None:
"""No-op for test writer."""
async def drain(self) -> None:
"""No-op for test writer."""
def enable_compression(
self, encoding: str = "deflate", strategy: int | None = None
) -> None:
"""Compression not implemented for test writer."""
def enable_chunking(self) -> None:
"""Chunking not implemented for test writer."""
async def write_headers(self, status_line: str, headers: CIMultiDict[str]) -> None:
"""Headers not captured for payload tests."""
@pytest.fixture(autouse=True)
def cleanup(
cleanup_payload_pending_file_closes: None,
) -> None:
"""Ensure all pending file close operations complete during test teardown."""
@pytest.fixture
def registry() -> Iterator[payload.PayloadRegistry]:
old = payload.PAYLOAD_REGISTRY
reg = payload.PAYLOAD_REGISTRY = payload.PayloadRegistry()
yield reg
payload.PAYLOAD_REGISTRY = old
class Payload(payload.Payload):
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
assert False
async def write(self, writer: AbstractStreamWriter) -> None:
pass
def test_register_type(registry: payload.PayloadRegistry) -> None:
class TestProvider:
pass
payload.register_payload(Payload, TestProvider)
p = payload.get_payload(TestProvider())
assert isinstance(p, Payload)
def test_register_unsupported_order(registry: payload.PayloadRegistry) -> None:
class TestProvider:
pass
with pytest.raises(ValueError):
payload.register_payload(
Payload, TestProvider, order=object() # type: ignore[arg-type]
)
def test_payload_ctor() -> None:
p = Payload("test", encoding="utf-8", filename="test.txt")
assert p._value == "test"
assert p._encoding == "utf-8"
assert p.size is None
assert p.filename == "test.txt"
assert p.content_type == "text/plain"
def test_payload_content_type() -> None:
p = Payload("test", headers={"content-type": "application/json"})
assert p.content_type == "application/json"
def test_bytes_payload_default_content_type() -> None:
p = payload.BytesPayload(b"data")
assert p.content_type == "application/octet-stream"
def test_bytes_payload_explicit_content_type() -> None:
p = payload.BytesPayload(b"data", content_type="application/custom")
assert p.content_type == "application/custom"
def test_bytes_payload_bad_type() -> None:
with pytest.raises(TypeError):
payload.BytesPayload(object()) # type: ignore[arg-type]
def test_bytes_payload_memoryview_correct_size() -> None:
mv = memoryview(array.array("H", [1, 2, 3]))
p = payload.BytesPayload(mv)
assert p.size == 6
def test_string_payload() -> None:
p = payload.StringPayload("test")
assert p.encoding == "utf-8"
assert p.content_type == "text/plain; charset=utf-8"
p = payload.StringPayload("test", encoding="koi8-r")
assert p.encoding == "koi8-r"
assert p.content_type == "text/plain; charset=koi8-r"
p = payload.StringPayload("test", content_type="text/plain; charset=koi8-r")
assert p.encoding == "koi8-r"
assert p.content_type == "text/plain; charset=koi8-r"
def test_string_io_payload() -> None:
s = StringIO("ű" * 5000)
p = payload.StringIOPayload(s)
assert p.encoding == "utf-8"
assert p.content_type == "text/plain; charset=utf-8"
assert p.size == 10000
def test_async_iterable_payload_default_content_type() -> None:
async def gen() -> AsyncIterator[bytes]:
return
yield b"abc" # type: ignore[unreachable] # pragma: no cover
p = payload.AsyncIterablePayload(gen())
assert p.content_type == "application/octet-stream"
def test_async_iterable_payload_explicit_content_type() -> None:
async def gen() -> AsyncIterator[bytes]:
return
yield b"abc" # type: ignore[unreachable] # pragma: no cover
p = payload.AsyncIterablePayload(gen(), content_type="application/custom")
assert p.content_type == "application/custom"
def test_async_iterable_payload_not_async_iterable() -> None:
with pytest.raises(TypeError):
payload.AsyncIterablePayload(object()) # type: ignore[arg-type]
class MockStreamWriter(AbstractStreamWriter):
"""Mock stream writer for testing payload writes."""
def __init__(self) -> None:
self.written: list[bytes] = []
async def write(
self, chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]
) -> None:
"""Store the chunk in the written list."""
self.written.append(bytes(chunk))
async def write_eof(self, chunk: bytes | None = None) -> None:
"""write_eof implementation - no-op for tests."""
async def drain(self) -> None:
"""Drain implementation - no-op for tests."""
def enable_compression(
self, encoding: str = "deflate", strategy: int | None = None
) -> None:
"""Enable compression - no-op for tests."""
def enable_chunking(self) -> None:
"""Enable chunking - no-op for tests."""
async def write_headers(self, status_line: str, headers: CIMultiDict[str]) -> None:
"""Write headers - no-op for tests."""
def get_written_bytes(self) -> bytes:
"""Return all written bytes as a single bytes object."""
return b"".join(self.written)
async def test_bytes_payload_write_with_length_no_limit() -> None:
"""Test BytesPayload writing with no content length limit."""
data = b"0123456789"
p = payload.BytesPayload(data)
writer = MockStreamWriter()
await p.write_with_length(writer, None)
assert writer.get_written_bytes() == data
assert len(writer.get_written_bytes()) == 10
async def test_bytes_payload_write_with_length_exact() -> None:
"""Test BytesPayload writing with exact content length."""
data = b"0123456789"
p = payload.BytesPayload(data)
writer = MockStreamWriter()
await p.write_with_length(writer, 10)
assert writer.get_written_bytes() == data
assert len(writer.get_written_bytes()) == 10
async def test_bytes_payload_write_with_length_truncated() -> None:
"""Test BytesPayload writing with truncated content length."""
data = b"0123456789"
p = payload.BytesPayload(data)
writer = MockStreamWriter()
await p.write_with_length(writer, 5)
assert writer.get_written_bytes() == b"01234"
assert len(writer.get_written_bytes()) == 5
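# Hedged addition (not in the original suite): boundary check that a
# zero content length writes an empty prefix of the payload.
async def test_bytes_payload_write_with_length_zero() -> None:
    """Test BytesPayload writing with zero content length."""
    data = b"0123456789"
    p = payload.BytesPayload(data)
    writer = MockStreamWriter()
    await p.write_with_length(writer, 0)
    assert writer.get_written_bytes() == b""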
async def test_iobase_payload_write_with_length_no_limit() -> None:
"""Test IOBasePayload writing with no content length limit."""
data = b"0123456789"
p = payload.IOBasePayload(io.BytesIO(data))
writer = MockStreamWriter()
await p.write_with_length(writer, None)
assert writer.get_written_bytes() == data
assert len(writer.get_written_bytes()) == 10
async def test_iobase_payload_write_with_length_exact() -> None:
"""Test IOBasePayload writing with exact content length."""
data = b"0123456789"
p = payload.IOBasePayload(io.BytesIO(data))
writer = MockStreamWriter()
await p.write_with_length(writer, 10)
assert writer.get_written_bytes() == data
assert len(writer.get_written_bytes()) == 10
async def test_iobase_payload_write_with_length_truncated() -> None:
"""Test IOBasePayload writing with truncated content length."""
data = b"0123456789"
p = payload.IOBasePayload(io.BytesIO(data))
writer = MockStreamWriter()
await p.write_with_length(writer, 5)
assert writer.get_written_bytes() == b"01234"
assert len(writer.get_written_bytes()) == 5
async def test_bytesio_payload_write_with_length_no_limit() -> None:
"""Test BytesIOPayload writing with no content length limit."""
data = b"0123456789"
p = payload.BytesIOPayload(io.BytesIO(data))
writer = MockStreamWriter()
await p.write_with_length(writer, None)
assert writer.get_written_bytes() == data
assert len(writer.get_written_bytes()) == 10
async def test_bytesio_payload_write_with_length_exact() -> None:
"""Test BytesIOPayload writing with exact content length."""
data = b"0123456789"
p = payload.BytesIOPayload(io.BytesIO(data))
writer = MockStreamWriter()
await p.write_with_length(writer, 10)
assert writer.get_written_bytes() == data
assert len(writer.get_written_bytes()) == 10
async def test_bytesio_payload_write_with_length_truncated() -> None:
"""Test BytesIOPayload writing with truncated content length."""
data = b"0123456789"
payload_bytesio = payload.BytesIOPayload(io.BytesIO(data))
writer = MockStreamWriter()
await payload_bytesio.write_with_length(writer, 5)
assert writer.get_written_bytes() == b"01234"
assert len(writer.get_written_bytes()) == 5
async def test_bytesio_payload_write_with_length_remaining_zero() -> None:
"""Test BytesIOPayload with content_length smaller than first read chunk."""
data = b"0123456789" * 10 # 100 bytes
bio = io.BytesIO(data)
payload_bytesio = payload.BytesIOPayload(bio)
writer = MockStreamWriter()
# Mock the read method to return smaller chunks
original_read = bio.read
read_calls = 0
def mock_read(size: int | None = None) -> bytes:
nonlocal read_calls
read_calls += 1
if read_calls == 1:
# First call: return 3 bytes (less than content_length=5)
return original_read(3)
else:
# Subsequent calls return remaining data normally
return original_read(size)
with unittest.mock.patch.object(bio, "read", mock_read):
await payload_bytesio.write_with_length(writer, 5)
assert len(writer.get_written_bytes()) == 5
assert writer.get_written_bytes() == b"01234"
async def test_bytesio_payload_large_data_multiple_chunks() -> None:
"""Test BytesIOPayload with large data requiring multiple read chunks."""
chunk_size = 2**16 # 64KB (READ_SIZE)
data = b"x" * (chunk_size + 1000) # Slightly larger than READ_SIZE
payload_bytesio = payload.BytesIOPayload(io.BytesIO(data))
writer = MockStreamWriter()
await payload_bytesio.write_with_length(writer, None)
assert writer.get_written_bytes() == data
assert len(writer.get_written_bytes()) == chunk_size + 1000
async def test_bytesio_payload_remaining_bytes_exhausted() -> None:
"""Test BytesIOPayload when remaining_bytes becomes <= 0."""
data = b"0123456789abcdef" * 1000 # 16000 bytes
payload_bytesio = payload.BytesIOPayload(io.BytesIO(data))
writer = MockStreamWriter()
await payload_bytesio.write_with_length(writer, 8000) # Exactly half the data
written = writer.get_written_bytes()
assert len(written) == 8000
assert written == data[:8000]
async def test_iobase_payload_exact_chunk_size_limit() -> None:
"""Test IOBasePayload with content length matching exactly one read chunk."""
chunk_size = 2**16 # 65536 bytes (READ_SIZE)
data = b"x" * chunk_size + b"extra" # Slightly larger than one read chunk
p = payload.IOBasePayload(io.BytesIO(data))
writer = MockStreamWriter()
await p.write_with_length(writer, chunk_size)
written = writer.get_written_bytes()
assert len(written) == chunk_size
assert written == data[:chunk_size]
async def test_iobase_payload_reads_in_chunks() -> None:
"""Test IOBasePayload reads data in chunks of READ_SIZE, not all at once."""
# Create a large file that's multiple times larger than READ_SIZE
large_data = b"x" * (READ_SIZE * 3 + 1000) # ~192KB + 1000 bytes
# Mock the file-like object to track read calls
mock_file = unittest.mock.Mock(spec=io.BytesIO)
mock_file.tell.return_value = 0
mock_file.fileno.side_effect = AttributeError # Make size return None
# Track the sizes of read() calls
read_sizes = []
def mock_read(size: int) -> bytes:
read_sizes.append(size)
# Return data based on how many times read was called
call_count = len(read_sizes)
if call_count == 1:
return large_data[:size]
elif call_count == 2:
return large_data[READ_SIZE : READ_SIZE + size]
elif call_count == 3:
return large_data[READ_SIZE * 2 : READ_SIZE * 2 + size]
else:
return large_data[READ_SIZE * 3 :]
mock_file.read.side_effect = mock_read
payload_obj = payload.IOBasePayload(mock_file)
writer = MockStreamWriter()
# Write with a large content_length
await payload_obj.write_with_length(writer, len(large_data))
# Verify that reads were limited to READ_SIZE
assert len(read_sizes) > 1 # Should have multiple reads
for read_size in read_sizes:
assert (
read_size <= READ_SIZE
), f"Read size {read_size} exceeds READ_SIZE {READ_SIZE}"
async def test_iobase_payload_large_content_length() -> None:
"""Test IOBasePayload with very large content_length doesn't read all at once."""
data = b"x" * (READ_SIZE + 1000)
# Create a custom file-like object that tracks read sizes
class TrackingBytesIO(io.BytesIO):
def __init__(self, data: bytes) -> None:
super().__init__(data)
self.read_sizes: list[int] = []
def read(self, size: int | None = -1) -> bytes:
self.read_sizes.append(size if size is not None else -1)
return super().read(size)
tracking_file = TrackingBytesIO(data)
payload_obj = payload.IOBasePayload(tracking_file)
writer = MockStreamWriter()
# Write with a very large content_length (simulating the bug scenario)
large_content_length = 10 * 1024 * 1024 # 10MB
await payload_obj.write_with_length(writer, large_content_length)
# Verify no single read exceeded READ_SIZE
for read_size in tracking_file.read_sizes:
assert (
read_size <= READ_SIZE
), f"Read size {read_size} exceeds READ_SIZE {READ_SIZE}"
# Verify the correct amount of data was written
assert writer.get_written_bytes() == data
async def test_textio_payload_reads_in_chunks() -> None:
"""Test TextIOPayload reads data in chunks of READ_SIZE, not all at once."""
# Create a large text file that's multiple times larger than READ_SIZE
large_text = "x" * (READ_SIZE * 3 + 1000) # ~192KB + 1000 chars
# Mock the file-like object to track read calls
mock_file = unittest.mock.Mock(spec=io.StringIO)
mock_file.tell.return_value = 0
mock_file.fileno.side_effect = AttributeError # Make size return None
mock_file.encoding = "utf-8"
# Track the sizes of read() calls
read_sizes = []
def mock_read(size: int) -> str:
read_sizes.append(size)
# Return data based on how many times read was called
call_count = len(read_sizes)
if call_count == 1:
return large_text[:size]
elif call_count == 2:
return large_text[READ_SIZE : READ_SIZE + size]
elif call_count == 3:
return large_text[READ_SIZE * 2 : READ_SIZE * 2 + size]
else:
return large_text[READ_SIZE * 3 :]
mock_file.read.side_effect = mock_read
payload_obj = payload.TextIOPayload(mock_file)
writer = MockStreamWriter()
# Write with a large content_length
await payload_obj.write_with_length(writer, len(large_text.encode("utf-8")))
# Verify that reads were limited to READ_SIZE
assert len(read_sizes) > 1 # Should have multiple reads
for read_size in read_sizes:
assert (
read_size <= READ_SIZE
), f"Read size {read_size} exceeds READ_SIZE {READ_SIZE}"
async def test_textio_payload_large_content_length() -> None:
"""Test TextIOPayload with very large content_length doesn't read all at once."""
text_data = "x" * (READ_SIZE + 1000)
# Create a custom file-like object that tracks read sizes
class TrackingStringIO(io.StringIO):
def __init__(self, data: str) -> None:
super().__init__(data)
self.read_sizes: list[int] = []
def read(self, size: int | None = -1) -> str:
self.read_sizes.append(size if size is not None else -1)
return super().read(size)
tracking_file = TrackingStringIO(text_data)
payload_obj = payload.TextIOPayload(tracking_file)
writer = MockStreamWriter()
# Write with a very large content_length (simulating the bug scenario)
large_content_length = 10 * 1024 * 1024 # 10MB
await payload_obj.write_with_length(writer, large_content_length)
# Verify no single read exceeded READ_SIZE
for read_size in tracking_file.read_sizes:
assert (
read_size <= READ_SIZE
), f"Read size {read_size} exceeds READ_SIZE {READ_SIZE}"
# Verify the correct amount of data was written
assert writer.get_written_bytes() == text_data.encode("utf-8")
async def test_async_iterable_payload_write_with_length_no_limit() -> None:
"""Test AsyncIterablePayload writing with no content length limit."""
async def gen() -> AsyncIterator[bytes]:
yield b"0123"
yield b"4567"
yield b"89"
p = payload.AsyncIterablePayload(gen())
writer = MockStreamWriter()
await p.write_with_length(writer, None)
assert writer.get_written_bytes() == b"0123456789"
assert len(writer.get_written_bytes()) == 10
async def test_async_iterable_payload_write_with_length_exact() -> None:
"""Test AsyncIterablePayload writing with exact content length."""
async def gen() -> AsyncIterator[bytes]:
yield b"0123"
yield b"4567"
yield b"89"
p = payload.AsyncIterablePayload(gen())
writer = MockStreamWriter()
await p.write_with_length(writer, 10)
assert writer.get_written_bytes() == b"0123456789"
assert len(writer.get_written_bytes()) == 10
async def test_async_iterable_payload_write_with_length_truncated_mid_chunk() -> None:
"""Test AsyncIterablePayload writing with content length truncating mid-chunk."""
async def gen() -> AsyncIterator[bytes]:
yield b"0123"
yield b"4567"
yield b"89" # pragma: no cover
p = payload.AsyncIterablePayload(gen())
writer = MockStreamWriter()
await p.write_with_length(writer, 6)
assert writer.get_written_bytes() == b"012345"
assert len(writer.get_written_bytes()) == 6
async def test_async_iterable_payload_write_with_length_truncated_at_chunk() -> None:
"""Test AsyncIterablePayload writing with content length truncating at chunk boundary."""
async def gen() -> AsyncIterator[bytes]:
yield b"0123"
yield b"4567" # pragma: no cover
yield b"89" # pragma: no cover
p = payload.AsyncIterablePayload(gen())
writer = MockStreamWriter()
await p.write_with_length(writer, 4)
assert writer.get_written_bytes() == b"0123"
assert len(writer.get_written_bytes()) == 4
async def test_bytes_payload_backwards_compatibility() -> None:
"""Test BytesPayload.write() backwards compatibility delegates to write_with_length()."""
p = payload.BytesPayload(b"1234567890")
writer = MockStreamWriter()
await p.write(writer)
assert writer.get_written_bytes() == b"1234567890"
async def test_textio_payload_with_encoding() -> None:
"""Test TextIOPayload reading with encoding and size constraints."""
data = io.StringIO("hello world")
p = payload.TextIOPayload(data, encoding="utf-8")
writer = MockStreamWriter()
await p.write_with_length(writer, 8)
# Should write exactly 8 bytes: "hello wo"
assert writer.get_written_bytes() == b"hello wo"
async def test_textio_payload_as_bytes() -> None:
"""Test TextIOPayload.as_bytes method with different encodings."""
# Test with UTF-8 encoding
data = io.StringIO("Hello 世界")
p = payload.TextIOPayload(data, encoding="utf-8")
# Test as_bytes() method
result = await p.as_bytes()
assert result == "Hello 世界".encode()
# Test that position is restored for multiple reads
result2 = await p.as_bytes()
assert result2 == "Hello 世界".encode()
# Test with different encoding parameter (should use instance encoding)
result3 = await p.as_bytes(encoding="latin-1")
assert result3 == "Hello 世界".encode() # Should still use utf-8
# Test with different encoding in payload
data2 = io.StringIO("Hello World")
p2 = payload.TextIOPayload(data2, encoding="latin-1")
result4 = await p2.as_bytes()
assert result4 == b"Hello World" # latin-1 encoding
# Test with no explicit encoding (defaults to utf-8)
data3 = io.StringIO("Test データ")
p3 = payload.TextIOPayload(data3)
result5 = await p3.as_bytes()
assert result5 == "Test データ".encode()
# Test with encoding errors parameter
data4 = io.StringIO("Test")
p4 = payload.TextIOPayload(data4, encoding="ascii")
result6 = await p4.as_bytes(errors="strict")
assert result6 == b"Test"
async def test_bytesio_payload_backwards_compatibility() -> None:
"""Test BytesIOPayload.write() backwards compatibility delegates to write_with_length()."""
data = io.BytesIO(b"test data")
p = payload.BytesIOPayload(data)
writer = MockStreamWriter()
await p.write(writer)
assert writer.get_written_bytes() == b"test data"
async def test_async_iterable_payload_backwards_compatibility() -> None:
"""Test AsyncIterablePayload.write() backwards compatibility delegates to write_with_length()."""
async def gen() -> AsyncIterator[bytes]:
yield b"chunk1"
yield b"chunk2" # pragma: no cover
p = payload.AsyncIterablePayload(gen())
writer = MockStreamWriter()
await p.write(writer)
assert writer.get_written_bytes() == b"chunk1chunk2"
async def test_async_iterable_payload_with_none_iterator() -> None:
"""Test AsyncIterablePayload with None iterator returns early without writing."""
async def gen() -> AsyncIterator[bytes]:
yield b"test" # pragma: no cover
p = payload.AsyncIterablePayload(gen())
# Manually set _iter to None to test the guard clause
p._iter = None
writer = MockStreamWriter()
# Should return early without writing anything
await p.write_with_length(writer, 10)
assert writer.get_written_bytes() == b""
async def test_async_iterable_payload_caching() -> None:
"""Test AsyncIterablePayload caching behavior."""
async def gen() -> AsyncIterator[bytes]:
yield b"Hello"
yield b" "
yield b"World"
p = payload.AsyncIterablePayload(gen())
# First call to as_bytes should consume iterator and cache
result1 = await p.as_bytes()
assert result1 == b"Hello World"
assert p._iter is None # Iterator exhausted
assert p._cached_chunks == [b"Hello", b" ", b"World"] # Chunks cached
assert p._consumed is False # Not marked as consumed to allow reuse
# Second call should use cache
result2 = await p.as_bytes()
assert result2 == b"Hello World"
assert p._cached_chunks == [b"Hello", b" ", b"World"] # Still cached
# decode should work with cached chunks
decoded = p.decode()
assert decoded == "Hello World"
# write_with_length should use cached chunks
writer = MockStreamWriter()
await p.write_with_length(writer, None)
assert writer.get_written_bytes() == b"Hello World"
# write_with_length with limit should respect it
writer2 = MockStreamWriter()
await p.write_with_length(writer2, 5)
assert writer2.get_written_bytes() == b"Hello"
async def test_async_iterable_payload_decode_without_cache() -> None:
"""Test AsyncIterablePayload decode raises error without cache."""
async def gen() -> AsyncIterator[bytes]:
yield b"test"
p = payload.AsyncIterablePayload(gen())
# decode should raise without cache
with pytest.raises(TypeError) as excinfo:
p.decode()
assert "Unable to decode - content not cached" in str(excinfo.value)
# After as_bytes, decode should work
await p.as_bytes()
assert p.decode() == "test"
async def test_async_iterable_payload_write_then_cache() -> None:
"""Test AsyncIterablePayload behavior when written before caching."""
async def gen() -> AsyncIterator[bytes]:
yield b"Hello"
yield b"World"
p = payload.AsyncIterablePayload(gen())
# First write without caching (streaming)
writer1 = MockStreamWriter()
await p.write_with_length(writer1, None)
assert writer1.get_written_bytes() == b"HelloWorld"
assert p._iter is None # Iterator exhausted
assert p._cached_chunks is None # No cache created
assert p._consumed is True # Marked as consumed
# Subsequent operations should handle exhausted iterator
result = await p.as_bytes()
assert result == b"" # Empty since iterator exhausted without cache
# Write should also be empty
writer2 = MockStreamWriter()
await p.write_with_length(writer2, None)
assert writer2.get_written_bytes() == b""
async def test_bytes_payload_reusability() -> None:
"""Test that BytesPayload can be written and read multiple times."""
data = b"test payload data"
p = payload.BytesPayload(data)
# First write_with_length
writer1 = MockStreamWriter()
await p.write_with_length(writer1, None)
assert writer1.get_written_bytes() == data
# Second write_with_length (simulating redirect)
writer2 = MockStreamWriter()
await p.write_with_length(writer2, None)
assert writer2.get_written_bytes() == data
# Write with partial length
writer3 = MockStreamWriter()
await p.write_with_length(writer3, 5)
assert writer3.get_written_bytes() == b"test "
# Test as_bytes multiple times
bytes1 = await p.as_bytes()
bytes2 = await p.as_bytes()
bytes3 = await p.as_bytes()
assert bytes1 == bytes2 == bytes3 == data
async def test_string_payload_reusability() -> None:
"""Test that StringPayload can be written and read multiple times."""
text = "test string data"
expected_bytes = text.encode("utf-8")
p = payload.StringPayload(text)
# First write_with_length
writer1 = MockStreamWriter()
await p.write_with_length(writer1, None)
assert writer1.get_written_bytes() == expected_bytes
# Second write_with_length (simulating redirect)
writer2 = MockStreamWriter()
await p.write_with_length(writer2, None)
assert writer2.get_written_bytes() == expected_bytes
# Write with partial length
writer3 = MockStreamWriter()
await p.write_with_length(writer3, 5)
assert writer3.get_written_bytes() == b"test "
# Test as_bytes multiple times
bytes1 = await p.as_bytes()
bytes2 = await p.as_bytes()
bytes3 = await p.as_bytes()
assert bytes1 == bytes2 == bytes3 == expected_bytes
async def test_bytes_io_payload_reusability() -> None:
"""Test that BytesIOPayload can be written and read multiple times."""
data = b"test bytesio payload"
bytes_io = io.BytesIO(data)
p = payload.BytesIOPayload(bytes_io)
# First write_with_length
writer1 = MockStreamWriter()
await p.write_with_length(writer1, None)
assert writer1.get_written_bytes() == data
# Second write_with_length (simulating redirect)
writer2 = MockStreamWriter()
await p.write_with_length(writer2, None)
assert writer2.get_written_bytes() == data
# Write with partial length
writer3 = MockStreamWriter()
await p.write_with_length(writer3, 5)
assert writer3.get_written_bytes() == b"test "
# Test as_bytes multiple times
bytes1 = await p.as_bytes()
bytes2 = await p.as_bytes()
bytes3 = await p.as_bytes()
assert bytes1 == bytes2 == bytes3 == data
async def test_string_io_payload_reusability() -> None:
"""Test that StringIOPayload can be written and read multiple times."""
text = "test stringio payload"
expected_bytes = text.encode("utf-8")
string_io = io.StringIO(text)
p = payload.StringIOPayload(string_io)
# Note: StringIOPayload reads all content in __init__ and becomes a StringPayload
# So it should be fully reusable
# First write_with_length
writer1 = MockStreamWriter()
await p.write_with_length(writer1, None)
assert writer1.get_written_bytes() == expected_bytes
# Second write_with_length (simulating redirect)
writer2 = MockStreamWriter()
await p.write_with_length(writer2, None)
assert writer2.get_written_bytes() == expected_bytes
# Write with partial length
writer3 = MockStreamWriter()
await p.write_with_length(writer3, 5)
assert writer3.get_written_bytes() == b"test "
# Test as_bytes multiple times
bytes1 = await p.as_bytes()
bytes2 = await p.as_bytes()
bytes3 = await p.as_bytes()
assert bytes1 == bytes2 == bytes3 == expected_bytes
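# Per the note in the test above, StringIOPayload presumably drains the
# StringIO eagerly in its constructor. A small sketch of that assumption
# (hypothetical helper, not part of the suite):
def _sketch_stringio_payload_drains_buffer() -> None:
    sio = io.StringIO("drained")
    payload.StringIOPayload(sio)
    # The underlying buffer has been read to the end by the constructor.
    assert sio.read() == ""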
async def test_buffered_reader_payload_reusability() -> None:
"""Test that BufferedReaderPayload can be written and read multiple times."""
data = b"test buffered reader payload"
buffer = io.BufferedReader(io.BytesIO(data))
p = payload.BufferedReaderPayload(buffer)
# First write_with_length
writer1 = MockStreamWriter()
await p.write_with_length(writer1, None)
assert writer1.get_written_bytes() == data
# Second write_with_length (simulating redirect)
writer2 = MockStreamWriter()
await p.write_with_length(writer2, None)
assert writer2.get_written_bytes() == data
# Write with partial length
writer3 = MockStreamWriter()
await p.write_with_length(writer3, 5)
assert writer3.get_written_bytes() == b"test "
# Test as_bytes multiple times
bytes1 = await p.as_bytes()
bytes2 = await p.as_bytes()
bytes3 = await p.as_bytes()
assert bytes1 == bytes2 == bytes3 == data
async def test_async_iterable_payload_reusability_with_cache() -> None:
"""Test that AsyncIterablePayload can be reused when cached via as_bytes."""
async def gen() -> AsyncIterator[bytes]:
yield b"async "
yield b"iterable "
yield b"payload"
expected_data = b"async iterable payload"
p = payload.AsyncIterablePayload(gen())
# First call to as_bytes should cache the data
bytes1 = await p.as_bytes()
assert bytes1 == expected_data
assert p._cached_chunks is not None
assert p._iter is None # Iterator exhausted
# Subsequent as_bytes calls should use cache
bytes2 = await p.as_bytes()
bytes3 = await p.as_bytes()
assert bytes1 == bytes2 == bytes3 == expected_data
# Now writes should also use the cached data
writer1 = MockStreamWriter()
await p.write_with_length(writer1, None)
assert writer1.get_written_bytes() == expected_data
# Second write should also work
writer2 = MockStreamWriter()
await p.write_with_length(writer2, None)
assert writer2.get_written_bytes() == expected_data
# Write with partial length
writer3 = MockStreamWriter()
await p.write_with_length(writer3, 5)
assert writer3.get_written_bytes() == b"async"
async def test_async_iterable_payload_no_reuse_without_cache() -> None:
"""Test that AsyncIterablePayload cannot be reused without caching."""
async def gen() -> AsyncIterator[bytes]:
yield b"test "
yield b"data"
p = payload.AsyncIterablePayload(gen())
# First write exhausts the iterator
writer1 = MockStreamWriter()
await p.write_with_length(writer1, None)
assert writer1.get_written_bytes() == b"test data"
assert p._iter is None # Iterator exhausted
assert p._consumed is True
# Second write should produce empty result
writer2 = MockStreamWriter()
await p.write_with_length(writer2, None)
assert writer2.get_written_bytes() == b""
async def test_bytes_io_payload_close_does_not_close_io() -> None:
"""Test that BytesIOPayload close() does not close the underlying BytesIO."""
bytes_io = io.BytesIO(b"data")
bytes_io_payload = payload.BytesIOPayload(bytes_io)
# Close the payload
await bytes_io_payload.close()
# BytesIO should NOT be closed
assert not bytes_io.closed
# Can still write after close
writer = MockStreamWriter()
await bytes_io_payload.write_with_length(writer, None)
assert writer.get_written_bytes() == b"data"
async def test_custom_payload_backwards_compat_as_bytes() -> None:
"""Test backwards compatibility for custom Payload that only implements decode()."""
class LegacyPayload(payload.Payload):
"""A custom payload that only implements decode() like old code might do."""
def __init__(self, data: str) -> None:
super().__init__(data, headers=CIMultiDict())
self._data = data
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
"""Custom decode implementation."""
return self._data
async def write(self, writer: AbstractStreamWriter) -> None:
"""Write implementation which is a no-op for this test."""
# Create instance with test data
p = LegacyPayload("Hello, World!")
# Test that as_bytes() works even though it's not explicitly implemented
# The base class should call decode() and encode the result
result = await p.as_bytes()
assert result == b"Hello, World!"
# Test with different text
p2 = LegacyPayload("Test with special chars: café")
result_utf8 = await p2.as_bytes(encoding="utf-8")
assert result_utf8 == "Test with special chars: café".encode()
# Test that decode() still works as expected
assert p.decode() == "Hello, World!"
assert p2.decode() == "Test with special chars: café"
async def test_custom_payload_with_encoding_backwards_compat() -> None:
"""Test custom Payload with encoding set uses instance encoding for as_bytes()."""
class EncodedPayload(payload.Payload):
"""A custom payload with specific encoding."""
def __init__(self, data: str, encoding: str) -> None:
super().__init__(data, headers=CIMultiDict(), encoding=encoding)
self._data = data
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
"""Custom decode implementation."""
return self._data
async def write(self, writer: AbstractStreamWriter) -> None:
"""Write implementation is a no-op."""
# Create instance with specific encoding
p = EncodedPayload("Test data", encoding="latin-1")
# as_bytes() should use the instance encoding (latin-1) not the default utf-8
result = await p.as_bytes()
assert result == b"Test data" # ASCII chars are same in latin-1
# Test with non-ASCII that differs between encodings
p2 = EncodedPayload("café", encoding="latin-1")
result_latin1 = await p2.as_bytes()
assert result_latin1 == "café".encode("latin-1")
assert result_latin1 != "café".encode() # Should be different bytes
async def test_iobase_payload_close_idempotent() -> None:
"""Test that IOBasePayload.close() is idempotent and covers the _consumed check."""
file_like = io.BytesIO(b"test data")
p = payload.IOBasePayload(file_like)
# First close should set _consumed to True
await p.close()
assert p._consumed is True
# Second close should be a no-op due to _consumed check (line 621)
await p.close()
assert p._consumed is True
def test_iobase_payload_decode() -> None:
"""Test IOBasePayload.decode() returns correct string."""
# Test with UTF-8 encoded text
text = "Hello, 世界! 🌍"
file_like = io.BytesIO(text.encode("utf-8"))
p = payload.IOBasePayload(file_like)
# decode() should return the original string
assert p.decode() == text
# Test with different encoding
latin1_text = "café"
file_like2 = io.BytesIO(latin1_text.encode("latin-1"))
p2 = payload.IOBasePayload(file_like2)
assert p2.decode("latin-1") == latin1_text
# Test that file position is restored
file_like3 = io.BytesIO(b"test data")
file_like3.read(4) # Move position forward
p3 = payload.IOBasePayload(file_like3)
# decode() should read from the stored start position (4)
assert p3.decode() == " data"
def test_bytes_payload_size() -> None:
"""Test BytesPayload.size property returns correct byte length."""
# Test with bytes
bp = payload.BytesPayload(b"Hello World")
assert bp.size == 11
# Test with empty bytes
bp_empty = payload.BytesPayload(b"")
assert bp_empty.size == 0
# Test with bytearray
ba = bytearray(b"Hello World")
bp_array = payload.BytesPayload(ba)
assert bp_array.size == 11
def test_string_payload_size() -> None:
"""Test StringPayload.size property with different encodings."""
# Test ASCII string with default UTF-8 encoding
sp = payload.StringPayload("Hello World")
assert sp.size == 11
# Test Unicode string with default UTF-8 encoding
unicode_str = "Hello 世界"
sp_unicode = payload.StringPayload(unicode_str)
assert sp_unicode.size == len(unicode_str.encode("utf-8"))
# Test with UTF-16 encoding
sp_utf16 = payload.StringPayload("Hello World", encoding="utf-16")
assert sp_utf16.size == len("Hello World".encode("utf-16"))
# Test with latin-1 encoding
sp_latin1 = payload.StringPayload("café", encoding="latin-1")
assert sp_latin1.size == len("café".encode("latin-1"))
def test_string_io_payload_size() -> None:
"""Test StringIOPayload.size property."""
# Test normal string
sio = StringIO("Hello World")
siop = payload.StringIOPayload(sio)
assert siop.size == 11
# Test Unicode string
sio_unicode = StringIO("Hello 世界")
siop_unicode = payload.StringIOPayload(sio_unicode)
assert siop_unicode.size == len("Hello 世界".encode())
# Test with custom encoding
sio_custom = StringIO("Hello")
siop_custom = payload.StringIOPayload(sio_custom, encoding="utf-16")
assert siop_custom.size == len("Hello".encode("utf-16"))
# Test with emoji to ensure correct byte count
sio_emoji = StringIO("Hello 👋🌍")
siop_emoji = payload.StringIOPayload(sio_emoji)
assert siop_emoji.size == len("Hello 👋🌍".encode())
# Verify it's not the string length
assert siop_emoji.size != len("Hello 👋🌍")
def test_all_string_payloads_size_is_bytes() -> None:
"""Test that all string-like payload classes report size in bytes, not string length."""
# Test string with multibyte characters
test_str = "Hello 👋 世界 🌍" # Contains emoji and Chinese characters
# StringPayload
sp = payload.StringPayload(test_str)
assert sp.size == len(test_str.encode("utf-8"))
assert sp.size != len(test_str) # Ensure it's not string length
# StringIOPayload
sio = StringIO(test_str)
siop = payload.StringIOPayload(sio)
assert siop.size == len(test_str.encode("utf-8"))
assert siop.size != len(test_str)
# Test with different encoding
sp_utf16 = payload.StringPayload(test_str, encoding="utf-16")
assert sp_utf16.size == len(test_str.encode("utf-16"))
assert sp_utf16.size != sp.size # Different encoding = different size
# JsonPayload (which extends BytesPayload)
json_data = {"message": test_str}
jp = payload.JsonPayload(json_data)
# JSON escapes Unicode, so we need to check the actual encoded size
json_str = json.dumps(json_data)
assert jp.size == len(json_str.encode("utf-8"))
# Test JsonPayload with ensure_ascii=False to get actual UTF-8 encoding
jp_utf8 = payload.JsonPayload(
json_data, dumps=lambda x: json.dumps(x, ensure_ascii=False)
)
json_str_utf8 = json.dumps(json_data, ensure_ascii=False)
assert jp_utf8.size == len(json_str_utf8.encode("utf-8"))
assert jp_utf8.size != len(
json_str_utf8
) # Now it's different due to multibyte chars
def test_bytes_io_payload_size() -> None:
"""Test BytesIOPayload.size property."""
# Test normal bytes
bio = io.BytesIO(b"Hello World")
biop = payload.BytesIOPayload(bio)
assert biop.size == 11
# Test empty BytesIO
bio_empty = io.BytesIO(b"")
biop_empty = payload.BytesIOPayload(bio_empty)
assert biop_empty.size == 0
# Test with position not at start
bio_pos = io.BytesIO(b"Hello World")
bio_pos.seek(5)
biop_pos = payload.BytesIOPayload(bio_pos)
assert biop_pos.size == 6 # Size should be from position to end
def test_json_payload_size() -> None:
"""Test JsonPayload.size property."""
# Test simple dict
data = {"hello": "world"}
jp = payload.JsonPayload(data)
expected_json = json.dumps(data) # Use actual json.dumps output
assert jp.size == len(expected_json.encode("utf-8"))
# Test with Unicode
data_unicode = {"message": "Hello 世界"}
jp_unicode = payload.JsonPayload(data_unicode)
expected_unicode = json.dumps(data_unicode)
assert jp_unicode.size == len(expected_unicode.encode("utf-8"))
# Test with custom encoding
data_custom = {"test": "data"}
jp_custom = payload.JsonPayload(data_custom, encoding="utf-16")
expected_custom = json.dumps(data_custom)
assert jp_custom.size == len(expected_custom.encode("utf-16"))
async def test_text_io_payload_size_matches_file_encoding(tmp_path: Path) -> None:
"""Test TextIOPayload.size when file encoding matches payload encoding."""
# Create UTF-8 file
utf8_file = tmp_path / "test_utf8.txt"
content = "Hello 世界"
# Write file in executor
loop = asyncio.get_running_loop()
await loop.run_in_executor(None, utf8_file.write_text, content, "utf-8")
# Open file in executor
def open_file() -> TextIO:
return open(utf8_file, encoding="utf-8")
f = await loop.run_in_executor(None, open_file)
try:
tiop = payload.TextIOPayload(f)
# Size should match the actual UTF-8 encoded size
assert tiop.size == len(content.encode("utf-8"))
finally:
await loop.run_in_executor(None, f.close)
async def test_text_io_payload_size_utf16(tmp_path: Path) -> None:
"""Test TextIOPayload.size reports correct size with utf-16."""
# Create UTF-16 file
utf16_file = tmp_path / "test_utf16.txt"
content = "Hello World"
loop = asyncio.get_running_loop()
# Write file in executor
await loop.run_in_executor(None, utf16_file.write_text, content, "utf-16")
# Get file size in executor
utf16_file_size = await loop.run_in_executor(
None, lambda: utf16_file.stat().st_size
)
# Open file in executor
def open_file() -> TextIO:
return open(utf16_file, encoding="utf-16")
f = await loop.run_in_executor(None, open_file)
try:
tiop = payload.TextIOPayload(f, encoding="utf-16")
# Payload reports file size on disk (UTF-16)
assert tiop.size == utf16_file_size
# Write to a buffer to see what actually gets sent
writer = BufferWriter()
await tiop.write(writer)
# Check that the actual written bytes match file size
assert len(writer.buffer) == utf16_file_size
finally:
await loop.run_in_executor(None, f.close)
async def test_iobase_payload_size_after_reading(tmp_path: Path) -> None:
"""Test that IOBasePayload.size returns correct size after file has been read.
This verifies that size calculation properly accounts for the initial
file position, which is critical for 307/308 redirects where the same
payload instance is reused.
"""
# Create a test file with known content
test_file = tmp_path / "test.txt"
content = b"Hello, World! This is test content."
await asyncio.to_thread(test_file.write_bytes, content)
expected_size = len(content)
# Open the file and create payload
f = await asyncio.to_thread(open, test_file, "rb")
try:
p = payload.BufferedReaderPayload(f)
# First size check - should return full file size
assert p.size == expected_size
# Read the file (simulating first request)
writer = BufferWriter()
await p.write(writer)
assert len(writer.buffer) == expected_size
# Second size check - should still return full file size
assert p.size == expected_size
# Attempting to write again should write the full content
writer2 = BufferWriter()
await p.write(writer2)
assert len(writer2.buffer) == expected_size
finally:
await asyncio.to_thread(f.close)
async def test_iobase_payload_size_unseekable() -> None:
"""Test that IOBasePayload.size returns None for unseekable files."""
class UnseekableFile:
"""Mock file object that doesn't support seeking."""
def __init__(self, content: bytes) -> None:
self.content = content
self.pos = 0
def read(self, size: int) -> bytes:
result = self.content[self.pos : self.pos + size]
self.pos += len(result)
return result
def tell(self) -> int:
raise OSError("Unseekable file")
content = b"Unseekable content"
f = UnseekableFile(content)
p = payload.IOBasePayload(f) # type: ignore[arg-type]
# Size should return None for unseekable files
assert p.size is None
# Payload should not be consumed before writing
assert p.consumed is False
# Writing should still work
writer = BufferWriter()
await p.write(writer)
assert writer.buffer == content
# For unseekable files that can't tell() or seek(),
# they are marked as consumed after the first write
assert p.consumed is True
async def test_empty_bytes_payload_is_reusable() -> None:
"""Test that empty BytesPayload can be safely reused across requests."""
empty_payload = payload.PAYLOAD_REGISTRY.get(b"", disposition=None)
assert isinstance(empty_payload, payload.BytesPayload)
assert empty_payload.size == 0
assert empty_payload.consumed is False
assert empty_payload.autoclose is True
initial_headers = dict(empty_payload.headers)
for i in range(3):
writer = BufferWriter()
await empty_payload.write_with_length(writer, None)
assert writer.buffer == b""
assert empty_payload.consumed is False, f"consumed flag changed on write {i+1}"
assert (
dict(empty_payload.headers) == initial_headers
), f"headers mutated on write {i+1}"
assert empty_payload.size == 0, f"size changed on write {i+1}"
assert empty_payload.headers == CIMultiDict(initial_headers)
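# These coroutine tests assume an asyncio-enabled pytest configuration, which
# aiohttp's suite wires up elsewhere. To run an excerpt like this standalone,
# one hypothetical setup would be the pytest-asyncio plugin with auto mode:
#
#     [pytest]
#     asyncio_mode = auto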
|
./temp_repos/aiohttp/aiohttp/payload.py
|
./temp_repos/aiohttp/tests/test_payload.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'LookupError'.
Context:
- Class Name: LookupError
- Dependencies to Mock: encoding, headers, type, content_type, disposition, value, filename, dumps
- Key Imports: itertools, mimetypes, io, streams, helpers, enum, typedefs, warnings, typing, asyncio
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
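Example (an illustrative sketch only; 'LookupError' here is Python's built-in
lookup exception, and the mocked mapping is a hypothetical stand-in):
    import unittest
    from unittest import mock
    class TestLookupErrorLookups(unittest.TestCase):
        def test_missing_key_raises_lookup_error(self) -> None:
            # Arrange: a MagicMock mapping whose item access fails.
            store = mock.MagicMock()
            store.__getitem__.side_effect = LookupError("missing")
            # Act / Assert: the failure scenario surfaces as LookupError.
            with self.assertRaises(LookupError):
                store["absent"]
        def test_present_key_succeeds(self) -> None:
            # Arrange: the same mock configured for the success scenario.
            store = mock.MagicMock()
            store.__getitem__.return_value = "value"
            # Act
            result = store["present"]
            # Assert
            self.assertEqual(result, "value")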
|
LookupError
|
python
|
import asyncio
import functools
import random
import socket
import sys
import traceback
import warnings
from collections import OrderedDict, defaultdict, deque
from collections.abc import Awaitable, Callable, Iterator, Sequence
from contextlib import suppress
from http import HTTPStatus
from itertools import chain, cycle, islice
from time import monotonic
from types import TracebackType
from typing import TYPE_CHECKING, Any, Literal, cast
import aiohappyeyeballs
from aiohappyeyeballs import AddrInfoType, SocketFactoryType
from multidict import CIMultiDict
from . import hdrs, helpers
from .abc import AbstractResolver, ResolveResult
from .client_exceptions import (
ClientConnectionError,
ClientConnectorCertificateError,
ClientConnectorDNSError,
ClientConnectorError,
ClientConnectorSSLError,
ClientHttpProxyError,
ClientProxyConnectionError,
ServerFingerprintMismatch,
UnixClientConnectorError,
cert_errors,
ssl_errors,
)
from .client_proto import ResponseHandler
from .client_reqrep import (
SSL_ALLOWED_TYPES,
ClientRequest,
ClientRequestBase,
Fingerprint,
)
from .helpers import (
_SENTINEL,
ceil_timeout,
is_ip_address,
sentinel,
set_exception,
set_result,
)
from .log import client_logger
from .resolver import DefaultResolver
if sys.version_info >= (3, 12):
from collections.abc import Buffer
else:
Buffer = "bytes | bytearray | memoryview[int] | memoryview[bytes]"
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]
EMPTY_SCHEMA_SET = frozenset({""})
HTTP_SCHEMA_SET = frozenset({"http", "https"})
WS_SCHEMA_SET = frozenset({"ws", "wss"})
HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET
HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET
NEEDS_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < (
3,
13,
1,
) or sys.version_info < (3, 12, 7)
# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960
# which first appeared in Python 3.12.7 and 3.13.1
__all__ = (
"BaseConnector",
"TCPConnector",
"UnixConnector",
"NamedPipeConnector",
"AddrInfoType",
"SocketFactoryType",
)
if TYPE_CHECKING:
from .client import ClientTimeout
from .client_reqrep import ConnectionKey
from .tracing import Trace
class Connection:
"""Represents a single connection."""
__slots__ = (
"_key",
"_connector",
"_loop",
"_protocol",
"_callbacks",
"_source_traceback",
)
def __init__(
self,
connector: "BaseConnector",
key: "ConnectionKey",
protocol: ResponseHandler,
loop: asyncio.AbstractEventLoop,
) -> None:
self._key = key
self._connector = connector
self._loop = loop
self._protocol: ResponseHandler | None = protocol
self._callbacks: list[Callable[[], None]] = []
self._source_traceback = (
traceback.extract_stack(sys._getframe(1)) if loop.get_debug() else None
)
def __repr__(self) -> str:
return f"Connection<{self._key}>"
def __del__(self, _warnings: Any = warnings) -> None:
if self._protocol is not None:
_warnings.warn(
f"Unclosed connection {self!r}", ResourceWarning, source=self
)
if self._loop.is_closed():
return
self._connector._release(self._key, self._protocol, should_close=True)
context = {"client_connection": self, "message": "Unclosed connection"}
if self._source_traceback is not None:
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
def __bool__(self) -> Literal[True]:
"""Force subclasses to not be falsy, to make checks simpler."""
return True
@property
def transport(self) -> asyncio.Transport | None:
if self._protocol is None:
return None
return self._protocol.transport
@property
def protocol(self) -> ResponseHandler | None:
return self._protocol
def add_callback(self, callback: Callable[[], None]) -> None:
if callback is not None:
self._callbacks.append(callback)
def _notify_release(self) -> None:
callbacks, self._callbacks = self._callbacks[:], []
for cb in callbacks:
with suppress(Exception):
cb()
def close(self) -> None:
self._notify_release()
if self._protocol is not None:
self._connector._release(self._key, self._protocol, should_close=True)
self._protocol = None
def release(self) -> None:
self._notify_release()
if self._protocol is not None:
self._connector._release(self._key, self._protocol)
self._protocol = None
@property
def closed(self) -> bool:
return self._protocol is None or not self._protocol.is_connected()
class _ConnectTunnelConnection(Connection):
"""Special connection wrapper for CONNECT tunnels that must never be pooled.
This connection wraps the proxy connection that will be upgraded with TLS.
It must never be released to the pool because:
1. Its 'closed' future will never complete, causing session.close() to hang
2. It represents an intermediate state, not a reusable connection
3. The real connection (with TLS) will be created separately
"""
def release(self) -> None:
"""Do nothing - don't pool or close the connection.
These connections are an intermediate state during the CONNECT tunnel
setup and will be cleaned up naturally after the TLS upgrade. If they
were to be pooled, they would never be properly closed, causing
session.close() to wait forever for their 'closed' future.
"""
class _TransportPlaceholder:
"""placeholder for BaseConnector.connect function"""
__slots__ = ("closed", "transport")
def __init__(self, closed_future: asyncio.Future[Exception | None]) -> None:
"""Initialize a placeholder for a transport."""
self.closed = closed_future
self.transport = None
def close(self) -> None:
"""Close the placeholder."""
def abort(self) -> None:
"""Abort the placeholder (does nothing)."""
class BaseConnector:
"""Base connector class.
keepalive_timeout - (optional) Keep-alive timeout.
force_close - Set to True to force close and do reconnect
after each request (and between redirects).
limit - The total number of simultaneous connections.
limit_per_host - Number of simultaneous connections to one host.
enable_cleanup_closed - Enables clean-up closed ssl transports.
Disabled by default.
timeout_ceil_threshold - Trigger ceiling of timeout values when
it's above timeout_ceil_threshold.
loop - Optional event loop.
"""
_closed = True # prevent AttributeError in __del__ if ctor was failed
_source_traceback = None
# abort transport after 2 seconds (cleanup broken connections)
_cleanup_closed_period = 2.0
allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET
def __init__(
self,
*,
keepalive_timeout: _SENTINEL | None | float = sentinel,
force_close: bool = False,
limit: int = 100,
limit_per_host: int = 0,
enable_cleanup_closed: bool = False,
timeout_ceil_threshold: float = 5,
) -> None:
if force_close:
if keepalive_timeout is not None and keepalive_timeout is not sentinel:
raise ValueError(
"keepalive_timeout cannot be set if force_close is True"
)
else:
if keepalive_timeout is sentinel:
keepalive_timeout = 15.0
self._timeout_ceil_threshold = timeout_ceil_threshold
loop = asyncio.get_running_loop()
self._closed = False
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
# Connection pool of reusable connections.
# We use a deque to store connections because it has O(1) popleft()
# and O(1) append() operations to implement a FIFO queue.
self._conns: defaultdict[
ConnectionKey, deque[tuple[ResponseHandler, float]]
] = defaultdict(deque)
self._limit = limit
self._limit_per_host = limit_per_host
self._acquired: set[ResponseHandler] = set()
self._acquired_per_host: defaultdict[ConnectionKey, set[ResponseHandler]] = (
defaultdict(set)
)
self._keepalive_timeout = cast(float, keepalive_timeout)
self._force_close = force_close
# {host_key: FIFO list of waiters}
# The FIFO is implemented with an OrderedDict with None keys because
# python does not have an ordered set.
self._waiters: defaultdict[
ConnectionKey, OrderedDict[asyncio.Future[None], None]
] = defaultdict(OrderedDict)
self._loop = loop
self._factory = functools.partial(ResponseHandler, loop=loop)
# start keep-alive connection cleanup task
self._cleanup_handle: asyncio.TimerHandle | None = None
# start cleanup closed transports task
self._cleanup_closed_handle: asyncio.TimerHandle | None = None
if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED:
warnings.warn(
"enable_cleanup_closed ignored because "
"https://github.com/python/cpython/pull/118960 is fixed "
f"in Python version {sys.version_info}",
DeprecationWarning,
stacklevel=2,
)
enable_cleanup_closed = False
self._cleanup_closed_disabled = not enable_cleanup_closed
self._cleanup_closed_transports: list[asyncio.Transport | None] = []
self._placeholder_future: asyncio.Future[Exception | None] = (
loop.create_future()
)
self._placeholder_future.set_result(None)
self._cleanup_closed()
def __del__(self, _warnings: Any = warnings) -> None:
if self._closed:
return
if not self._conns:
return
conns = [repr(c) for c in self._conns.values()]
self._close_immediately()
_warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, source=self)
context = {
"connector": self,
"connections": conns,
"message": "Unclosed connector",
}
if self._source_traceback is not None:
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
async def __aenter__(self) -> "BaseConnector":
return self
async def __aexit__(
self,
exc_type: type[BaseException] | None = None,
exc_value: BaseException | None = None,
exc_traceback: TracebackType | None = None,
) -> None:
await self.close()
@property
def force_close(self) -> bool:
"""Ultimately close connection on releasing if True."""
return self._force_close
@property
def limit(self) -> int:
"""The total number for simultaneous connections.
If limit is 0 the connector has no limit.
The default limit size is 100.
"""
return self._limit
@property
def limit_per_host(self) -> int:
"""The limit for simultaneous connections to the same endpoint.
        Endpoints are the same if they have an equal
(host, port, is_ssl) triple.
"""
return self._limit_per_host
def _cleanup(self) -> None:
"""Cleanup unused transports."""
if self._cleanup_handle:
self._cleanup_handle.cancel()
# _cleanup_handle should be unset, otherwise _release() will not
# recreate it ever!
self._cleanup_handle = None
now = monotonic()
timeout = self._keepalive_timeout
if self._conns:
connections = defaultdict(deque)
deadline = now - timeout
for key, conns in self._conns.items():
alive: deque[tuple[ResponseHandler, float]] = deque()
for proto, use_time in conns:
if proto.is_connected() and use_time - deadline >= 0:
alive.append((proto, use_time))
continue
transport = proto.transport
proto.close()
if not self._cleanup_closed_disabled and key.is_ssl:
self._cleanup_closed_transports.append(transport)
if alive:
connections[key] = alive
self._conns = connections
if self._conns:
self._cleanup_handle = helpers.weakref_handle(
self,
"_cleanup",
timeout,
self._loop,
timeout_ceil_threshold=self._timeout_ceil_threshold,
)
def _cleanup_closed(self) -> None:
"""Double confirmation for transport close.
        Some broken SSL servers may leave a socket open without a proper close.
"""
if self._cleanup_closed_handle:
self._cleanup_closed_handle.cancel()
for transport in self._cleanup_closed_transports:
if transport is not None:
transport.abort()
self._cleanup_closed_transports = []
if not self._cleanup_closed_disabled:
self._cleanup_closed_handle = helpers.weakref_handle(
self,
"_cleanup_closed",
self._cleanup_closed_period,
self._loop,
timeout_ceil_threshold=self._timeout_ceil_threshold,
)
async def close(self, *, abort_ssl: bool = False) -> None:
"""Close all opened transports.
:param abort_ssl: If True, SSL connections will be aborted immediately
without performing the shutdown handshake. This provides
faster cleanup at the cost of less graceful disconnection.
"""
waiters = self._close_immediately(abort_ssl=abort_ssl)
if waiters:
results = await asyncio.gather(*waiters, return_exceptions=True)
for res in results:
if isinstance(res, Exception):
err_msg = "Error while closing connector: " + repr(res)
client_logger.debug(err_msg)
def _close_immediately(self, *, abort_ssl: bool = False) -> list[Awaitable[object]]:
waiters: list[Awaitable[object]] = []
if self._closed:
return waiters
self._closed = True
try:
if self._loop.is_closed():
return waiters
# cancel cleanup task
if self._cleanup_handle:
self._cleanup_handle.cancel()
# cancel cleanup close task
if self._cleanup_closed_handle:
self._cleanup_closed_handle.cancel()
for data in self._conns.values():
for proto, _ in data:
if (
abort_ssl
and proto.transport
and proto.transport.get_extra_info("sslcontext") is not None
):
proto.abort()
else:
proto.close()
if closed := proto.closed:
waiters.append(closed)
for proto in self._acquired:
if (
abort_ssl
and proto.transport
and proto.transport.get_extra_info("sslcontext") is not None
):
proto.abort()
else:
proto.close()
if closed := proto.closed:
waiters.append(closed)
# TODO (A.Yushovskiy, 24-May-2019) collect transp. closing futures
for transport in self._cleanup_closed_transports:
if transport is not None:
transport.abort()
return waiters
finally:
self._conns.clear()
self._acquired.clear()
for keyed_waiters in self._waiters.values():
for keyed_waiter in keyed_waiters:
keyed_waiter.cancel()
self._waiters.clear()
self._cleanup_handle = None
self._cleanup_closed_transports.clear()
self._cleanup_closed_handle = None
@property
def closed(self) -> bool:
"""Is connector closed.
A readonly property.
"""
return self._closed
def _available_connections(self, key: "ConnectionKey") -> int:
"""
Return number of available connections.
The limit, limit_per_host and the connection key are taken into account.
        A return value of less than 1 means that no connections are
        available.
"""
# check total available connections
# If there are no limits, this will always return 1
total_remain = 1
if self._limit and (total_remain := self._limit - len(self._acquired)) <= 0:
return total_remain
# check limit per host
if host_remain := self._limit_per_host:
if acquired := self._acquired_per_host.get(key):
host_remain -= len(acquired)
if total_remain > host_remain:
return host_remain
return total_remain
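    # Worked example for _available_connections (illustrative numbers, not
    # from the source): with limit=100 and 97 connections acquired,
    # total_remain is 3; if limit_per_host=2 and this key already holds one
    # acquired connection, host_remain is 1 and wins as the tighter bound.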
async def connect(
self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout"
) -> Connection:
"""Get from pool or create new connection."""
key = req.connection_key
if (conn := await self._get(key, traces)) is not None:
# If we do not have to wait and we can get a connection from the pool
# we can avoid the timeout ceil logic and directly return the connection
return conn
async with ceil_timeout(timeout.connect, timeout.ceil_threshold):
if self._available_connections(key) <= 0:
await self._wait_for_available_connection(key, traces)
if (conn := await self._get(key, traces)) is not None:
return conn
placeholder = cast(
ResponseHandler, _TransportPlaceholder(self._placeholder_future)
)
self._acquired.add(placeholder)
if self._limit_per_host:
self._acquired_per_host[key].add(placeholder)
try:
            # Traces are sent inside the try block to ensure that
            # the placeholder is still cleaned up if an exception
            # is raised.
if traces:
for trace in traces:
await trace.send_connection_create_start()
proto = await self._create_connection(req, traces, timeout)
if traces:
for trace in traces:
await trace.send_connection_create_end()
except BaseException:
self._release_acquired(key, placeholder)
raise
else:
if self._closed:
proto.close()
raise ClientConnectionError("Connector is closed.")
# The connection was successfully created, drop the placeholder
# and add the real connection to the acquired set. There should
# be no awaits after the proto is added to the acquired set
# to ensure that the connection is not left in the acquired set
# on cancellation.
self._acquired.remove(placeholder)
self._acquired.add(proto)
if self._limit_per_host:
acquired_per_host = self._acquired_per_host[key]
acquired_per_host.remove(placeholder)
acquired_per_host.add(proto)
return Connection(self, key, proto, self._loop)
async def _wait_for_available_connection(
self, key: "ConnectionKey", traces: list["Trace"]
) -> None:
"""Wait for an available connection slot."""
# We loop here because there is a race between
# the connection limit check and the connection
# being acquired. If the connection is acquired
# between the check and the await statement, we
# need to loop again to check if the connection
# slot is still available.
attempts = 0
while True:
fut: asyncio.Future[None] = self._loop.create_future()
keyed_waiters = self._waiters[key]
keyed_waiters[fut] = None
if attempts:
# If we have waited before, we need to move the waiter
# to the front of the queue as otherwise we might get
# starved and hit the timeout.
keyed_waiters.move_to_end(fut, last=False)
try:
                # Traces are sent inside the try block to ensure that the
                # waiter is still cleaned up if an exception is raised.
if traces:
for trace in traces:
await trace.send_connection_queued_start()
await fut
if traces:
for trace in traces:
await trace.send_connection_queued_end()
finally:
                # Pop the waiter from the queue if it's still there and
                # was not already removed by _release_waiter.
keyed_waiters.pop(fut, None)
if not self._waiters.get(key, True):
del self._waiters[key]
if self._available_connections(key) > 0:
break
attempts += 1
async def _get(
self, key: "ConnectionKey", traces: list["Trace"]
) -> Connection | None:
"""Get next reusable connection for the key or None.
The connection will be marked as acquired.
"""
if (conns := self._conns.get(key)) is None:
return None
t1 = monotonic()
while conns:
proto, t0 = conns.popleft()
            # Reuse the connection if it's still connected and the
            # keepalive timeout has not been exceeded
if proto.is_connected() and t1 - t0 <= self._keepalive_timeout:
if not conns:
# The very last connection was reclaimed: drop the key
del self._conns[key]
self._acquired.add(proto)
if self._limit_per_host:
self._acquired_per_host[key].add(proto)
if traces:
for trace in traces:
try:
await trace.send_connection_reuseconn()
except BaseException:
self._release_acquired(key, proto)
raise
return Connection(self, key, proto, self._loop)
# Connection cannot be reused, close it
transport = proto.transport
proto.close()
# only for SSL transports
if not self._cleanup_closed_disabled and key.is_ssl:
self._cleanup_closed_transports.append(transport)
# No more connections: drop the key
del self._conns[key]
return None
def _release_waiter(self) -> None:
"""
Iterates over all waiters until one to be released is found.
The one to be released is not finished and
belongs to a host that has available connections.
"""
if not self._waiters:
return
        # The dict keys are ordered, so shuffle them to avoid iterating
        # in the same order on every call.
queues = list(self._waiters)
random.shuffle(queues)
for key in queues:
if self._available_connections(key) < 1:
continue
waiters = self._waiters[key]
while waiters:
waiter, _ = waiters.popitem(last=False)
if not waiter.done():
waiter.set_result(None)
return
def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
"""Release acquired connection."""
if self._closed:
# acquired connection is already released on connector closing
return
self._acquired.discard(proto)
if self._limit_per_host and (conns := self._acquired_per_host.get(key)):
conns.discard(proto)
if not conns:
del self._acquired_per_host[key]
self._release_waiter()
def _release(
self,
key: "ConnectionKey",
protocol: ResponseHandler,
*,
should_close: bool = False,
) -> None:
if self._closed:
# acquired connection is already released on connector closing
return
self._release_acquired(key, protocol)
if self._force_close or should_close or protocol.should_close:
transport = protocol.transport
protocol.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
return
self._conns[key].append((protocol, monotonic()))
if self._cleanup_handle is None:
self._cleanup_handle = helpers.weakref_handle(
self,
"_cleanup",
self._keepalive_timeout,
self._loop,
timeout_ceil_threshold=self._timeout_ceil_threshold,
)
async def _create_connection(
self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
raise NotImplementedError()
class _DNSCacheTable:
def __init__(self, ttl: float | None = None) -> None:
self._addrs_rr: dict[tuple[str, int], tuple[Iterator[ResolveResult], int]] = {}
self._timestamps: dict[tuple[str, int], float] = {}
self._ttl = ttl
def __contains__(self, host: object) -> bool:
return host in self._addrs_rr
def add(self, key: tuple[str, int], addrs: list[ResolveResult]) -> None:
self._addrs_rr[key] = (cycle(addrs), len(addrs))
if self._ttl is not None:
self._timestamps[key] = monotonic()
def remove(self, key: tuple[str, int]) -> None:
self._addrs_rr.pop(key, None)
if self._ttl is not None:
self._timestamps.pop(key, None)
def clear(self) -> None:
self._addrs_rr.clear()
self._timestamps.clear()
def next_addrs(self, key: tuple[str, int]) -> list[ResolveResult]:
loop, length = self._addrs_rr[key]
addrs = list(islice(loop, length))
# Consume one more element to shift internal state of `cycle`
next(loop)
return addrs
def expired(self, key: tuple[str, int]) -> bool:
if self._ttl is None:
return False
return self._timestamps[key] + self._ttl < monotonic()
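# _DNSCacheTable round-robins resolved addresses: next_addrs() returns the
# whole list, then advances the cycle by one so the next caller starts at a
# different address. A self-contained sketch of that cycle/islice pattern
# (illustration only, not aiohttp code):
def _sketch_round_robin() -> None:
    rr = cycle(["a", "b", "c"])
    first = list(islice(rr, 3))
    next(rr)  # consume one extra element to shift the rotation
    second = list(islice(rr, 3))
    assert first == ["a", "b", "c"]
    assert second == ["b", "c", "a"]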
def _make_ssl_context(verified: bool) -> SSLContext:
"""Create SSL context.
This method is not async-friendly and should be called from a thread
because it will load certificates from disk and do other blocking I/O.
"""
if ssl is None:
# No ssl support
return None # type: ignore[unreachable]
if verified:
sslcontext = ssl.create_default_context()
else:
sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
sslcontext.options |= ssl.OP_NO_SSLv2
sslcontext.options |= ssl.OP_NO_SSLv3
sslcontext.check_hostname = False
sslcontext.verify_mode = ssl.CERT_NONE
sslcontext.options |= ssl.OP_NO_COMPRESSION
sslcontext.set_default_verify_paths()
sslcontext.set_alpn_protocols(("http/1.1",))
return sslcontext
# The default SSLContext objects are created at import time
# since they do blocking I/O to load certificates from disk,
# and imports should always be done before the event loop starts
# or in a thread.
_SSL_CONTEXT_VERIFIED = _make_ssl_context(True)
_SSL_CONTEXT_UNVERIFIED = _make_ssl_context(False)
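# Per the note above, building a fresh context while an event loop is running
# should be offloaded to a thread. A hedged sketch of such a call (assuming
# Python 3.9+ for asyncio.to_thread; illustrative, not aiohttp API):
#
#     sslcontext = await asyncio.to_thread(_make_ssl_context, True)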
class TCPConnector(BaseConnector):
"""TCP connector.
verify_ssl - Set to True to check ssl certifications.
fingerprint - Pass the binary sha256
digest of the expected certificate in DER format to verify
that the certificate the server presents matches. See also
https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning
resolver - Enable DNS lookups and use this
resolver
use_dns_cache - Use memory cache for DNS lookups.
ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
family - socket address family
local_addr - local tuple of (host, port) to bind socket to
keepalive_timeout - (optional) Keep-alive timeout.
force_close - Set to True to force close and do reconnect
after each request (and between redirects).
limit - The total number of simultaneous connections.
limit_per_host - Number of simultaneous connections to one host.
enable_cleanup_closed - Enables clean-up closed ssl transports.
Disabled by default.
happy_eyeballs_delay - This is the “Connection Attempt Delay”
as defined in RFC 8305. To disable
the happy eyeballs algorithm, set to None.
interleave - “First Address Family Count” as defined in RFC 8305
loop - Optional event loop.
socket_factory - A SocketFactoryType function that, if supplied,
will be used to create sockets given an
AddrInfoType.
ssl_shutdown_timeout - DEPRECATED. Will be removed in aiohttp 4.0.
Grace period for SSL shutdown handshake on TLS
connections. Default is 0 seconds (immediate abort).
This parameter allowed for a clean SSL shutdown by
notifying the remote peer of connection closure,
while avoiding excessive delays during connector cleanup.
Note: Only takes effect on Python 3.11+.
"""
allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"})
def __init__(
self,
*,
use_dns_cache: bool = True,
ttl_dns_cache: int | None = 10,
family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC,
ssl: bool | Fingerprint | SSLContext = True,
local_addr: tuple[str, int] | None = None,
resolver: AbstractResolver | None = None,
keepalive_timeout: None | float | _SENTINEL = sentinel,
force_close: bool = False,
limit: int = 100,
limit_per_host: int = 0,
enable_cleanup_closed: bool = False,
timeout_ceil_threshold: float = 5,
happy_eyeballs_delay: float | None = 0.25,
interleave: int | None = None,
socket_factory: SocketFactoryType | None = None,
ssl_shutdown_timeout: _SENTINEL | None | float = sentinel,
):
super().__init__(
keepalive_timeout=keepalive_timeout,
force_close=force_close,
limit=limit,
limit_per_host=limit_per_host,
enable_cleanup_closed=enable_cleanup_closed,
timeout_ceil_threshold=timeout_ceil_threshold,
)
if not isinstance(ssl, SSL_ALLOWED_TYPES):
raise TypeError(
"ssl should be SSLContext, Fingerprint, or bool, "
f"got {ssl!r} instead."
)
self._ssl = ssl
self._resolver: AbstractResolver
if resolver is None:
self._resolver = DefaultResolver()
self._resolver_owner = True
else:
self._resolver = resolver
self._resolver_owner = False
self._use_dns_cache = use_dns_cache
self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
self._throttle_dns_futures: dict[tuple[str, int], set[asyncio.Future[None]]] = (
{}
)
self._family = family
self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr)
self._happy_eyeballs_delay = happy_eyeballs_delay
self._interleave = interleave
self._resolve_host_tasks: set[asyncio.Task[list[ResolveResult]]] = set()
self._socket_factory = socket_factory
self._ssl_shutdown_timeout: float | None
# Handle ssl_shutdown_timeout with warning for Python < 3.11
if ssl_shutdown_timeout is sentinel:
self._ssl_shutdown_timeout = 0
else:
# Deprecation warning for ssl_shutdown_timeout parameter
warnings.warn(
"The ssl_shutdown_timeout parameter is deprecated and will be removed in aiohttp 4.0",
DeprecationWarning,
stacklevel=2,
)
if (
sys.version_info < (3, 11)
and ssl_shutdown_timeout is not None
and ssl_shutdown_timeout != 0
):
warnings.warn(
f"ssl_shutdown_timeout={ssl_shutdown_timeout} is ignored on Python < 3.11; "
"only ssl_shutdown_timeout=0 is supported. The timeout will be ignored.",
RuntimeWarning,
stacklevel=2,
)
self._ssl_shutdown_timeout = ssl_shutdown_timeout
async def close(self, *, abort_ssl: bool = False) -> None:
"""Close all opened transports.
:param abort_ssl: If True, SSL connections will be aborted immediately
without performing the shutdown handshake. If False (default),
the behavior is determined by ssl_shutdown_timeout:
- If ssl_shutdown_timeout=0: connections are aborted
- If ssl_shutdown_timeout>0: graceful shutdown is performed
"""
if self._resolver_owner:
await self._resolver.close()
# Use abort_ssl param if explicitly set, otherwise use ssl_shutdown_timeout default
await super().close(abort_ssl=abort_ssl or self._ssl_shutdown_timeout == 0)
def _close_immediately(self, *, abort_ssl: bool = False) -> list[Awaitable[object]]:
for fut in chain.from_iterable(self._throttle_dns_futures.values()):
fut.cancel()
waiters = super()._close_immediately(abort_ssl=abort_ssl)
for t in self._resolve_host_tasks:
t.cancel()
waiters.append(t)
return waiters
@property
def family(self) -> int:
"""Socket family like AF_INET."""
return self._family
@property
def use_dns_cache(self) -> bool:
"""True if local DNS caching is enabled."""
return self._use_dns_cache
def clear_dns_cache(self, host: str | None = None, port: int | None = None) -> None:
"""Remove specified host/port or clear all dns local cache."""
if host is not None and port is not None:
self._cached_hosts.remove((host, port))
elif host is not None or port is not None:
raise ValueError("either both host and port or none of them are allowed")
else:
self._cached_hosts.clear()
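# For example, clear_dns_cache("example.com", 443) evicts a single entry,
# clear_dns_cache() flushes the whole cache, and clear_dns_cache("example.com")
# without a port raises ValueError.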
async def _resolve_host(
self, host: str, port: int, traces: Sequence["Trace"] | None = None
) -> list[ResolveResult]:
"""Resolve host and return list of addresses."""
if is_ip_address(host):
return [
{
"hostname": host,
"host": host,
"port": port,
"family": self._family,
"proto": 0,
"flags": 0,
}
]
if not self._use_dns_cache:
if traces:
for trace in traces:
await trace.send_dns_resolvehost_start(host)
res = await self._resolver.resolve(host, port, family=self._family)
if traces:
for trace in traces:
await trace.send_dns_resolvehost_end(host)
return res
key = (host, port)
if key in self._cached_hosts and not self._cached_hosts.expired(key):
# get result early, before any await (#4014)
result = self._cached_hosts.next_addrs(key)
if traces:
for trace in traces:
await trace.send_dns_cache_hit(host)
return result
futures: set[asyncio.Future[None]]
#
# If multiple connectors are resolving the same host, we wait
# for the first one to resolve and then use the result for all of them.
# We use a throttle to ensure that we only resolve the host once
# and then use the result for all the waiters.
#
if key in self._throttle_dns_futures:
# get futures early, before any await (#4014)
futures = self._throttle_dns_futures[key]
future: asyncio.Future[None] = self._loop.create_future()
futures.add(future)
if traces:
for trace in traces:
await trace.send_dns_cache_hit(host)
try:
await future
finally:
futures.discard(future)
return self._cached_hosts.next_addrs(key)
# update dict early, before any await (#4014)
self._throttle_dns_futures[key] = futures = set()
# In this case we need to create a task to ensure that we can shield
# the task from cancellation as cancelling this lookup should not cancel
# the underlying lookup or else the cancel event will get broadcast to
# all the waiters across all connections.
#
coro = self._resolve_host_with_throttle(key, host, port, futures, traces)
loop = asyncio.get_running_loop()
if sys.version_info >= (3, 12):
# Optimization for Python 3.12, try to send immediately
resolved_host_task = asyncio.Task(coro, loop=loop, eager_start=True)
else:
resolved_host_task = loop.create_task(coro)
if not resolved_host_task.done():
self._resolve_host_tasks.add(resolved_host_task)
resolved_host_task.add_done_callback(self._resolve_host_tasks.discard)
try:
return await asyncio.shield(resolved_host_task)
except asyncio.CancelledError:
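# Consume the task's eventual result or exception so that a lookup
# finishing after this caller was cancelled doesn't log
# "exception was never retrieved".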
def drop_exception(fut: "asyncio.Future[list[ResolveResult]]") -> None:
with suppress(Exception, asyncio.CancelledError):
fut.result()
resolved_host_task.add_done_callback(drop_exception)
raise
async def _resolve_host_with_throttle(
self,
key: tuple[str, int],
host: str,
port: int,
futures: set[asyncio.Future[None]],
traces: Sequence["Trace"] | None,
) -> list[ResolveResult]:
"""Resolve host and set result for all waiters.
This method must be run in a task and shielded from cancellation
to avoid cancelling the underlying lookup.
"""
try:
if traces:
for trace in traces:
await trace.send_dns_cache_miss(host)
for trace in traces:
await trace.send_dns_resolvehost_start(host)
addrs = await self._resolver.resolve(host, port, family=self._family)
if traces:
for trace in traces:
await trace.send_dns_resolvehost_end(host)
self._cached_hosts.add(key, addrs)
for fut in futures:
set_result(fut, None)
except BaseException as e:
# any DNS exception is set for the waiters to raise the same exception.
# This coro is always run in task that is shielded from cancellation so
# we should never be propagating cancellation here.
for fut in futures:
set_exception(fut, e)
raise
finally:
self._throttle_dns_futures.pop(key)
return self._cached_hosts.next_addrs(key)
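# Throttle contract in brief: the first caller for a (host, port) key
# creates the shielded lookup task above; concurrent callers find the key
# in _throttle_dns_futures, park on a per-caller future, and read the
# shared result from _cached_hosts once that future is resolved.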
async def _create_connection(
self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
"""Create connection.
Has same keyword arguments as BaseEventLoop.create_connection.
"""
if req.proxy:
_, proto = await self._create_proxy_connection(req, traces, timeout)
else:
_, proto = await self._create_direct_connection(req, traces, timeout)
return proto
def _get_ssl_context(self, req: ClientRequestBase) -> SSLContext | None:
"""Logic to get the correct SSL context
0. if req.ssl is false, return None
1. if ssl_context is specified in req, use it
2. if _ssl_context is specified in self, use it
3. otherwise:
1. if verify_ssl is not specified in req, use self.ssl_context
(will generate a default context according to self.verify_ssl)
2. if verify_ssl is True in req, generate a default SSL context
3. if verify_ssl is False in req, generate a SSL context that
won't verify
"""
if not req.is_ssl():
return None
if ssl is None: # pragma: no cover
raise RuntimeError("SSL is not supported.")
sslcontext = req.ssl
if isinstance(sslcontext, ssl.SSLContext):
return sslcontext
if sslcontext is not True:
# not verified or fingerprinted
return _SSL_CONTEXT_UNVERIFIED
sslcontext = self._ssl
if isinstance(sslcontext, ssl.SSLContext):
return sslcontext
if sslcontext is not True:
# not verified or fingerprinted
return _SSL_CONTEXT_UNVERIFIED
return _SSL_CONTEXT_VERIFIED
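# Resolution examples: req.ssl=True with self._ssl=True yields
# _SSL_CONTEXT_VERIFIED; req.ssl=False (or a Fingerprint) yields
# _SSL_CONTEXT_UNVERIFIED; an explicit SSLContext on the request wins
# over whatever was configured on the connector.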
def _get_fingerprint(self, req: ClientRequestBase) -> "Fingerprint | None":
ret = req.ssl
if isinstance(ret, Fingerprint):
return ret
ret = self._ssl
if isinstance(ret, Fingerprint):
return ret
return None
async def _wrap_create_connection(
self,
*args: Any,
addr_infos: list[AddrInfoType],
req: ClientRequestBase,
timeout: "ClientTimeout",
client_error: type[Exception] = ClientConnectorError,
**kwargs: Any,
) -> tuple[asyncio.Transport, ResponseHandler]:
try:
async with ceil_timeout(
timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
):
sock = await aiohappyeyeballs.start_connection(
addr_infos=addr_infos,
local_addr_infos=self._local_addr_infos,
happy_eyeballs_delay=self._happy_eyeballs_delay,
interleave=self._interleave,
loop=self._loop,
socket_factory=self._socket_factory,
)
# Add ssl_shutdown_timeout for Python 3.11+ when SSL is used
if (
kwargs.get("ssl")
and self._ssl_shutdown_timeout
and sys.version_info >= (3, 11)
):
kwargs["ssl_shutdown_timeout"] = self._ssl_shutdown_timeout
return await self._loop.create_connection(*args, **kwargs, sock=sock)
except cert_errors as exc:
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
except ssl_errors as exc:
raise ClientConnectorSSLError(req.connection_key, exc) from exc
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
raise client_error(req.connection_key, exc) from exc
def _warn_about_tls_in_tls(
self,
underlying_transport: asyncio.Transport,
req: ClientRequest,
) -> None:
"""Issue a warning if the requested URL has HTTPS scheme."""
if req.url.scheme != "https":
return
# Check if uvloop is being used, which supports TLS in TLS,
# otherwise assume that asyncio's native transport is being used.
if type(underlying_transport).__module__.startswith("uvloop"):
return
# Support in asyncio was added in Python 3.11 (bpo-44011)
asyncio_supports_tls_in_tls = sys.version_info >= (3, 11) or getattr(
underlying_transport,
"_start_tls_compatible",
False,
)
if asyncio_supports_tls_in_tls:
return
warnings.warn(
"An HTTPS request is being sent through an HTTPS proxy. "
"This support for TLS in TLS is known to be disabled "
"in the stdlib asyncio. This is why you'll probably see "
"an error in the log below.\n\n"
"It is possible to enable it via monkeypatching. "
"For more details, see:\n"
"* https://bugs.python.org/issue37179\n"
"* https://github.com/python/cpython/pull/28073\n\n"
"You can temporarily patch this as follows:\n"
"* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
"* https://github.com/aio-libs/aiohttp/discussions/6044\n",
RuntimeWarning,
source=self,
# stacklevel=3 attributes the warning a few frames up the stack,
# past the connector-internal calls that lead here.
stacklevel=3,
)
async def _start_tls_connection(
self,
underlying_transport: asyncio.Transport,
req: ClientRequest,
timeout: "ClientTimeout",
client_error: type[Exception] = ClientConnectorError,
) -> tuple[asyncio.BaseTransport, ResponseHandler]:
"""Wrap the raw TCP transport with TLS."""
tls_proto = self._factory() # Create a brand new proto for TLS
sslcontext = self._get_ssl_context(req)
if TYPE_CHECKING:
# _start_tls_connection is unreachable in the current code path
# if sslcontext is None.
assert sslcontext is not None
try:
async with ceil_timeout(
timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
):
try:
# ssl_shutdown_timeout is only available in Python 3.11+
if sys.version_info >= (3, 11) and self._ssl_shutdown_timeout:
tls_transport = await self._loop.start_tls(
underlying_transport,
tls_proto,
sslcontext,
server_hostname=req.server_hostname or req.url.raw_host,
ssl_handshake_timeout=timeout.total,
ssl_shutdown_timeout=self._ssl_shutdown_timeout,
)
else:
tls_transport = await self._loop.start_tls(
underlying_transport,
tls_proto,
sslcontext,
server_hostname=req.server_hostname or req.url.raw_host,
ssl_handshake_timeout=timeout.total,
)
except BaseException:
# We need to close the underlying transport since
# `start_tls()` probably failed before it had a
# chance to do this:
if self._ssl_shutdown_timeout == 0:
underlying_transport.abort()
else:
underlying_transport.close()
raise
if isinstance(tls_transport, asyncio.Transport):
fingerprint = self._get_fingerprint(req)
if fingerprint:
try:
fingerprint.check(tls_transport)
except ServerFingerprintMismatch:
tls_transport.close()
if not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(tls_transport)
raise
except cert_errors as exc:
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
except ssl_errors as exc:
raise ClientConnectorSSLError(req.connection_key, exc) from exc
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
raise client_error(req.connection_key, exc) from exc
except TypeError as type_err:
# Example cause looks like this:
# TypeError: transport <asyncio.sslproto._SSLProtocolTransport
# object at 0x7f760615e460> is not supported by start_tls()
raise ClientConnectionError(
"Cannot initialize a TLS-in-TLS connection to host "
f"{req.url.host!s}:{req.url.port:d} through an underlying connection "
f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
f"[{type_err!s}]"
) from type_err
else:
if tls_transport is None:
msg = "Failed to start TLS (possibly caused by closing transport)"
raise client_error(req.connection_key, OSError(msg))
tls_proto.connection_made(
tls_transport
) # Kick the state machine of the new TLS protocol
return tls_transport, tls_proto
def _convert_hosts_to_addr_infos(
self, hosts: list[ResolveResult]
) -> list[AddrInfoType]:
"""Converts the list of hosts to a list of addr_infos.
The list of hosts is the result of a DNS lookup. The list of
addr_infos is the result of a call to `socket.getaddrinfo()`.
"""
addr_infos: list[AddrInfoType] = []
for hinfo in hosts:
host = hinfo["host"]
is_ipv6 = ":" in host
family = socket.AF_INET6 if is_ipv6 else socket.AF_INET
if self._family and self._family != family:
continue
addr = (host, hinfo["port"], 0, 0) if is_ipv6 else (host, hinfo["port"])
addr_infos.append(
(family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)
)
return addr_infos
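# For example, {"host": "203.0.113.5", "port": 443, ...} becomes
# (AF_INET, SOCK_STREAM, IPPROTO_TCP, "", ("203.0.113.5", 443)), while an
# IPv6 host gains the extra (flowinfo, scope_id) pair: ("::1", 443, 0, 0).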
async def _create_direct_connection(
self,
req: ClientRequestBase,
traces: list["Trace"],
timeout: "ClientTimeout",
*,
client_error: type[Exception] = ClientConnectorError,
) -> tuple[asyncio.Transport, ResponseHandler]:
sslcontext = self._get_ssl_context(req)
fingerprint = self._get_fingerprint(req)
host = req.url.raw_host
assert host is not None
# Replace multiple trailing dots with a single one.
# A trailing dot is only present for fully-qualified domain names.
# See https://github.com/aio-libs/aiohttp/pull/7364.
if host.endswith(".."):
host = host.rstrip(".") + "."
port = req.url.port
assert port is not None
try:
# Cancelling this lookup should not cancel the underlying lookup
# or else the cancel event will get broadcast to all the waiters
# across all connections.
hosts = await self._resolve_host(host, port, traces=traces)
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
# In the proxy case this is not a ClientProxyConnectionError:
# the failure is in resolving the proxy's IP itself.
raise ClientConnectorDNSError(req.connection_key, exc) from exc
last_exc: Exception | None = None
addr_infos = self._convert_hosts_to_addr_infos(hosts)
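# Each failed attempt below prunes the candidate list (respecting the
# configured interleave) and retries with the remaining addresses; only
# once the list is exhausted is the last exception re-raised.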
while addr_infos:
# Strip trailing dots: certificates contain the FQDN without a trailing dot.
# See https://github.com/aio-libs/aiohttp/issues/3636
server_hostname = (
(req.server_hostname or host).rstrip(".") if sslcontext else None
)
try:
transp, proto = await self._wrap_create_connection(
self._factory,
timeout=timeout,
ssl=sslcontext,
addr_infos=addr_infos,
server_hostname=server_hostname,
req=req,
client_error=client_error,
)
except (ClientConnectorError, asyncio.TimeoutError) as exc:
last_exc = exc
aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave)
continue
if req.is_ssl() and fingerprint:
try:
fingerprint.check(transp)
except ServerFingerprintMismatch as exc:
transp.close()
if not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transp)
last_exc = exc
# Remove the bad peer from the list of addr_infos
sock: socket.socket = transp.get_extra_info("socket")
bad_peer = sock.getpeername()
aiohappyeyeballs.remove_addr_infos(addr_infos, bad_peer)
continue
return transp, proto
assert last_exc is not None
raise last_exc
async def _create_proxy_connection(
self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout"
) -> tuple[asyncio.BaseTransport, ResponseHandler]:
headers = CIMultiDict[str]() if req.proxy_headers is None else req.proxy_headers
headers[hdrs.HOST] = req.headers[hdrs.HOST]
url = req.proxy
assert url is not None
proxy_req = ClientRequestBase(
hdrs.METH_GET,
url,
headers=headers,
auth=req.proxy_auth,
loop=self._loop,
ssl=req.ssl,
)
# create connection to proxy server
transport, proto = await self._create_direct_connection(
proxy_req, [], timeout, client_error=ClientProxyConnectionError
)
auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
if auth is not None:
if not req.is_ssl():
req.headers[hdrs.PROXY_AUTHORIZATION] = auth
else:
proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth
if req.is_ssl():
self._warn_about_tls_in_tls(transport, req)
# For HTTPS requests over HTTP proxy
# we must notify proxy to tunnel connection
# so we send CONNECT command:
# CONNECT www.python.org:443 HTTP/1.1
# Host: www.python.org
#
# next we must do TLS handshake and so on
# to do this we must wrap raw socket into secure one
# asyncio handles this perfectly
proxy_req.method = hdrs.METH_CONNECT
proxy_req.url = req.url
key = req.connection_key._replace(
proxy=None, proxy_auth=None, proxy_headers_hash=None
)
conn = _ConnectTunnelConnection(self, key, proto, self._loop)
proxy_resp = await proxy_req._send(conn)
try:
protocol = conn._protocol
assert protocol is not None
# read_until_eof=True will ensure the connection isn't closed
# once the response is received and processed allowing
# START_TLS to work on the connection below.
protocol.set_response_params(
read_until_eof=True,
timeout_ceil_threshold=self._timeout_ceil_threshold,
)
resp = await proxy_resp.start(conn)
except BaseException:
proxy_resp.close()
conn.close()
raise
else:
conn._protocol = None
try:
if resp.status != 200:
message = resp.reason
if message is None:
message = HTTPStatus(resp.status).phrase
raise ClientHttpProxyError(
proxy_resp.request_info,
resp.history,
status=resp.status,
message=message,
headers=resp.headers,
)
except BaseException:
# It shouldn't be closed in `finally` because it's fed to
# `loop.start_tls()` and the docs say not to touch it after
# passing there.
transport.close()
raise
return await self._start_tls_connection(
# Access the old transport for the last time before it's
# closed and forgotten forever:
transport,
req=req,
timeout=timeout,
)
finally:
proxy_resp.close()
return transport, proto
class UnixConnector(BaseConnector):
"""Unix socket connector.
path - Unix socket path.
keepalive_timeout - (optional) Keep-alive timeout.
force_close - Set to True to force the connection to close and
reconnect after each request (and between redirects).
limit - The total number of simultaneous connections.
limit_per_host - Number of simultaneous connections to one host.
"""
allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"})
def __init__(
self,
path: str,
force_close: bool = False,
keepalive_timeout: _SENTINEL | float | None = sentinel,
limit: int = 100,
limit_per_host: int = 0,
) -> None:
super().__init__(
force_close=force_close,
keepalive_timeout=keepalive_timeout,
limit=limit,
limit_per_host=limit_per_host,
)
self._path = path
@property
def path(self) -> str:
"""Path to unix socket."""
return self._path
async def _create_connection(
self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
try:
async with ceil_timeout(
timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
):
_, proto = await self._loop.create_unix_connection(
self._factory, self._path
)
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc
return proto
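# Usage sketch for UnixConnector (the socket path is illustrative):
#
#     connector = UnixConnector(path="/tmp/app.sock")
#     async with ClientSession(connector=connector) as session:
#         async with session.get("http://localhost/status") as resp:
#             ...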
class NamedPipeConnector(BaseConnector):
"""Named pipe connector.
Only supported by the proactor event loop.
See also: https://docs.python.org/3/library/asyncio-eventloop.html
path - Windows named pipe path.
keepalive_timeout - (optional) Keep-alive timeout.
force_close - Set to True to force the connection to close and
reconnect after each request (and between redirects).
limit - The total number of simultaneous connections.
limit_per_host - Number of simultaneous connections to one host.
"""
allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"})
def __init__(
self,
path: str,
force_close: bool = False,
keepalive_timeout: _SENTINEL | float | None = sentinel,
limit: int = 100,
limit_per_host: int = 0,
) -> None:
super().__init__(
force_close=force_close,
keepalive_timeout=keepalive_timeout,
limit=limit,
limit_per_host=limit_per_host,
)
if not isinstance(
self._loop,
asyncio.ProactorEventLoop, # type: ignore[attr-defined]
):
raise RuntimeError(
"Named Pipes only available in proactor loop under windows"
)
self._path = path
@property
def path(self) -> str:
"""Path to the named pipe."""
return self._path
async def _create_connection(
self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
try:
async with ceil_timeout(
timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
):
_, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined]
self._factory, self._path
)
# Let the event loop run once so that connection_made() is called and
# the transport is set; otherwise it would still be None when
# client.py's _request() asserts `conn.transport is not None`.
await asyncio.sleep(0)
# An alternative would be to set the transport manually, e.g.
# `proto.transport = trans`.
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
raise ClientConnectorError(req.connection_key, exc) from exc
return cast(ResponseHandler, proto)
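# Usage sketch for NamedPipeConnector (Windows proactor loop only; the
# pipe name is illustrative):
#
#     connector = NamedPipeConnector(path=r"\\.\pipe\my-api")
#     async with ClientSession(connector=connector) as session:
#         ...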
# Tests of http client with custom Connector
import asyncio
import gc
import hashlib
import platform
import socket
import ssl
import sys
import uuid
import warnings
from collections import defaultdict, deque
from collections.abc import Awaitable, Callable, Iterator, Sequence
from concurrent import futures
from contextlib import closing, suppress
from typing import Any, Literal, NoReturn
from unittest import mock
import pytest
from pytest_mock import MockerFixture
from yarl import URL
import aiohttp
from aiohttp import (
ClientRequest,
ClientSession,
ClientTimeout,
connector as connector_module,
web,
)
from aiohttp.abc import ResolveResult
from aiohttp.client_proto import ResponseHandler
from aiohttp.client_reqrep import ClientRequestArgs, ConnectionKey
from aiohttp.connector import (
_SSL_CONTEXT_UNVERIFIED,
_SSL_CONTEXT_VERIFIED,
AddrInfoType,
Connection,
TCPConnector,
_ConnectTunnelConnection,
_DNSCacheTable,
)
from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer
from aiohttp.test_utils import unused_port
from aiohttp.tracing import Trace
if sys.version_info >= (3, 11):
from typing import Unpack
_RequestMaker = Callable[[str, URL, Unpack[ClientRequestArgs]], ClientRequest]
else:
_RequestMaker = Any
@pytest.fixture
def key() -> ConnectionKey:
# Connection key
return ConnectionKey("localhost", 80, False, True, None, None, None)
@pytest.fixture
def key2() -> ConnectionKey:
# Connection key
return ConnectionKey("localhost", 80, False, True, None, None, None)
@pytest.fixture
def other_host_key2() -> ConnectionKey:
# Connection key
return ConnectionKey("otherhost", 80, False, True, None, None, None)
@pytest.fixture
def ssl_key() -> ConnectionKey:
# Connection key
return ConnectionKey("localhost", 80, True, True, None, None, None)
@pytest.fixture
def unix_server(
loop: asyncio.AbstractEventLoop, unix_sockname: str
) -> Iterator[Callable[[web.Application], Awaitable[None]]]:
runners = []
async def go(app: web.Application) -> None:
runner = web.AppRunner(app)
runners.append(runner)
await runner.setup()
site = web.UnixSite(runner, unix_sockname)
await site.start()
yield go
for runner in runners:
loop.run_until_complete(runner.cleanup())
@pytest.fixture
def named_pipe_server(
proactor_loop: asyncio.AbstractEventLoop, pipe_name: str
) -> Iterator[Callable[[web.Application], Awaitable[None]]]:
runners = []
async def go(app: web.Application) -> None:
runner = web.AppRunner(app)
runners.append(runner)
await runner.setup()
site = web.NamedPipeSite(runner, pipe_name)
await site.start()
yield go
for runner in runners:
proactor_loop.run_until_complete(runner.cleanup())
def create_mocked_conn(
conn_closing_result: asyncio.AbstractEventLoop | None = None,
should_close: bool = True,
**kwargs: object,
) -> mock.Mock:
assert "loop" not in kwargs
try:
loop = asyncio.get_running_loop()
except RuntimeError:
loop = asyncio.get_event_loop()
f = loop.create_future()
proto: mock.Mock = mock.create_autospec(
ResponseHandler, instance=True, should_close=should_close, closed=f
)
f.set_result(conn_closing_result)
return proto
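# Note: the helper above returns an autospec'd ResponseHandler whose
# `closed` future is already resolved, so connector cleanup paths that
# await `proto.closed` complete immediately under test.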
async def test_connection_del(loop: asyncio.AbstractEventLoop) -> None:
connector = mock.Mock()
key = mock.Mock()
protocol = mock.Mock()
loop.set_debug(False)
conn = Connection(connector, key, protocol, loop=loop)
exc_handler = mock.Mock()
loop.set_exception_handler(exc_handler)
with pytest.warns(ResourceWarning):
del conn
gc.collect()
await asyncio.sleep(0)
connector._release.assert_called_with(key, protocol, should_close=True)
msg = {
"message": mock.ANY,
"client_connection": mock.ANY,
}
exc_handler.assert_called_with(loop, msg)
def test_connection_del_loop_debug(loop: asyncio.AbstractEventLoop) -> None:
connector = mock.Mock()
key = mock.Mock()
protocol = mock.Mock()
loop.set_debug(True)
conn = Connection(connector, key, protocol, loop=loop)
exc_handler = mock.Mock()
loop.set_exception_handler(exc_handler)
with pytest.warns(ResourceWarning):
del conn
gc.collect()
msg = {
"message": mock.ANY,
"client_connection": mock.ANY,
"source_traceback": mock.ANY,
}
exc_handler.assert_called_with(loop, msg)
def test_connection_del_loop_closed(loop: asyncio.AbstractEventLoop) -> None:
connector = mock.Mock()
key = mock.Mock()
protocol = mock.Mock()
loop.set_debug(True)
conn = Connection(connector, key, protocol, loop=loop)
exc_handler = mock.Mock()
loop.set_exception_handler(exc_handler)
loop.close()
with pytest.warns(ResourceWarning):
del conn
gc.collect()
assert not connector._release.called
assert not exc_handler.called
async def test_del(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None:
conn = aiohttp.BaseConnector()
proto = create_mocked_conn(loop, should_close=False)
conn._release(key, proto)
conns_impl = conn._conns
exc_handler = mock.Mock()
loop.set_exception_handler(exc_handler)
with pytest.warns(ResourceWarning):
del conn
gc.collect()
assert not conns_impl
proto.close.assert_called_with()
msg = {
"connector": mock.ANY, # conn was deleted
"connections": mock.ANY,
"message": "Unclosed connector",
}
if loop.get_debug():
msg["source_traceback"] = mock.ANY
exc_handler.assert_called_with(loop, msg)
@pytest.mark.xfail
async def test_del_with_scheduled_cleanup( # type: ignore[misc]
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
loop.set_debug(True)
conn = aiohttp.BaseConnector(keepalive_timeout=0.01)
transp = create_mocked_conn(loop)
conn._conns[key] = deque([(transp, 123)])
conns_impl = conn._conns
exc_handler = mock.Mock()
loop.set_exception_handler(exc_handler)
with pytest.warns(ResourceWarning):
# deletion alone doesn't free the connector here because the loop
# keeps a strong reference to the connector's instance method
del conn
await asyncio.sleep(0.01)
gc.collect()
assert not conns_impl
transp.close.assert_called_with()
msg = {"connector": mock.ANY, "message": "Unclosed connector"} # conn was deleted
if loop.get_debug():
msg["source_traceback"] = mock.ANY
exc_handler.assert_called_with(loop, msg)
@pytest.mark.skipif(
sys.implementation.name != "cpython", reason="CPython GC is required for the test"
)
def test_del_with_closed_loop( # type: ignore[misc]
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
async def make_conn() -> aiohttp.BaseConnector:
return aiohttp.BaseConnector()
conn = loop.run_until_complete(make_conn())
transp = create_mocked_conn(loop)
conn._conns[key] = deque([(transp, 123)])
conns_impl = conn._conns
exc_handler = mock.Mock()
loop.set_exception_handler(exc_handler)
loop.close()
with pytest.warns(ResourceWarning):
del conn
gc.collect()
assert not conns_impl
assert not transp.close.called
assert exc_handler.called
async def test_del_empty_connector(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector()
exc_handler = mock.Mock()
loop.set_exception_handler(exc_handler)
del conn
assert not exc_handler.called
async def test_create_conn() -> None:
conn = aiohttp.BaseConnector()
with pytest.raises(NotImplementedError):
await conn._create_connection(object(), [], object()) # type: ignore[arg-type]
await conn.close()
async def test_async_context_manager(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector()
async with conn as c:
assert conn is c
assert conn.closed
async def test_close(key: ConnectionKey) -> None:
proto = create_mocked_conn()
conn = aiohttp.BaseConnector()
assert not conn.closed
conn._conns[key] = deque([(proto, 0)])
await conn.close()
assert not conn._conns
assert proto.close.called
assert conn.closed
async def test_close_with_proto_closed_none(key: ConnectionKey) -> None:
"""Test close when protocol.closed is None."""
# Create protocols where closed property returns None
proto1 = mock.create_autospec(ResponseHandler, instance=True)
proto1.closed = None
proto1.close = mock.Mock()
proto2 = mock.create_autospec(ResponseHandler, instance=True)
proto2.closed = None
proto2.close = mock.Mock()
conn = aiohttp.BaseConnector()
conn._conns[key] = deque([(proto1, 0)])
conn._acquired.add(proto2)
# Close the connector - this should handle the case where proto.closed is None
await conn.close()
# Verify close was called on both protocols
assert proto1.close.called
assert proto2.close.called
assert conn.closed
async def test_get(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None:
conn = aiohttp.BaseConnector()
try:
assert await conn._get(key, []) is None
proto = create_mocked_conn(loop)
conn._conns[key] = deque([(proto, loop.time())])
connection = await conn._get(key, [])
assert connection is not None
assert connection.protocol == proto
connection.close()
finally:
await conn.close()
async def test_get_unconnected_proto(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector()
key = ConnectionKey("localhost", 80, False, False, None, None, None)
try:
assert await conn._get(key, []) is None
proto = create_mocked_conn(loop)
conn._conns[key] = deque([(proto, loop.time())])
connection = await conn._get(key, [])
assert connection is not None
assert connection.protocol == proto
connection.close()
assert await conn._get(key, []) is None
conn._conns[key] = deque([(proto, loop.time())])
proto.is_connected = lambda *args: False
assert await conn._get(key, []) is None
finally:
await conn.close()
async def test_get_unconnected_proto_ssl(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector()
key = ConnectionKey("localhost", 80, True, False, None, None, None)
try:
assert await conn._get(key, []) is None
proto = create_mocked_conn(loop)
conn._conns[key] = deque([(proto, loop.time())])
connection = await conn._get(key, [])
assert connection is not None
assert connection.protocol == proto
connection.close()
assert await conn._get(key, []) is None
conn._conns[key] = deque([(proto, loop.time())])
proto.is_connected = lambda *args: False
assert await conn._get(key, []) is None
finally:
await conn.close()
async def test_get_expired(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector()
key = ConnectionKey("localhost", 80, False, False, None, None, None)
try:
assert await conn._get(key, []) is None
proto = create_mocked_conn(loop)
conn._conns[key] = deque([(proto, loop.time() - 1000)])
assert await conn._get(key, []) is None
assert not conn._conns
finally:
await conn.close()
@pytest.mark.usefixtures("enable_cleanup_closed")
async def test_get_expired_ssl(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector(enable_cleanup_closed=True)
key = ConnectionKey("localhost", 80, True, False, None, None, None)
try:
assert await conn._get(key, []) is None
proto = create_mocked_conn(loop)
transport = proto.transport
conn._conns[key] = deque([(proto, loop.time() - 1000)])
assert await conn._get(key, []) is None
assert not conn._conns
assert conn._cleanup_closed_transports == [transport]
finally:
await conn.close()
async def test_release_acquired(key: ConnectionKey) -> None:
proto = create_mocked_conn()
conn = aiohttp.BaseConnector(limit=5, limit_per_host=10)
with mock.patch.object(conn, "_release_waiter", autospec=True, spec_set=True) as m:
conn._acquired.add(proto)
conn._acquired_per_host[key].add(proto)
conn._release_acquired(key, proto)
assert 0 == len(conn._acquired)
assert 0 == len(conn._acquired_per_host)
assert m.called
conn._release_acquired(key, proto)
assert 0 == len(conn._acquired)
assert 0 == len(conn._acquired_per_host)
await conn.close()
async def test_release_acquired_closed(key: ConnectionKey) -> None:
proto = create_mocked_conn()
conn = aiohttp.BaseConnector(limit=5)
with mock.patch.object(conn, "_release_waiter", autospec=True, spec_set=True) as m:
conn._acquired.add(proto)
conn._acquired_per_host[key].add(proto)
conn._closed = True
conn._release_acquired(key, proto)
assert 1 == len(conn._acquired)
assert 1 == len(conn._acquired_per_host[key])
assert not m.called
await conn.close()
async def test_release(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None:
conn = aiohttp.BaseConnector()
with mock.patch.object(conn, "_release_waiter", autospec=True, spec_set=True) as m:
proto = create_mocked_conn(loop, should_close=False)
conn._acquired.add(proto)
conn._acquired_per_host[key].add(proto)
conn._release(key, proto)
assert m.called
assert conn._cleanup_handle is not None
assert conn._conns[key][0][0] == proto
assert conn._conns[key][0][1] == pytest.approx(loop.time(), abs=0.1)
assert not conn._cleanup_closed_transports
await conn.close()
@pytest.mark.usefixtures("enable_cleanup_closed")
async def test_release_ssl_transport( # type: ignore[misc]
loop: asyncio.AbstractEventLoop, ssl_key: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector(enable_cleanup_closed=True)
with mock.patch.object(conn, "_release_waiter", autospec=True, spec_set=True):
proto = create_mocked_conn(loop)
transport = proto.transport
conn._acquired.add(proto)
conn._acquired_per_host[ssl_key].add(proto)
conn._release(ssl_key, proto, should_close=True)
assert conn._cleanup_closed_transports == [transport]
await conn.close()
async def test_release_already_closed(key: ConnectionKey) -> None:
conn = aiohttp.BaseConnector()
proto = create_mocked_conn()
conn._acquired.add(proto)
await conn.close()
with mock.patch.object(
conn, "_release_acquired", autospec=True, spec_set=True
) as m1:
with mock.patch.object(
conn, "_release_waiter", autospec=True, spec_set=True
) as m2:
conn._release(key, proto)
assert not m1.called
assert not m2.called
async def test_release_waiter_no_limit(
loop: asyncio.AbstractEventLoop, key: ConnectionKey, key2: ConnectionKey
) -> None:
# limit is 0
conn = aiohttp.BaseConnector(limit=0)
w = mock.Mock()
w.done.return_value = False
conn._waiters[key][w] = None
conn._release_waiter()
assert len(conn._waiters[key]) == 0
assert w.done.called
await conn.close()
async def test_release_waiter_first_available(
loop: asyncio.AbstractEventLoop, key: ConnectionKey, key2: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector()
w1, w2 = mock.Mock(), mock.Mock()
w1.done.return_value = False
w2.done.return_value = False
conn._waiters[key][w2] = None
conn._waiters[key2][w1] = None
conn._release_waiter()
assert (
w1.set_result.called
and not w2.set_result.called
or not w1.set_result.called
and w2.set_result.called
)
await conn.close()
async def test_release_waiter_release_first(
loop: asyncio.AbstractEventLoop, key: ConnectionKey, key2: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector(limit=1)
w1, w2 = mock.Mock(), mock.Mock()
w1.done.return_value = False
w2.done.return_value = False
conn._waiters[key][w1] = None
conn._waiters[key][w2] = None
conn._release_waiter()
assert w1.set_result.called
assert not w2.set_result.called
await conn.close()
async def test_release_waiter_skip_done_waiter(
loop: asyncio.AbstractEventLoop, key: ConnectionKey, key2: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector(limit=1)
w1, w2 = mock.Mock(), mock.Mock()
w1.done.return_value = True
w2.done.return_value = False
conn._waiters[key][w1] = None
conn._waiters[key][w2] = None
conn._release_waiter()
assert not w1.set_result.called
assert w2.set_result.called
await conn.close()
async def test_release_waiter_per_host(
loop: asyncio.AbstractEventLoop, key: ConnectionKey, key2: ConnectionKey
) -> None:
# no limit
conn = aiohttp.BaseConnector(limit=0, limit_per_host=2)
w1, w2 = mock.Mock(), mock.Mock()
w1.done.return_value = False
w2.done.return_value = False
conn._waiters[key][w1] = None
conn._waiters[key2][w2] = None
conn._release_waiter()
assert (w1.set_result.called and not w2.set_result.called) or (
not w1.set_result.called and w2.set_result.called
)
await conn.close()
async def test_release_waiter_no_available(
loop: asyncio.AbstractEventLoop, key: ConnectionKey, key2: ConnectionKey
) -> None:
# limit is 0
conn = aiohttp.BaseConnector(limit=0)
w = mock.Mock()
w.done.return_value = False
conn._waiters[key][w] = None
with mock.patch.object(
conn, "_available_connections", autospec=True, spec_set=True, return_value=0
):
conn._release_waiter()
assert len(conn._waiters) == 1
assert not w.done.called
await conn.close()
async def test_release_close(key: ConnectionKey) -> None:
conn = aiohttp.BaseConnector()
proto = create_mocked_conn(should_close=True)
conn._acquired.add(proto)
conn._release(key, proto)
assert not conn._conns
assert proto.close.called
await conn.close()
async def test__release_acquired_per_host1(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector(limit_per_host=10)
conn._release_acquired(key, create_mocked_conn(loop))
assert len(conn._acquired_per_host) == 0
await conn.close()
async def test__release_acquired_per_host2(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector(limit_per_host=10)
handler = create_mocked_conn(loop)
conn._acquired_per_host[key].add(handler)
conn._release_acquired(key, handler)
assert len(conn._acquired_per_host) == 0
await conn.close()
async def test__release_acquired_per_host3(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector(limit_per_host=10)
handler = create_mocked_conn(loop)
handler2 = create_mocked_conn(loop)
conn._acquired_per_host[key].add(handler)
conn._acquired_per_host[key].add(handler2)
conn._release_acquired(key, handler)
assert len(conn._acquired_per_host) == 1
assert conn._acquired_per_host[key] == {handler2}
await conn.close()
async def test_tcp_connector_certificate_error(
loop: asyncio.AbstractEventLoop,
start_connection: mock.AsyncMock,
make_client_request: _RequestMaker,
) -> None:
req = make_client_request("GET", URL("https://127.0.0.1:443"), loop=loop)
conn = aiohttp.TCPConnector()
with mock.patch.object(
conn._loop,
"create_connection",
autospec=True,
spec_set=True,
side_effect=ssl.CertificateError,
):
with pytest.raises(aiohttp.ClientConnectorCertificateError) as ctx:
await conn.connect(req, [], ClientTimeout())
assert isinstance(ctx.value, ssl.CertificateError)
assert isinstance(ctx.value.certificate_error, ssl.CertificateError)
assert isinstance(ctx.value, aiohttp.ClientSSLError)
await conn.close()
async def test_tcp_connector_server_hostname_default(
loop: asyncio.AbstractEventLoop,
start_connection: mock.AsyncMock,
make_client_request: _RequestMaker,
) -> None:
conn = aiohttp.TCPConnector()
with mock.patch.object(
conn._loop, "create_connection", autospec=True, spec_set=True
) as create_connection:
create_connection.return_value = mock.Mock(), mock.Mock()
req = make_client_request("GET", URL("https://127.0.0.1:443"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
assert create_connection.call_args.kwargs["server_hostname"] == "127.0.0.1"
await conn.close()
async def test_tcp_connector_server_hostname_override(
loop: asyncio.AbstractEventLoop,
start_connection: mock.AsyncMock,
make_client_request: _RequestMaker,
) -> None:
conn = aiohttp.TCPConnector()
with mock.patch.object(
conn._loop, "create_connection", autospec=True, spec_set=True
) as create_connection:
create_connection.return_value = mock.Mock(), mock.Mock()
req = make_client_request(
"GET", URL("https://127.0.0.1:443"), loop=loop, server_hostname="localhost"
)
with closing(await conn.connect(req, [], ClientTimeout())):
assert create_connection.call_args.kwargs["server_hostname"] == "localhost"
await conn.close()
async def test_tcp_connector_multiple_hosts_errors(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
conn = aiohttp.TCPConnector()
ip1 = "192.168.1.1"
ip2 = "192.168.1.2"
ip3 = "192.168.1.3"
ip4 = "192.168.1.4"
ip5 = "192.168.1.5"
ips = [ip1, ip2, ip3, ip4, ip5]
addrs_tried = []
ips_tried = []
fingerprint = hashlib.sha256(b"foo").digest()
req = make_client_request(
"GET",
URL("https://mocked.host"),
ssl=aiohttp.Fingerprint(fingerprint),
loop=loop,
)
async def _resolve_host(
host: str, port: int, traces: object = None
) -> list[ResolveResult]:
return [
{
"hostname": host,
"host": ip,
"port": port,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
for ip in ips
]
os_error = certificate_error = ssl_error = fingerprint_error = False
connected = False
async def start_connection(
addr_infos: Sequence[AddrInfoType], **kwargs: object
) -> socket.socket:
first_addr_info = addr_infos[0]
first_addr_info_addr = first_addr_info[-1]
addrs_tried.append(first_addr_info_addr)
mock_socket = mock.create_autospec(socket.socket, spec_set=True, instance=True)
mock_socket.getpeername.return_value = first_addr_info_addr
return mock_socket # type: ignore[no-any-return]
async def create_connection(
*args: object, sock: socket.socket | None = None, **kwargs: object
) -> tuple[ResponseHandler, ResponseHandler]:
nonlocal os_error, certificate_error, ssl_error, fingerprint_error
nonlocal connected
assert isinstance(sock, socket.socket)
addr_info = sock.getpeername()
ip = addr_info[0]
ips_tried.append(ip)
if ip == ip1:
os_error = True
raise OSError
if ip == ip2:
certificate_error = True
raise ssl.CertificateError
if ip == ip3:
ssl_error = True
raise ssl.SSLError
if ip == ip4:
# Close the socket since we are not actually connecting
# and we don't want to leak it.
sock.close()
fingerprint_error = True
tr = create_mocked_conn(loop)
pr = create_mocked_conn(loop)
def get_extra_info(param: str) -> object:
if param == "sslcontext":
return True
if param == "ssl_object":
s = mock.Mock()
s.getpeercert.return_value = b"not foo"
return s
if param == "peername":
return ("192.168.1.5", 12345)
if param == "socket":
return sock
assert False, param
tr.get_extra_info = get_extra_info
return tr, pr
if ip == ip5:
# Close the socket since we are not actually connecting
# and we don't want to leak it.
sock.close()
connected = True
tr = create_mocked_conn(loop)
pr = create_mocked_conn(loop)
def get_extra_info(param: str) -> object:
if param == "sslcontext":
return True
if param == "ssl_object":
s = mock.Mock()
s.getpeercert.return_value = b"foo"
return s
assert False
tr.get_extra_info = get_extra_info
return tr, pr
assert False
with (
mock.patch.object(
conn,
"_resolve_host",
autospec=True,
spec_set=True,
side_effect=_resolve_host,
),
mock.patch.object(
conn._loop,
"create_connection",
autospec=True,
spec_set=True,
side_effect=create_connection,
),
mock.patch(
"aiohttp.connector.aiohappyeyeballs.start_connection", start_connection
),
):
established_connection = await conn.connect(req, [], ClientTimeout())
assert ips_tried == ips
assert addrs_tried == [(ip, 443) for ip in ips]
assert os_error
assert certificate_error
assert ssl_error
assert fingerprint_error
assert connected
established_connection.close()
await conn.close()
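# The parametrized delays below exercise the "Connection Attempt Delay"
# knob: small positive values stagger the concurrent IPv6/IPv4 attempts,
# while None disables Happy Eyeballs entirely (sequential attempts).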
@pytest.mark.parametrize(
("happy_eyeballs_delay"),
[0.1, 0.25, None],
)
async def test_tcp_connector_happy_eyeballs( # type: ignore[misc]
loop: asyncio.AbstractEventLoop,
happy_eyeballs_delay: float | None,
make_client_request: _RequestMaker,
) -> None:
conn = aiohttp.TCPConnector(happy_eyeballs_delay=happy_eyeballs_delay)
ip1 = "dead::beef::"
ip2 = "192.168.1.1"
ips = [ip1, ip2]
addrs_tried = []
req = make_client_request(
"GET",
URL("https://mocked.host"),
loop=loop,
)
async def _resolve_host(
host: str, port: int, traces: object = None
) -> list[ResolveResult]:
return [
{
"hostname": host,
"host": ip,
"port": port,
"family": socket.AF_INET6 if ":" in ip else socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
for ip in ips
]
os_error = False
connected = False
async def sock_connect(*args: tuple[str, int], **kwargs: object) -> None:
addr = args[1]
nonlocal os_error
addrs_tried.append(addr)
if addr[0] == ip1:
os_error = True
raise OSError
async def create_connection(
*args: object, sock: socket.socket | None = None, **kwargs: object
) -> tuple[ResponseHandler, ResponseHandler]:
assert isinstance(sock, socket.socket)
# Close the socket since we are not actually connecting
# and we don't want to leak it.
sock.close()
nonlocal connected
connected = True
tr = create_mocked_conn(loop)
pr = create_mocked_conn(loop)
return tr, pr
with mock.patch.object(
conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host
):
with mock.patch.object(
conn._loop,
"sock_connect",
autospec=True,
spec_set=True,
side_effect=sock_connect,
):
with mock.patch.object(
conn._loop,
"create_connection",
autospec=True,
spec_set=True,
side_effect=create_connection,
):
established_connection = await conn.connect(req, [], ClientTimeout())
assert addrs_tried == [(ip1, 443, 0, 0), (ip2, 443)]
assert os_error
assert connected
established_connection.close()
await conn.close()
async def test_tcp_connector_interleave(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
conn = aiohttp.TCPConnector(interleave=2)
ip1 = "192.168.1.1"
ip2 = "192.168.1.2"
ip3 = "dead::beef::"
ip4 = "aaaa::beef::"
ip5 = "192.168.1.5"
ips = [ip1, ip2, ip3, ip4, ip5]
success_ips = []
interleave_val = None
req = make_client_request(
"GET",
URL("https://mocked.host"),
loop=loop,
)
async def _resolve_host(
host: str, port: int, traces: object = None
) -> list[ResolveResult]:
return [
{
"hostname": host,
"host": ip,
"port": port,
"family": socket.AF_INET6 if ":" in ip else socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
for ip in ips
]
async def start_connection(
addr_infos: Sequence[AddrInfoType],
*,
interleave: int | None = None,
**kwargs: object,
) -> socket.socket:
nonlocal interleave_val
interleave_val = interleave
# Mock the 4th host connecting successfully
fourth_addr_info = addr_infos[3]
fourth_addr_info_addr = fourth_addr_info[-1]
mock_socket = mock.create_autospec(socket.socket, spec_set=True, instance=True)
mock_socket.getpeername.return_value = fourth_addr_info_addr
return mock_socket # type: ignore[no-any-return]
async def create_connection(
*args: object, sock: socket.socket | None = None, **kwargs: object
) -> tuple[ResponseHandler, ResponseHandler]:
assert isinstance(sock, socket.socket)
addr_info = sock.getpeername()
ip = addr_info[0]
success_ips.append(ip)
# Close the socket since we are not actually connecting
# and we don't want to leak it.
sock.close()
tr = create_mocked_conn(loop)
pr = create_mocked_conn(loop)
return tr, pr
with (
mock.patch.object(
conn,
"_resolve_host",
autospec=True,
spec_set=True,
side_effect=_resolve_host,
),
mock.patch.object(
conn._loop,
"create_connection",
autospec=True,
spec_set=True,
side_effect=create_connection,
),
mock.patch(
"aiohttp.connector.aiohappyeyeballs.start_connection", start_connection
),
):
established_connection = await conn.connect(req, [], ClientTimeout())
assert success_ips == [ip4]
assert interleave_val == 2
established_connection.close()
await conn.close()
async def test_tcp_connector_family_is_respected(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
conn = aiohttp.TCPConnector(family=socket.AF_INET)
ip1 = "dead::beef::"
ip2 = "192.168.1.1"
ips = [ip1, ip2]
addrs_tried = []
req = make_client_request(
"GET",
URL("https://mocked.host"),
loop=loop,
)
async def _resolve_host(
host: str, port: int, traces: object = None
) -> list[ResolveResult]:
return [
{
"hostname": host,
"host": ip,
"port": port,
"family": socket.AF_INET6 if ":" in ip else socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
for ip in ips
]
connected = False
async def sock_connect(*args: tuple[str, int], **kwargs: object) -> None:
addr = args[1]
addrs_tried.append(addr)
async def create_connection(
*args: object, sock: socket.socket | None = None, **kwargs: object
) -> tuple[ResponseHandler, ResponseHandler]:
assert isinstance(sock, socket.socket)
# Close the socket since we are not actually connecting
# and we don't want to leak it.
sock.close()
nonlocal connected
connected = True
tr = create_mocked_conn(loop)
pr = create_mocked_conn(loop)
return tr, pr
with mock.patch.object(
conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host
):
with mock.patch.object(
conn._loop,
"sock_connect",
autospec=True,
spec_set=True,
side_effect=sock_connect,
):
with mock.patch.object(
conn._loop,
"create_connection",
autospec=True,
spec_set=True,
side_effect=create_connection,
):
established_connection = await conn.connect(req, [], ClientTimeout())
# We should only try the IPv4 address since we specified
# the family to be AF_INET
assert addrs_tried == [(ip2, 443)]
assert connected
established_connection.close()
@pytest.mark.parametrize(
("request_url"),
[
("http://mocked.host"),
("https://mocked.host"),
],
)
async def test_tcp_connector_multiple_hosts_one_timeout( # type: ignore[misc]
loop: asyncio.AbstractEventLoop,
request_url: str,
make_client_request: _RequestMaker,
) -> None:
conn = aiohttp.TCPConnector()
ip1 = "192.168.1.1"
ip2 = "192.168.1.2"
ips = [ip1, ip2]
ips_tried = []
ips_success = []
timeout_error = False
connected = False
req = make_client_request(
"GET",
URL(request_url),
loop=loop,
)
async def _resolve_host(
host: str, port: int, traces: object = None
) -> list[ResolveResult]:
return [
{
"hostname": host,
"host": ip,
"port": port,
"family": socket.AF_INET6 if ":" in ip else socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
for ip in ips
]
async def start_connection(
addr_infos: Sequence[AddrInfoType],
*,
interleave: int | None = None,
**kwargs: object,
) -> socket.socket:
nonlocal timeout_error
addr_info = addr_infos[0]
addr_info_addr = addr_info[-1]
ip = addr_info_addr[0]
ips_tried.append(ip)
if ip == ip1:
timeout_error = True
raise asyncio.TimeoutError
if ip == ip2:
mock_socket = mock.create_autospec(
socket.socket, spec_set=True, instance=True
)
mock_socket.getpeername.return_value = addr_info_addr
return mock_socket # type: ignore[no-any-return]
assert False
async def create_connection(
*args: object, sock: socket.socket | None = None, **kwargs: object
) -> tuple[ResponseHandler, ResponseHandler]:
nonlocal connected
assert isinstance(sock, socket.socket)
addr_info = sock.getpeername()
ip = addr_info[0]
ips_success.append(ip)
connected = True
# Close the socket since we are not actually connecting
# and we don't want to leak it.
sock.close()
tr = create_mocked_conn(loop)
pr = create_mocked_conn(loop)
return tr, pr
with (
mock.patch.object(
conn,
"_resolve_host",
autospec=True,
spec_set=True,
side_effect=_resolve_host,
),
mock.patch.object(
conn._loop,
"create_connection",
autospec=True,
spec_set=True,
side_effect=create_connection,
),
mock.patch(
"aiohttp.connector.aiohappyeyeballs.start_connection", start_connection
),
):
established_connection = await conn.connect(req, [], ClientTimeout())
assert ips_tried == ips
assert ips_success == [ip2]
assert timeout_error
assert connected
established_connection.close()
await conn.close()
async def test_tcp_connector_resolve_host(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.TCPConnector(use_dns_cache=True)
res = await conn._resolve_host("localhost", 8080)
assert res
for rec in res:
if rec["family"] == socket.AF_INET:
assert rec["host"] == "127.0.0.1"
assert rec["hostname"] == "localhost"
assert rec["port"] == 8080
else:
assert rec["family"] == socket.AF_INET6
assert rec["hostname"] == "localhost"
assert rec["port"] == 8080
if platform.system() == "Darwin":
assert rec["host"] in ("::1", "fe80::1", "fe80::1%lo0")
else:
assert rec["host"] == "::1"
await conn.close()
@pytest.fixture
def dns_response(loop: asyncio.AbstractEventLoop) -> Callable[[], Awaitable[list[str]]]:
async def coro() -> list[str]:
# simulates a network operation
await asyncio.sleep(0)
return ["127.0.0.1"]
return coro
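# The single `await asyncio.sleep(0)` above yields to the event loop once,
# which is what lets the throttling tests below observe two concurrent
# callers sharing a single in-flight resolution.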
async def test_tcp_connector_dns_cache_not_expired(
loop: asyncio.AbstractEventLoop, dns_response: Callable[[], Awaitable[list[str]]]
) -> None:
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
conn = aiohttp.TCPConnector(use_dns_cache=True, ttl_dns_cache=10)
m_resolver().resolve.return_value = dns_response()
m_resolver().close = mock.AsyncMock()
await conn._resolve_host("localhost", 8080)
await conn._resolve_host("localhost", 8080)
m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0)
await conn.close()
async def test_tcp_connector_dns_cache_forever(
loop: asyncio.AbstractEventLoop, dns_response: Callable[[], Awaitable[list[str]]]
) -> None:
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
conn = aiohttp.TCPConnector(use_dns_cache=True, ttl_dns_cache=None)
m_resolver().resolve.return_value = dns_response()
m_resolver().close = mock.AsyncMock()
await conn._resolve_host("localhost", 8080)
await conn._resolve_host("localhost", 8080)
m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0)
await conn.close()
async def test_tcp_connector_use_dns_cache_disabled(
loop: asyncio.AbstractEventLoop, dns_response: Callable[[], Awaitable[list[str]]]
) -> None:
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
conn = aiohttp.TCPConnector(use_dns_cache=False)
m_resolver().resolve.side_effect = [dns_response(), dns_response()]
m_resolver().close = mock.AsyncMock()
await conn._resolve_host("localhost", 8080)
await conn._resolve_host("localhost", 8080)
m_resolver().resolve.assert_has_calls(
[
mock.call("localhost", 8080, family=0),
mock.call("localhost", 8080, family=0),
]
)
await conn.close()
async def test_tcp_connector_dns_throttle_requests(
loop: asyncio.AbstractEventLoop, dns_response: Callable[[], Awaitable[list[str]]]
) -> None:
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
conn = aiohttp.TCPConnector(use_dns_cache=True, ttl_dns_cache=10)
m_resolver().resolve.return_value = dns_response()
m_resolver().close = mock.AsyncMock()
t = loop.create_task(conn._resolve_host("localhost", 8080))
t2 = loop.create_task(conn._resolve_host("localhost", 8080))
await asyncio.sleep(0)
await asyncio.sleep(0)
m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0)
t.cancel()
t2.cancel()
with pytest.raises(asyncio.CancelledError):
await asyncio.gather(t, t2)
await conn.close()
async def test_tcp_connector_dns_throttle_requests_exception_spread(
loop: asyncio.AbstractEventLoop,
) -> None:
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
conn = aiohttp.TCPConnector(use_dns_cache=True, ttl_dns_cache=10)
e = Exception()
m_resolver().resolve.side_effect = e
m_resolver().close = mock.AsyncMock()
r1 = loop.create_task(conn._resolve_host("localhost", 8080))
r2 = loop.create_task(conn._resolve_host("localhost", 8080))
await asyncio.sleep(0)
await asyncio.sleep(0)
await asyncio.sleep(0)
await asyncio.sleep(0)
assert r1.exception() == e
assert r2.exception() == e
await conn.close()
async def test_tcp_connector_dns_throttle_requests_cancelled_when_close(
loop: asyncio.AbstractEventLoop, dns_response: Callable[[], Awaitable[list[str]]]
) -> None:
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
conn = aiohttp.TCPConnector(use_dns_cache=True, ttl_dns_cache=10)
m_resolver().resolve.return_value = dns_response()
m_resolver().close = mock.AsyncMock()
t = loop.create_task(conn._resolve_host("localhost", 8080))
f = loop.create_task(conn._resolve_host("localhost", 8080))
await asyncio.sleep(0)
await asyncio.sleep(0)
await conn.close()
t.cancel()
with pytest.raises(asyncio.CancelledError):
await asyncio.gather(t, f)
await conn.close()
@pytest.fixture
def dns_response_error(
loop: asyncio.AbstractEventLoop,
) -> Callable[[], Awaitable[NoReturn]]:
async def coro() -> NoReturn:
# simulates a network operation
await asyncio.sleep(0)
raise socket.gaierror(-3, "Temporary failure in name resolution")
return coro
async def test_tcp_connector_cancel_dns_error_captured(
loop: asyncio.AbstractEventLoop,
dns_response_error: Callable[[], Awaitable[NoReturn]],
make_client_request: _RequestMaker,
) -> None:
exception_handler_called = False
def exception_handler(loop: asyncio.AbstractEventLoop, context: object) -> None:
nonlocal exception_handler_called
exception_handler_called = True
loop.set_exception_handler(mock.Mock(side_effect=exception_handler))
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
req = make_client_request("GET", URL("http://temporary-failure:80"), loop=loop)
conn = aiohttp.TCPConnector(
use_dns_cache=False,
)
m_resolver().resolve.return_value = dns_response_error()
m_resolver().close = mock.AsyncMock()
f = loop.create_task(conn._create_direct_connection(req, [], ClientTimeout(0)))
await asyncio.sleep(0)
f.cancel()
with pytest.raises(asyncio.CancelledError):
await f
gc.collect()
assert exception_handler_called is False
await conn.close()
async def test_tcp_connector_dns_tracing(
loop: asyncio.AbstractEventLoop, dns_response: Callable[[], Awaitable[list[str]]]
) -> None:
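    """DNS tracing should emit resolve start/end plus a cache miss first, then a cache hit."""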
session = mock.Mock()
trace_config_ctx = mock.Mock()
on_dns_resolvehost_start = mock.AsyncMock()
on_dns_resolvehost_end = mock.AsyncMock()
on_dns_cache_hit = mock.AsyncMock()
on_dns_cache_miss = mock.AsyncMock()
trace_config = aiohttp.TraceConfig(
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx)
)
trace_config.on_dns_resolvehost_start.append(on_dns_resolvehost_start)
trace_config.on_dns_resolvehost_end.append(on_dns_resolvehost_end)
trace_config.on_dns_cache_hit.append(on_dns_cache_hit)
trace_config.on_dns_cache_miss.append(on_dns_cache_miss)
trace_config.freeze()
traces = [Trace(session, trace_config, trace_config.trace_config_ctx())]
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
conn = aiohttp.TCPConnector(use_dns_cache=True, ttl_dns_cache=10)
m_resolver().resolve.return_value = dns_response()
m_resolver().close = mock.AsyncMock()
await conn._resolve_host("localhost", 8080, traces=traces)
on_dns_resolvehost_start.assert_called_once_with(
session,
trace_config_ctx,
aiohttp.TraceDnsResolveHostStartParams("localhost"),
)
on_dns_resolvehost_end.assert_called_once_with(
session, trace_config_ctx, aiohttp.TraceDnsResolveHostEndParams("localhost")
)
on_dns_cache_miss.assert_called_once_with(
session, trace_config_ctx, aiohttp.TraceDnsCacheMissParams("localhost")
)
assert not on_dns_cache_hit.called
await conn._resolve_host("localhost", 8080, traces=traces)
on_dns_cache_hit.assert_called_once_with(
session, trace_config_ctx, aiohttp.TraceDnsCacheHitParams("localhost")
)
await conn.close()
async def test_tcp_connector_dns_tracing_cache_disabled(
loop: asyncio.AbstractEventLoop, dns_response: Callable[[], Awaitable[list[str]]]
) -> None:
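    """With the DNS cache disabled, every lookup should emit its own resolve start/end events."""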
session = mock.Mock()
trace_config_ctx = mock.Mock()
on_dns_resolvehost_start = mock.AsyncMock()
on_dns_resolvehost_end = mock.AsyncMock()
trace_config = aiohttp.TraceConfig(
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx)
)
trace_config.on_dns_resolvehost_start.append(on_dns_resolvehost_start)
trace_config.on_dns_resolvehost_end.append(on_dns_resolvehost_end)
trace_config.freeze()
traces = [Trace(session, trace_config, trace_config.trace_config_ctx())]
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
conn = aiohttp.TCPConnector(use_dns_cache=False)
m_resolver().resolve.side_effect = [dns_response(), dns_response()]
m_resolver().close = mock.AsyncMock()
await conn._resolve_host("localhost", 8080, traces=traces)
await conn._resolve_host("localhost", 8080, traces=traces)
on_dns_resolvehost_start.assert_has_calls(
[
mock.call(
session,
trace_config_ctx,
aiohttp.TraceDnsResolveHostStartParams("localhost"),
),
mock.call(
session,
trace_config_ctx,
aiohttp.TraceDnsResolveHostStartParams("localhost"),
),
]
)
on_dns_resolvehost_end.assert_has_calls(
[
mock.call(
session,
trace_config_ctx,
aiohttp.TraceDnsResolveHostEndParams("localhost"),
),
mock.call(
session,
trace_config_ctx,
aiohttp.TraceDnsResolveHostEndParams("localhost"),
),
]
)
await conn.close()
async def test_tcp_connector_dns_tracing_throttle_requests(
loop: asyncio.AbstractEventLoop, dns_response: Callable[[], Awaitable[list[str]]]
) -> None:
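    """Of two coalesced lookups, the resolving one traces a cache miss and the waiter a cache hit."""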
session = mock.Mock()
trace_config_ctx = mock.Mock()
on_dns_cache_hit = mock.AsyncMock()
on_dns_cache_miss = mock.AsyncMock()
trace_config = aiohttp.TraceConfig(
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx)
)
trace_config.on_dns_cache_hit.append(on_dns_cache_hit)
trace_config.on_dns_cache_miss.append(on_dns_cache_miss)
trace_config.freeze()
traces = [Trace(session, trace_config, trace_config.trace_config_ctx())]
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
conn = aiohttp.TCPConnector(use_dns_cache=True, ttl_dns_cache=10)
m_resolver().resolve.return_value = dns_response()
m_resolver().close = mock.AsyncMock()
t = loop.create_task(conn._resolve_host("localhost", 8080, traces=traces))
t1 = loop.create_task(conn._resolve_host("localhost", 8080, traces=traces))
await asyncio.sleep(0)
await asyncio.sleep(0)
on_dns_cache_hit.assert_called_once_with(
session, trace_config_ctx, aiohttp.TraceDnsCacheHitParams("localhost")
)
on_dns_cache_miss.assert_called_once_with(
session, trace_config_ctx, aiohttp.TraceDnsCacheMissParams("localhost")
)
t.cancel()
t1.cancel()
with pytest.raises(asyncio.CancelledError):
await asyncio.gather(t, t1)
await conn.close()
async def test_tcp_connector_close_resolver() -> None:
m_resolver = mock.AsyncMock()
with mock.patch("aiohttp.connector.DefaultResolver", return_value=m_resolver):
conn = aiohttp.TCPConnector(use_dns_cache=True, ttl_dns_cache=10)
await conn.close()
m_resolver.close.assert_awaited_once()
async def test_dns_error(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
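    """An OSError raised during host resolution should surface as ClientConnectorError."""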
connector = aiohttp.TCPConnector()
with mock.patch.object(
connector,
"_resolve_host",
autospec=True,
spec_set=True,
side_effect=OSError("dont take it serious"),
):
req = make_client_request("GET", URL("http://www.python.org"), loop=loop)
with pytest.raises(aiohttp.ClientConnectorError):
await connector.connect(req, [], ClientTimeout())
await connector.close()
async def test_get_pop_empty_conns(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
# see issue #473
conn = aiohttp.BaseConnector()
assert await conn._get(key, []) is None
assert not conn._conns
await conn.close()
async def test_release_close_do_not_add_to_pool(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
# see issue #473
conn = aiohttp.BaseConnector()
proto = create_mocked_conn(loop, should_close=True)
conn._acquired.add(proto)
conn._release(key, proto)
assert not conn._conns
await conn.close()
async def test_release_close_do_not_delete_existing_connections(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
proto1 = create_mocked_conn(loop)
conn = aiohttp.BaseConnector()
conn._conns[key] = deque([(proto1, 1)])
proto = create_mocked_conn(loop, should_close=True)
conn._acquired.add(proto)
conn._release(key, proto)
assert conn._conns[key] == deque([(proto1, 1)])
assert proto.close.called
await conn.close()
async def test_release_not_started(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector()
proto = create_mocked_conn(should_close=False)
conn._acquired.add(proto)
conn._release(key, proto)
# assert conn._conns == {key: [(proto, 10)]}
rec = conn._conns[key]
assert rec[0][0] == proto
assert rec[0][1] == pytest.approx(loop.time(), abs=0.05)
assert not proto.close.called
await conn.close()
async def test_release_not_opened(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector()
proto = create_mocked_conn(loop)
conn._acquired.add(proto)
conn._release(key, proto)
assert proto.close.called
await conn.close()
async def test_connect(
loop: asyncio.AbstractEventLoop,
key: ConnectionKey,
make_client_request: _RequestMaker,
) -> None:
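    """A pooled connection should be reused without calling _create_connection."""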
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://localhost:80"), loop=loop)
conn = aiohttp.BaseConnector()
conn._conns[key] = deque([(proto, loop.time())])
with mock.patch.object(conn, "_create_connection", create_mocked_conn(loop)) as m:
m.return_value = loop.create_future()
m.return_value.set_result(proto)
connection = await conn.connect(req, [], ClientTimeout())
assert not m.called
assert connection._protocol is proto
assert connection.transport is proto.transport
assert isinstance(connection, Connection)
connection.close()
await conn.close()
async def test_connect_tracing(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
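    """Establishing a new connection should fire the connection_create start/end trace events."""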
session = mock.Mock()
trace_config_ctx = mock.Mock()
on_connection_create_start = mock.AsyncMock()
on_connection_create_end = mock.AsyncMock()
trace_config = aiohttp.TraceConfig(
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx)
)
trace_config.on_connection_create_start.append(on_connection_create_start)
trace_config.on_connection_create_end.append(on_connection_create_end)
trace_config.freeze()
traces = [Trace(session, trace_config, trace_config.trace_config_ctx())]
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://host:80"), loop=loop)
conn = aiohttp.BaseConnector()
with mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
):
conn2 = await conn.connect(req, traces, ClientTimeout())
conn2.release()
on_connection_create_start.assert_called_with(
session, trace_config_ctx, aiohttp.TraceConnectionCreateStartParams()
)
on_connection_create_end.assert_called_with(
session, trace_config_ctx, aiohttp.TraceConnectionCreateEndParams()
)
@pytest.mark.parametrize(
"signal",
[
"on_connection_create_start",
"on_connection_create_end",
],
)
async def test_exception_during_connection_create_tracing(  # type: ignore[misc]
loop: asyncio.AbstractEventLoop, signal: str, make_client_request: _RequestMaker
) -> None:
session = mock.Mock()
trace_config_ctx = mock.Mock()
on_signal = mock.AsyncMock(side_effect=asyncio.CancelledError)
trace_config = aiohttp.TraceConfig(
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx)
)
getattr(trace_config, signal).append(on_signal)
trace_config.freeze()
traces = [Trace(session, trace_config, trace_config.trace_config_ctx())]
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://host:80"), loop=loop)
key = req.connection_key
conn = aiohttp.BaseConnector()
assert not conn._acquired
assert key not in conn._acquired_per_host
with (
pytest.raises(asyncio.CancelledError),
mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
),
):
await conn.connect(req, traces, ClientTimeout())
assert not conn._acquired
assert key not in conn._acquired_per_host
async def test_exception_during_connection_queued_tracing(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
session = mock.Mock()
trace_config_ctx = mock.Mock()
on_signal = mock.AsyncMock(side_effect=asyncio.CancelledError)
trace_config = aiohttp.TraceConfig(
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx)
)
trace_config.on_connection_queued_start.append(on_signal)
trace_config.freeze()
traces = [Trace(session, trace_config, trace_config.trace_config_ctx())]
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://host:80"), loop=loop)
key = req.connection_key
conn = aiohttp.BaseConnector(limit=1)
assert not conn._acquired
assert key not in conn._acquired_per_host
with (
pytest.raises(asyncio.CancelledError),
mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
),
):
resp1 = await conn.connect(req, traces, ClientTimeout())
assert resp1
# 2nd connect request will be queued
await conn.connect(req, traces, ClientTimeout())
resp1.close()
assert not conn._waiters
assert not conn._acquired
assert key not in conn._acquired_per_host
await conn.close()
async def test_exception_during_connection_reuse_tracing(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
session = mock.Mock()
trace_config_ctx = mock.Mock()
on_signal = mock.AsyncMock(side_effect=asyncio.CancelledError)
trace_config = aiohttp.TraceConfig(
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx)
)
trace_config.on_connection_reuseconn.append(on_signal)
trace_config.freeze()
traces = [Trace(session, trace_config, trace_config.trace_config_ctx())]
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://host:80"), loop=loop)
key = req.connection_key
conn = aiohttp.BaseConnector()
assert not conn._acquired
assert key not in conn._acquired_per_host
with (
pytest.raises(asyncio.CancelledError),
mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
),
):
resp = await conn.connect(req, traces, ClientTimeout())
with mock.patch.object(resp.protocol, "should_close", False):
resp.release()
assert not conn._acquired
assert key not in conn._acquired_per_host
assert key in conn._conns
await conn.connect(req, traces, ClientTimeout())
assert not conn._acquired
assert key not in conn._acquired_per_host
async def test_cancellation_during_waiting_for_free_connection(
loop: asyncio.AbstractEventLoop,
make_client_request: _RequestMaker,
) -> None:
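    """Cancelling a queued waiter should leave no stale waiters or acquired connections behind."""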
session = mock.Mock()
trace_config_ctx = mock.Mock()
    waiter_wait_started_future = loop.create_future()
async def on_connection_queued_start(*args: object, **kwargs: object) -> None:
        waiter_wait_started_future.set_result(None)
trace_config = aiohttp.TraceConfig(
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx)
)
trace_config.on_connection_queued_start.append(on_connection_queued_start)
trace_config.freeze()
traces = [Trace(session, trace_config, trace_config.trace_config_ctx())]
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://host:80"), loop=loop)
key = req.connection_key
conn = aiohttp.BaseConnector(limit=1)
assert not conn._acquired
assert key not in conn._acquired_per_host
with mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
):
resp1 = await conn.connect(req, traces, ClientTimeout())
assert resp1
# 2nd connect request will be queued
task = asyncio.create_task(conn.connect(req, traces, ClientTimeout()))
        await waiter_wait_started_future
list(conn._waiters[key])[0].cancel()
with pytest.raises(asyncio.CancelledError):
await task
resp1.close()
assert not conn._waiters
assert not conn._acquired
assert key not in conn._acquired_per_host
async def test_close_during_connect(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
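    """Closing the connector mid-connect should fail the pending connect and close its protocol."""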
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
fut = loop.create_future()
req = make_client_request("GET", URL("http://host:80"), loop=loop)
conn = aiohttp.BaseConnector()
with mock.patch.object(conn, "_create_connection", lambda *args: fut):
task = loop.create_task(conn.connect(req, [], ClientTimeout()))
await asyncio.sleep(0)
await conn.close()
fut.set_result(proto)
with pytest.raises(aiohttp.ClientConnectionError):
await task
assert proto.close.called
@pytest.mark.usefixtures("enable_cleanup_closed")
async def test_ctor_cleanup() -> None:
loop = mock.Mock()
loop.time.return_value = 1.5
conn = aiohttp.BaseConnector(keepalive_timeout=10, enable_cleanup_closed=True)
assert conn._cleanup_handle is None
assert conn._cleanup_closed_handle is not None
await conn.close()
async def test_cleanup(key: ConnectionKey) -> None:
    # The test sets the clock to 300s. It starts with 2 connections in the
    # pool. The first connection has a use time of 10s. When cleanup reaches
    # it, it computes the deadline = 300 - 15.0 = 285.0 (15s being the default
    # keep-alive timeout), then sees the connection is overdue because
    # 10 - 285.0 < 0 and closes it even though it is still connected. The
    # second connection is already disconnected, so cleanup skips the deadline
    # check and closes the underlying transport right away.
m1 = mock.Mock()
m2 = mock.Mock()
m1.is_connected.return_value = True
m2.is_connected.return_value = False
testset: defaultdict[ConnectionKey, deque[tuple[ResponseHandler, float]]] = (
defaultdict(deque)
)
testset[key] = deque([(m1, 10), (m2, 300)])
loop = mock.Mock()
loop.time.return_value = 300
async with aiohttp.BaseConnector() as conn:
conn._loop = loop
conn._conns = testset
existing_handle = conn._cleanup_handle = mock.Mock()
with mock.patch("aiohttp.connector.monotonic", return_value=300):
conn._cleanup()
assert existing_handle.cancel.called
assert conn._conns == {}
assert conn._cleanup_handle is None
@pytest.mark.usefixtures("enable_cleanup_closed")
async def test_cleanup_close_ssl_transport( # type: ignore[misc]
loop: asyncio.AbstractEventLoop, ssl_key: ConnectionKey
) -> None:
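    """Cleaning up an expired SSL connection should queue its transport for forced closing."""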
proto = create_mocked_conn(loop)
transport = proto.transport
testset: defaultdict[ConnectionKey, deque[tuple[ResponseHandler, float]]] = (
defaultdict(deque)
)
testset[ssl_key] = deque([(proto, 10)])
loop = mock.Mock()
new_time = asyncio.get_event_loop().time() + 300
loop.time.return_value = new_time
conn = aiohttp.BaseConnector(enable_cleanup_closed=True)
conn._loop = loop
conn._conns = testset
existing_handle = conn._cleanup_handle = mock.Mock()
with mock.patch("aiohttp.connector.monotonic", return_value=new_time):
conn._cleanup()
assert existing_handle.cancel.called
assert conn._conns == {}
assert conn._cleanup_closed_transports == [transport]
await conn.close()
await asyncio.sleep(0) # Give cleanup a chance to close transports
async def test_cleanup2(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None:
m = create_mocked_conn()
m.is_connected.return_value = True
testset: defaultdict[ConnectionKey, deque[tuple[ResponseHandler, float]]] = (
defaultdict(deque)
)
testset[key] = deque([(m, 300)])
conn = aiohttp.BaseConnector(keepalive_timeout=10)
conn._loop = mock.Mock()
conn._loop.time.return_value = 300
with mock.patch("aiohttp.connector.monotonic", return_value=300):
conn._conns = testset
conn._cleanup()
assert conn._conns == testset
assert conn._cleanup_handle is not None
conn._loop.call_at.assert_called_with(310, mock.ANY, mock.ANY)
await conn.close()
async def test_cleanup3(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None:
m = create_mocked_conn(loop)
m.is_connected.return_value = True
testset: defaultdict[ConnectionKey, deque[tuple[ResponseHandler, float]]] = (
defaultdict(deque)
)
testset[key] = deque([(m, 290.1), (create_mocked_conn(loop), 305.1)])
conn = aiohttp.BaseConnector(keepalive_timeout=10)
conn._loop = mock.Mock()
conn._loop.time.return_value = 308.5
conn._conns = testset
with mock.patch("aiohttp.connector.monotonic", return_value=308.5):
conn._cleanup()
assert conn._conns == {key: deque([testset[key][1]])}
assert conn._cleanup_handle is not None
conn._loop.call_at.assert_called_with(319, mock.ANY, mock.ANY)
await conn.close()
@pytest.mark.usefixtures("enable_cleanup_closed")
async def test_cleanup_closed(
loop: asyncio.AbstractEventLoop, mocker: MockerFixture
) -> None:
if not hasattr(loop, "__dict__"):
pytest.skip("can not override loop attributes")
m = mocker.spy(loop, "call_at")
conn = aiohttp.BaseConnector(enable_cleanup_closed=True)
tr = mock.Mock()
conn._cleanup_closed_handle = cleanup_closed_handle = mock.Mock()
conn._cleanup_closed_transports = [tr]
conn._cleanup_closed()
assert tr.abort.called
assert not conn._cleanup_closed_transports
assert m.called
assert cleanup_closed_handle.cancel.called
await conn.close()
async def test_cleanup_closed_is_noop_on_fixed_cpython() -> None:
"""Ensure that enable_cleanup_closed is a noop on fixed Python versions."""
with (
mock.patch("aiohttp.connector.NEEDS_CLEANUP_CLOSED", False),
pytest.warns(DeprecationWarning, match="cleanup_closed ignored"),
):
conn = aiohttp.BaseConnector(enable_cleanup_closed=True)
assert conn._cleanup_closed_disabled is True
async def test_cleanup_closed_disabled(
loop: asyncio.AbstractEventLoop, mocker: MockerFixture
) -> None:
conn = aiohttp.BaseConnector(enable_cleanup_closed=False)
tr = mock.Mock()
conn._cleanup_closed_transports = [tr]
conn._cleanup_closed()
assert tr.abort.called
assert not conn._cleanup_closed_transports
await conn.close()
async def test_tcp_connector_ctor(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.TCPConnector()
assert conn._ssl is True
assert conn.use_dns_cache
assert conn.family == 0
await conn.close()
@pytest.mark.skipif(
sys.version_info < (3, 11),
reason="Use test_tcp_connector_ssl_shutdown_timeout_pre_311 for Python < 3.11",
)
async def test_tcp_connector_ssl_shutdown_timeout(
loop: asyncio.AbstractEventLoop,
) -> None:
# Test default value (no warning expected)
conn = aiohttp.TCPConnector()
assert conn._ssl_shutdown_timeout == 0
await conn.close()
# Test custom value - expect deprecation warning
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=1.0)
assert conn._ssl_shutdown_timeout == 1.0
await conn.close()
# Test None value - expect deprecation warning
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=None)
assert conn._ssl_shutdown_timeout is None
await conn.close()
@pytest.mark.skipif(
sys.version_info >= (3, 11),
reason="This test is for Python < 3.11 runtime warning behavior",
)
async def test_tcp_connector_ssl_shutdown_timeout_pre_311(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that both deprecation and runtime warnings are issued on Python < 3.11."""
# Test custom value - expect both deprecation and runtime warnings
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=1.0)
# Should have both deprecation and runtime warnings
assert len(w) == 2
assert any(issubclass(warn.category, DeprecationWarning) for warn in w)
assert any(issubclass(warn.category, RuntimeWarning) for warn in w)
assert conn._ssl_shutdown_timeout == 1.0
await conn.close()
@pytest.mark.skipif(
sys.version_info < (3, 11), reason="ssl_shutdown_timeout requires Python 3.11+"
)
async def test_tcp_connector_ssl_shutdown_timeout_passed_to_create_connection( # type: ignore[misc]
loop: asyncio.AbstractEventLoop,
start_connection: mock.AsyncMock,
make_client_request: _RequestMaker,
) -> None:
# Test that ssl_shutdown_timeout is passed to create_connection for SSL connections
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5)
with mock.patch.object(
conn._loop, "create_connection", autospec=True, spec_set=True
) as create_connection:
create_connection.return_value = mock.Mock(), mock.Mock()
req = make_client_request("GET", URL("https://example.com"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
assert create_connection.call_args.kwargs["ssl_shutdown_timeout"] == 2.5
await conn.close()
# Test with None value
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=None)
with mock.patch.object(
conn._loop, "create_connection", autospec=True, spec_set=True
) as create_connection:
create_connection.return_value = mock.Mock(), mock.Mock()
req = make_client_request("GET", URL("https://example.com"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
# When ssl_shutdown_timeout is None, it should not be in kwargs
assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs
await conn.close()
# Test that ssl_shutdown_timeout is NOT passed for non-SSL connections
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5)
with mock.patch.object(
conn._loop, "create_connection", autospec=True, spec_set=True
) as create_connection:
create_connection.return_value = mock.Mock(), mock.Mock()
req = make_client_request("GET", URL("http://example.com"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
# For non-SSL connections, ssl_shutdown_timeout should not be passed
assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs
await conn.close()
@pytest.mark.skipif(sys.version_info >= (3, 11), reason="Test for Python < 3.11")
async def test_tcp_connector_ssl_shutdown_timeout_not_passed_pre_311( # type: ignore[misc]
loop: asyncio.AbstractEventLoop,
start_connection: mock.AsyncMock,
make_client_request: _RequestMaker,
) -> None:
# Test that ssl_shutdown_timeout is NOT passed to create_connection on Python < 3.11
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=2.5)
# Should have both deprecation and runtime warnings
assert len(w) == 2
assert any(issubclass(warn.category, DeprecationWarning) for warn in w)
assert any(issubclass(warn.category, RuntimeWarning) for warn in w)
with mock.patch.object(
conn._loop, "create_connection", autospec=True, spec_set=True
) as create_connection:
create_connection.return_value = mock.Mock(), mock.Mock()
# Test with HTTPS
req = make_client_request("GET", URL("https://example.com"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs
# Test with HTTP
req = make_client_request("GET", URL("http://example.com"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs
await conn.close()
async def test_tcp_connector_close_abort_ssl_when_shutdown_timeout_zero(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that close() uses abort() for SSL connections when ssl_shutdown_timeout=0."""
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0)
# Create a mock SSL protocol
proto = mock.create_autospec(ResponseHandler, instance=True)
proto.closed = None
# Create mock SSL transport
transport = mock.Mock()
transport.get_extra_info.return_value = mock.Mock() # Returns SSL context
transport.is_closing.return_value = False
proto.transport = transport
# Add the protocol to acquired connections
conn._acquired.add(proto)
# Close the connector
await conn.close()
# Verify abort was called instead of close for SSL connection
proto.abort.assert_called_once()
proto.close.assert_not_called()
async def test_tcp_connector_close_doesnt_abort_non_ssl_when_shutdown_timeout_zero(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that close() still uses close() for non-SSL connections even when ssl_shutdown_timeout=0."""
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0)
# Create a mock non-SSL protocol
proto = mock.create_autospec(ResponseHandler, instance=True)
proto.closed = None
# Create mock non-SSL transport
transport = mock.Mock()
transport.get_extra_info.return_value = None # No SSL context
transport.is_closing.return_value = False
proto.transport = transport
# Add the protocol to acquired connections
conn._acquired.add(proto)
# Close the connector
await conn.close()
# Verify close was called for non-SSL connection
proto.close.assert_called_once()
proto.abort.assert_not_called()
async def test_tcp_connector_ssl_shutdown_timeout_warning_pre_311(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that a warning is issued for non-zero ssl_shutdown_timeout on Python < 3.11."""
with (
mock.patch.object(sys, "version_info", (3, 10, 0)),
warnings.catch_warnings(record=True) as w,
):
warnings.simplefilter("always")
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=5.0)
# We should get two warnings: deprecation and runtime warning
assert len(w) == 2
# Find each warning type
deprecation_warning = next(
(warn for warn in w if issubclass(warn.category, DeprecationWarning)), None
)
runtime_warning = next(
(warn for warn in w if issubclass(warn.category, RuntimeWarning)), None
)
assert deprecation_warning is not None
assert "ssl_shutdown_timeout parameter is deprecated" in str(
deprecation_warning.message
)
assert runtime_warning is not None
assert "ssl_shutdown_timeout=5.0 is ignored on Python < 3.11" in str(
runtime_warning.message
)
assert "only ssl_shutdown_timeout=0 is supported" in str(
runtime_warning.message
)
# Verify the value is still stored
assert conn._ssl_shutdown_timeout == 5.0
await conn.close()
async def test_tcp_connector_ssl_shutdown_timeout_zero_no_warning_pre_311(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that no warning is issued for ssl_shutdown_timeout=0 on Python < 3.11."""
with (
mock.patch.object(sys, "version_info", (3, 10, 0)),
warnings.catch_warnings(record=True) as w,
):
warnings.simplefilter("always")
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0)
# We should get one warning: deprecation
assert len(w) == 1
assert issubclass(w[0].category, DeprecationWarning)
assert "ssl_shutdown_timeout parameter is deprecated" in str(w[0].message)
assert conn._ssl_shutdown_timeout == 0
await conn.close()
async def test_tcp_connector_ssl_shutdown_timeout_sentinel_no_warning_pre_311(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that no warning is issued when sentinel is used on Python < 3.11."""
with (
mock.patch.object(sys, "version_info", (3, 10, 0)),
warnings.catch_warnings(record=True) as w,
):
warnings.simplefilter("always")
conn = aiohttp.TCPConnector() # Uses sentinel by default
assert len(w) == 0
assert conn._ssl_shutdown_timeout == 0 # Default value
await conn.close()
async def test_tcp_connector_ssl_shutdown_timeout_zero_not_passed(
loop: asyncio.AbstractEventLoop,
start_connection: mock.AsyncMock,
make_client_request: _RequestMaker,
) -> None:
"""Test that ssl_shutdown_timeout=0 is NOT passed to create_connection."""
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0)
with mock.patch.object(
conn._loop, "create_connection", autospec=True, spec_set=True
) as create_connection:
create_connection.return_value = mock.Mock(), mock.Mock()
# Test with HTTPS
req = make_client_request("GET", URL("https://example.com"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
# Verify ssl_shutdown_timeout was NOT passed
assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs
# Test with HTTP (should not have ssl_shutdown_timeout anyway)
req = make_client_request("GET", URL("http://example.com"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs
await conn.close()
@pytest.mark.skipif(
sys.version_info < (3, 11), reason="ssl_shutdown_timeout requires Python 3.11+"
)
async def test_tcp_connector_ssl_shutdown_timeout_nonzero_passed( # type: ignore[misc]
loop: asyncio.AbstractEventLoop,
start_connection: mock.AsyncMock,
make_client_request: _RequestMaker,
) -> None:
"""Test that non-zero ssl_shutdown_timeout IS passed to create_connection on Python 3.11+."""
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=5.0)
with mock.patch.object(
conn._loop, "create_connection", autospec=True, spec_set=True
) as create_connection:
create_connection.return_value = mock.Mock(), mock.Mock()
# Test with HTTPS
req = make_client_request("GET", URL("https://example.com"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
# Verify ssl_shutdown_timeout WAS passed
assert create_connection.call_args.kwargs["ssl_shutdown_timeout"] == 5.0
# Test with HTTP (should not have ssl_shutdown_timeout)
req = make_client_request("GET", URL("http://example.com"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
assert "ssl_shutdown_timeout" not in create_connection.call_args.kwargs
await conn.close()
async def test_tcp_connector_close_abort_ssl_connections_in_conns(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that SSL connections in _conns are aborted when ssl_shutdown_timeout=0."""
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0)
# Create mock SSL protocol
proto = mock.create_autospec(ResponseHandler, instance=True)
proto.closed = None
# Create mock SSL transport
transport = mock.Mock()
transport.get_extra_info.return_value = mock.Mock() # Returns SSL context
proto.transport = transport
# Add the protocol to _conns
key = ConnectionKey("host", 443, True, True, None, None, None)
conn._conns[key] = deque([(proto, loop.time())])
# Close the connector
await conn.close()
# Verify abort was called for SSL connection
proto.abort.assert_called_once()
proto.close.assert_not_called()
async def test_tcp_connector_allowed_protocols(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.TCPConnector()
assert conn.allowed_protocol_schema_set == {"", "tcp", "http", "https", "ws", "wss"}
async def test_start_tls_exception_with_ssl_shutdown_timeout_zero(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test _start_tls_connection exception handling with ssl_shutdown_timeout=0."""
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=0)
underlying_transport = mock.Mock()
req = mock.Mock()
req.server_hostname = None
req.host = "example.com"
req.is_ssl = mock.Mock(return_value=True)
# Patch _get_ssl_context to return a valid context and make start_tls fail
with (
mock.patch.object(
conn, "_get_ssl_context", return_value=ssl.create_default_context()
),
mock.patch.object(conn._loop, "start_tls", side_effect=OSError("TLS failed")),
):
with pytest.raises(OSError):
await conn._start_tls_connection(underlying_transport, req, ClientTimeout())
# Should abort, not close
underlying_transport.abort.assert_called_once()
underlying_transport.close.assert_not_called()
@pytest.mark.skipif(
sys.version_info < (3, 11),
reason="Use test_start_tls_exception_with_ssl_shutdown_timeout_nonzero_pre_311 for Python < 3.11",
)
async def test_start_tls_exception_with_ssl_shutdown_timeout_nonzero(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test _start_tls_connection exception handling with ssl_shutdown_timeout>0."""
with pytest.warns(
DeprecationWarning, match="ssl_shutdown_timeout parameter is deprecated"
):
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=1.0)
underlying_transport = mock.Mock()
req = mock.Mock()
req.server_hostname = None
req.host = "example.com"
req.is_ssl = mock.Mock(return_value=True)
# Patch _get_ssl_context to return a valid context and make start_tls fail
with (
mock.patch.object(
conn, "_get_ssl_context", return_value=ssl.create_default_context()
),
mock.patch.object(conn._loop, "start_tls", side_effect=OSError("TLS failed")),
):
with pytest.raises(OSError):
await conn._start_tls_connection(underlying_transport, req, ClientTimeout())
# Should close, not abort
underlying_transport.close.assert_called_once()
underlying_transport.abort.assert_not_called()
@pytest.mark.skipif(
sys.version_info >= (3, 11),
reason="This test is for Python < 3.11 runtime warning behavior",
)
async def test_start_tls_exception_with_ssl_shutdown_timeout_nonzero_pre_311(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test _start_tls_connection exception handling with ssl_shutdown_timeout>0 on Python < 3.11."""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
conn = aiohttp.TCPConnector(ssl_shutdown_timeout=1.0)
# Should have both deprecation and runtime warnings
assert len(w) == 2
assert any(issubclass(warn.category, DeprecationWarning) for warn in w)
assert any(issubclass(warn.category, RuntimeWarning) for warn in w)
underlying_transport = mock.Mock()
req = mock.Mock()
req.server_hostname = None
req.host = "example.com"
req.is_ssl = mock.Mock(return_value=True)
# Patch _get_ssl_context to return a valid context and make start_tls fail
with (
mock.patch.object(
conn, "_get_ssl_context", return_value=ssl.create_default_context()
),
mock.patch.object(conn._loop, "start_tls", side_effect=OSError("TLS failed")),
):
with pytest.raises(OSError):
await conn._start_tls_connection(underlying_transport, req, ClientTimeout())
# Should close, not abort
underlying_transport.close.assert_called_once()
underlying_transport.abort.assert_not_called()
async def test_invalid_ssl_param() -> None:
with pytest.raises(TypeError):
aiohttp.TCPConnector(ssl=object()) # type: ignore[arg-type]
async def test_tcp_connector_ctor_fingerprint_valid(
loop: asyncio.AbstractEventLoop,
) -> None:
valid = aiohttp.Fingerprint(hashlib.sha256(b"foo").digest())
conn = aiohttp.TCPConnector(ssl=valid)
assert conn._ssl is valid
await conn.close()
async def test_insecure_fingerprint_md5(loop: asyncio.AbstractEventLoop) -> None:
with pytest.raises(ValueError):
aiohttp.TCPConnector(ssl=aiohttp.Fingerprint(hashlib.md5(b"foo").digest()))
async def test_insecure_fingerprint_sha1(loop: asyncio.AbstractEventLoop) -> None:
with pytest.raises(ValueError):
aiohttp.TCPConnector(ssl=aiohttp.Fingerprint(hashlib.sha1(b"foo").digest()))
async def test_tcp_connector_clear_dns_cache(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.TCPConnector()
h1: ResolveResult = {
"hostname": "a",
"host": "127.0.0.1",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
h2: ResolveResult = {
"hostname": "a",
"host": "127.0.0.1",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
hosts = [h1, h2]
conn._cached_hosts.add(("localhost", 123), hosts)
conn._cached_hosts.add(("localhost", 124), hosts)
conn.clear_dns_cache("localhost", 123)
with pytest.raises(KeyError):
conn._cached_hosts.next_addrs(("localhost", 123))
assert conn._cached_hosts.next_addrs(("localhost", 124)) == hosts
    # Removing an already-removed element is OK
conn.clear_dns_cache("localhost", 123)
with pytest.raises(KeyError):
conn._cached_hosts.next_addrs(("localhost", 123))
conn.clear_dns_cache()
with pytest.raises(KeyError):
conn._cached_hosts.next_addrs(("localhost", 124))
await conn.close()
async def test_tcp_connector_clear_dns_cache_bad_args(
loop: asyncio.AbstractEventLoop,
) -> None:
conn = aiohttp.TCPConnector()
with pytest.raises(ValueError):
conn.clear_dns_cache("localhost")
await conn.close()
async def test___get_ssl_context1() -> None:
conn = aiohttp.TCPConnector()
req = mock.Mock()
req.is_ssl.return_value = False
assert conn._get_ssl_context(req) is None
await conn.close()
async def test___get_ssl_context2() -> None:
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
conn = aiohttp.TCPConnector()
req = mock.Mock()
req.is_ssl.return_value = True
req.ssl = ctx
assert conn._get_ssl_context(req) is ctx
await conn.close()
async def test___get_ssl_context3() -> None:
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
conn = aiohttp.TCPConnector(ssl=ctx)
req = mock.Mock()
req.is_ssl.return_value = True
req.ssl = True
assert conn._get_ssl_context(req) is ctx
await conn.close()
async def test___get_ssl_context4() -> None:
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
conn = aiohttp.TCPConnector(ssl=ctx)
req = mock.Mock()
req.is_ssl.return_value = True
req.ssl = False
assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED
await conn.close()
async def test___get_ssl_context5() -> None:
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
conn = aiohttp.TCPConnector(ssl=ctx)
req = mock.Mock()
req.is_ssl.return_value = True
req.ssl = aiohttp.Fingerprint(hashlib.sha256(b"1").digest())
assert conn._get_ssl_context(req) is _SSL_CONTEXT_UNVERIFIED
await conn.close()
async def test___get_ssl_context6() -> None:
conn = aiohttp.TCPConnector()
req = mock.Mock()
req.is_ssl.return_value = True
req.ssl = True
assert conn._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED
await conn.close()
async def test_ssl_context_once() -> None:
"""Test the ssl context is created only once and shared between connectors."""
conn1 = aiohttp.TCPConnector()
conn2 = aiohttp.TCPConnector()
conn3 = aiohttp.TCPConnector()
req = mock.Mock()
req.is_ssl.return_value = True
req.ssl = True
assert conn1._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED
assert conn2._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED
assert conn3._get_ssl_context(req) is _SSL_CONTEXT_VERIFIED
async def test_close_twice(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None:
proto: ResponseHandler = create_mocked_conn(loop)
conn = aiohttp.BaseConnector()
conn._conns[key] = deque([(proto, 0)])
await conn.close()
assert not conn._conns
assert proto.close.called # type: ignore[attr-defined]
assert conn.closed
conn._conns = "Invalid" # type: ignore[assignment] # fill with garbage
await conn.close()
assert conn.closed
async def test_close_cancels_cleanup_handle(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector()
conn._release(key, create_mocked_conn(should_close=False))
assert conn._cleanup_handle is not None
await conn.close()
assert conn._cleanup_handle is None
async def test_close_cancels_resolve_host(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
cancelled = False
async def delay_resolve(*args: object, **kwargs: object) -> None:
"""Delay resolve() task in order to test cancellation."""
nonlocal cancelled
try:
await asyncio.sleep(10)
except asyncio.CancelledError:
cancelled = True
raise
conn = aiohttp.TCPConnector()
req = make_client_request(
"GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock()
)
with mock.patch.object(conn._resolver, "resolve", delay_resolve):
t = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
# Let it create the internal task
await asyncio.sleep(0)
# Let that task start running
await asyncio.sleep(0)
# We now have a task being tracked and can ensure that .close() cancels it.
assert len(conn._resolve_host_tasks) == 1
await conn.close()
assert cancelled
assert len(conn._resolve_host_tasks) == 0
with suppress(asyncio.CancelledError):
await t
async def test_multiple_dns_resolution_requests_success(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
"""Verify that multiple DNS resolution requests are handled correctly."""
async def delay_resolve(*args: object, **kwargs: object) -> list[ResolveResult]:
"""Delayed resolve() task."""
for _ in range(3):
await asyncio.sleep(0)
return [
{
"hostname": "localhost",
"host": "127.0.0.1",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
},
]
conn = aiohttp.TCPConnector(force_close=True)
req = make_client_request(
"GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock()
)
with (
mock.patch.object(conn._resolver, "resolve", delay_resolve),
mock.patch(
"aiohttp.connector.aiohappyeyeballs.start_connection",
side_effect=OSError(1, "Forced connection to fail"),
),
):
task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
# Let it create the internal task
await asyncio.sleep(0)
# Let that task start running
await asyncio.sleep(0)
# Ensure the task is running
assert len(conn._resolve_host_tasks) == 1
task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
with pytest.raises(
aiohttp.ClientConnectorError, match="Forced connection to fail"
):
await task1
        # Verify that the task is finished
assert len(conn._resolve_host_tasks) == 0
with pytest.raises(
aiohttp.ClientConnectorError, match="Forced connection to fail"
):
await task2
with pytest.raises(
aiohttp.ClientConnectorError, match="Forced connection to fail"
):
await task3
async def test_multiple_dns_resolution_requests_failure(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
"""Verify that DNS resolution failure for multiple requests is handled correctly."""
async def delay_resolve(*args: object, **kwargs: object) -> list[ResolveResult]:
"""Delayed resolve() task."""
for _ in range(3):
await asyncio.sleep(0)
raise OSError(None, "DNS Resolution mock failure")
conn = aiohttp.TCPConnector(force_close=True)
req = make_client_request(
"GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock()
)
with (
mock.patch.object(conn._resolver, "resolve", delay_resolve),
mock.patch(
"aiohttp.connector.aiohappyeyeballs.start_connection",
side_effect=OSError(1, "Forced connection to fail"),
),
):
task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
# Let it create the internal task
await asyncio.sleep(0)
# Let that task start running
await asyncio.sleep(0)
# Ensure the task is running
assert len(conn._resolve_host_tasks) == 1
task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
with pytest.raises(
aiohttp.ClientConnectorError, match="DNS Resolution mock failure"
):
await task1
        # Verify that the task is finished
assert len(conn._resolve_host_tasks) == 0
with pytest.raises(
aiohttp.ClientConnectorError, match="DNS Resolution mock failure"
):
await task2
with pytest.raises(
aiohttp.ClientConnectorError, match="DNS Resolution mock failure"
):
await task3
async def test_multiple_dns_resolution_requests_cancelled(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
"""Verify that DNS resolution cancellation does not affect other tasks."""
async def delay_resolve(*args: object, **kwargs: object) -> list[ResolveResult]:
"""Delayed resolve() task."""
for _ in range(3):
await asyncio.sleep(0)
raise OSError(None, "DNS Resolution mock failure")
conn = aiohttp.TCPConnector(force_close=True)
req = make_client_request(
"GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock()
)
with (
mock.patch.object(conn._resolver, "resolve", delay_resolve),
mock.patch(
"aiohttp.connector.aiohappyeyeballs.start_connection",
side_effect=OSError(1, "Forced connection to fail"),
),
):
task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
# Let it create the internal task
await asyncio.sleep(0)
# Let that task start running
await asyncio.sleep(0)
# Ensure the task is running
assert len(conn._resolve_host_tasks) == 1
task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
task1.cancel()
with pytest.raises(asyncio.CancelledError):
await task1
with pytest.raises(
aiohttp.ClientConnectorError, match="DNS Resolution mock failure"
):
await task2
with pytest.raises(
aiohttp.ClientConnectorError, match="DNS Resolution mock failure"
):
await task3
        # Verify that the task is finished
assert len(conn._resolve_host_tasks) == 0
async def test_multiple_dns_resolution_requests_first_cancelled(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
"""Verify that first DNS resolution cancellation does not make other resolutions fail."""
async def delay_resolve(*args: object, **kwargs: object) -> list[ResolveResult]:
"""Delayed resolve() task."""
for _ in range(3):
await asyncio.sleep(0)
return [
{
"hostname": "localhost",
"host": "127.0.0.1",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
},
]
conn = aiohttp.TCPConnector(force_close=True)
req = make_client_request(
"GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock()
)
with (
mock.patch.object(conn._resolver, "resolve", delay_resolve),
mock.patch(
"aiohttp.connector.aiohappyeyeballs.start_connection",
side_effect=OSError(1, "Forced connection to fail"),
),
):
task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
# Let it create the internal task
await asyncio.sleep(0)
# Let that task start running
await asyncio.sleep(0)
# Ensure the task is running
assert len(conn._resolve_host_tasks) == 1
task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
task1.cancel()
with pytest.raises(asyncio.CancelledError):
await task1
# The second and third tasks should still make the connection
# even if the first one is cancelled
with pytest.raises(
aiohttp.ClientConnectorError, match="Forced connection to fail"
):
await task2
with pytest.raises(
aiohttp.ClientConnectorError, match="Forced connection to fail"
):
await task3
        # Verify that the task is finished
assert len(conn._resolve_host_tasks) == 0
async def test_multiple_dns_resolution_requests_first_fails_second_successful(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
"""Verify that first DNS resolution fails the first time and is successful the second time."""
attempt = 0
async def delay_resolve(*args: object, **kwargs: object) -> list[ResolveResult]:
"""Delayed resolve() task."""
nonlocal attempt
for _ in range(3):
await asyncio.sleep(0)
attempt += 1
if attempt == 1:
raise OSError(None, "DNS Resolution mock failure")
return [
{
"hostname": "localhost",
"host": "127.0.0.1",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
},
]
conn = aiohttp.TCPConnector(force_close=True)
req = make_client_request(
"GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock()
)
with (
mock.patch.object(conn._resolver, "resolve", delay_resolve),
mock.patch(
"aiohttp.connector.aiohappyeyeballs.start_connection",
side_effect=OSError(1, "Forced connection to fail"),
),
):
task1 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
# Let it create the internal task
await asyncio.sleep(0)
# Let that task start running
await asyncio.sleep(0)
# Ensure the task is running
assert len(conn._resolve_host_tasks) == 1
task2 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
with pytest.raises(
aiohttp.ClientConnectorError, match="DNS Resolution mock failure"
):
await task1
assert len(conn._resolve_host_tasks) == 0
# The second task should also get the dns resolution failure
with pytest.raises(
aiohttp.ClientConnectorError, match="DNS Resolution mock failure"
):
await task2
# The third task is created after the resolution finished so
# it should try again and succeed
task3 = asyncio.create_task(conn.connect(req, [], ClientTimeout()))
# Let it create the internal task
await asyncio.sleep(0)
# Let that task start running
await asyncio.sleep(0)
# Ensure the task is running
assert len(conn._resolve_host_tasks) == 1
with pytest.raises(
aiohttp.ClientConnectorError, match="Forced connection to fail"
):
await task3
        # Verify that the task is finished
assert len(conn._resolve_host_tasks) == 0
async def test_close_abort_closed_transports(loop: asyncio.AbstractEventLoop) -> None:
tr = mock.Mock()
conn = aiohttp.BaseConnector()
conn._cleanup_closed_transports.append(tr)
await conn.close()
assert not conn._cleanup_closed_transports
assert tr.abort.called
assert conn.closed
@pytest.mark.usefixtures("enable_cleanup_closed")
async def test_close_cancels_cleanup_closed_handle(
loop: asyncio.AbstractEventLoop,
) -> None:
conn = aiohttp.BaseConnector(enable_cleanup_closed=True)
assert conn._cleanup_closed_handle is not None
await conn.close()
assert conn._cleanup_closed_handle is None
async def test_ctor_with_default_loop(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector()
assert loop is conn._loop
await conn.close()
async def test_base_connector_allows_high_level_protocols(
loop: asyncio.AbstractEventLoop,
) -> None:
conn = aiohttp.BaseConnector()
assert conn.allowed_protocol_schema_set == {
"",
"http",
"https",
"ws",
"wss",
}
async def test_connect_with_limit(
loop: asyncio.AbstractEventLoop,
key: ConnectionKey,
make_client_request: _RequestMaker,
) -> None:
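    """With limit=1, a second connect should block until the first connection is released."""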
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request(
"GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock()
)
conn = aiohttp.BaseConnector(limit=1, limit_per_host=10)
conn._conns[key] = deque([(proto, loop.time())])
with mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
):
connection1 = await conn.connect(req, [], ClientTimeout())
assert connection1._protocol == proto
assert 1 == len(conn._acquired)
assert proto in conn._acquired
assert key in conn._acquired_per_host
assert proto in conn._acquired_per_host[key]
acquired = False
async def f() -> None:
nonlocal acquired
connection2 = await conn.connect(req, [], ClientTimeout())
acquired = True
assert 1 == len(conn._acquired)
assert 1 == len(conn._acquired_per_host[key])
connection2.release()
task = loop.create_task(f())
await asyncio.sleep(0.01)
assert not acquired
connection1.release()
await asyncio.sleep(0)
assert acquired
await task # type: ignore[unreachable]
await conn.close()
async def test_connect_queued_operation_tracing(
loop: asyncio.AbstractEventLoop,
key: ConnectionKey,
make_client_request: _RequestMaker,
) -> None:
session = mock.Mock()
trace_config_ctx = mock.Mock()
on_connection_queued_start = mock.AsyncMock()
on_connection_queued_end = mock.AsyncMock()
trace_config = aiohttp.TraceConfig(
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx)
)
trace_config.on_connection_queued_start.append(on_connection_queued_start)
trace_config.on_connection_queued_end.append(on_connection_queued_end)
trace_config.freeze()
traces = [Trace(session, trace_config, trace_config.trace_config_ctx())]
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request(
"GET", URL("http://localhost1:80"), loop=loop, response_class=mock.Mock()
)
conn = aiohttp.BaseConnector(limit=1)
conn._conns[key] = deque([(proto, loop.time())])
with mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
):
connection1 = await conn.connect(req, traces, ClientTimeout())
async def f() -> None:
connection2 = await conn.connect(req, traces, ClientTimeout())
on_connection_queued_start.assert_called_with(
session, trace_config_ctx, aiohttp.TraceConnectionQueuedStartParams()
)
on_connection_queued_end.assert_called_with(
session, trace_config_ctx, aiohttp.TraceConnectionQueuedEndParams()
)
connection2.release()
task = asyncio.ensure_future(f())
await asyncio.sleep(0.01)
connection1.release()
await task
await conn.close()
async def test_connect_reuseconn_tracing(
loop: asyncio.AbstractEventLoop,
key: ConnectionKey,
make_client_request: _RequestMaker,
) -> None:
session = mock.Mock()
trace_config_ctx = mock.Mock()
on_connection_reuseconn = mock.AsyncMock()
trace_config = aiohttp.TraceConfig(
trace_config_ctx_factory=mock.Mock(return_value=trace_config_ctx)
)
trace_config.on_connection_reuseconn.append(on_connection_reuseconn)
trace_config.freeze()
traces = [Trace(session, trace_config, trace_config.trace_config_ctx())]
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request(
"GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock()
)
conn = aiohttp.BaseConnector(limit=1)
conn._conns[key] = deque([(proto, loop.time())])
conn2 = await conn.connect(req, traces, ClientTimeout())
conn2.release()
on_connection_reuseconn.assert_called_with(
session, trace_config_ctx, aiohttp.TraceConnectionReuseconnParams()
)
await conn.close()
async def test_connect_with_limit_and_limit_per_host(
loop: asyncio.AbstractEventLoop,
key: ConnectionKey,
make_client_request: _RequestMaker,
) -> None:
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://localhost:80"), loop=loop)
conn = aiohttp.BaseConnector(limit=1000, limit_per_host=1)
conn._conns[key] = deque([(proto, loop.time())])
with mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
):
acquired = False
connection1 = await conn.connect(req, [], ClientTimeout())
async def f() -> None:
nonlocal acquired
connection2 = await conn.connect(req, [], ClientTimeout())
acquired = True
assert 1 == len(conn._acquired)
assert 1 == len(conn._acquired_per_host[key])
connection2.release()
task = loop.create_task(f())
await asyncio.sleep(0.01)
assert not acquired
connection1.release()
await asyncio.sleep(0)
assert acquired
await task # type: ignore[unreachable]
await conn.close()
async def test_connect_with_no_limit_and_limit_per_host(
loop: asyncio.AbstractEventLoop,
key: ConnectionKey,
make_client_request: _RequestMaker,
) -> None:
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://localhost1:80"), loop=loop)
conn = aiohttp.BaseConnector(limit=0, limit_per_host=1)
conn._conns[key] = deque([(proto, loop.time())])
with mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
):
acquired = False
connection1 = await conn.connect(req, [], ClientTimeout())
async def f() -> None:
nonlocal acquired
connection2 = await conn.connect(req, [], ClientTimeout())
acquired = True
connection2.release()
task = loop.create_task(f())
await asyncio.sleep(0.01)
assert not acquired
connection1.release()
await asyncio.sleep(0)
assert acquired
await task # type: ignore[unreachable]
await conn.close()
async def test_connect_with_no_limits(
loop: asyncio.AbstractEventLoop,
key: ConnectionKey,
make_client_request: _RequestMaker,
) -> None:
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://localhost:80"), loop=loop)
conn = aiohttp.BaseConnector(limit=0, limit_per_host=0)
conn._conns[key] = deque([(proto, loop.time())])
with mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
):
acquired = False
connection1 = await conn.connect(req, [], ClientTimeout())
async def f() -> None:
nonlocal acquired
connection2 = await conn.connect(req, [], ClientTimeout())
acquired = True
assert 1 == len(conn._acquired)
assert not conn._acquired_per_host
connection2.release()
task = loop.create_task(f())
await asyncio.sleep(0.01)
assert acquired
connection1.release()
await task
await conn.close()
async def test_connect_with_limit_cancelled(
loop: asyncio.AbstractEventLoop,
key: ConnectionKey,
make_client_request: _RequestMaker,
) -> None:
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://host:80"), loop=loop)
conn = aiohttp.BaseConnector(limit=1)
conn._conns[key] = deque([(proto, loop.time())])
with mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
):
connection = await conn.connect(req, [], ClientTimeout())
assert connection._protocol == proto
assert connection.transport == proto.transport
assert 1 == len(conn._acquired)
with pytest.raises(asyncio.TimeoutError):
# limit exhausted
await asyncio.wait_for(conn.connect(req, [], ClientTimeout()), 0.01)
connection.close()
await conn.close()
async def test_connect_with_capacity_release_waiters(
loop: asyncio.AbstractEventLoop,
) -> None:
async def check_with_exc(err: Exception) -> None:
conn = aiohttp.BaseConnector(limit=1)
with mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, side_effect=err
):
with pytest.raises(Exception):
req = mock.Mock()
await conn.connect(req, [], ClientTimeout())
assert not conn._waiters
await conn.close()
await check_with_exc(OSError(1, "permission error"))
await check_with_exc(RuntimeError())
await check_with_exc(asyncio.TimeoutError())
async def test_connect_with_limit_concurrent(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
proto = create_mocked_conn(loop)
proto.should_close = False
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://host:80"), loop=loop)
max_connections = 2
num_connections = 0
conn = aiohttp.BaseConnector(limit=max_connections)
# Use a real coroutine for _create_connection; a mock would mask
# problems that only happen when the method yields.
async def create_connection(
req: object, traces: object, timeout: object
) -> ResponseHandler:
nonlocal num_connections
num_connections += 1
await asyncio.sleep(0)
# Make a new transport mock each time because acquired
# transports are stored in a set. Reusing the same object
# messes with the count.
proto = create_mocked_conn(loop, should_close=False)
proto.is_connected.return_value = True
return proto
# Simulate something like a crawler. It opens a connection, does
# something with it, closes it, then creates tasks that make more
# connections and waits for them to finish. The crawler is started
# with multiple concurrent requests and stops when it hits a
# predefined maximum number of requests.
max_requests = 50
num_requests = 0
start_requests = max_connections + 1
async def f(start: bool = True) -> None:
nonlocal num_requests
if num_requests == max_requests:
return
num_requests += 1
if not start:
connection = await conn.connect(req, [], ClientTimeout())
await asyncio.sleep(0)
connection.release()
await asyncio.sleep(0)
tasks = [loop.create_task(f(start=False)) for i in range(start_requests)]
await asyncio.wait(tasks)
with mock.patch.object(conn, "_create_connection", create_connection):
await f()
await conn.close()
assert max_connections == num_connections
async def test_connect_waiters_cleanup(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://host:80"), loop=loop)
conn = aiohttp.BaseConnector(limit=1)
with mock.patch.object(conn, "_available_connections", return_value=0):
t = loop.create_task(conn.connect(req, [], ClientTimeout()))
await asyncio.sleep(0)
assert conn._waiters.keys()
t.cancel()
await asyncio.sleep(0)
assert not conn._waiters.keys()
await conn.close()
async def test_connect_waiters_cleanup_key_error(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://host:80"), loop=loop)
conn = aiohttp.BaseConnector(limit=1, limit_per_host=10)
with mock.patch.object(
conn, "_available_connections", autospec=True, spec_set=True, return_value=0
):
t = loop.create_task(conn.connect(req, [], ClientTimeout()))
await asyncio.sleep(0)
assert conn._waiters.keys()
# We delete the entry explicitly before the
# cancelled connection gets the loop again, so
# we expect a clean, non-failing termination.
conn._waiters.clear()
t.cancel()
await asyncio.sleep(0)
assert not conn._waiters.keys()
await conn.close()
async def test_close_with_acquired_connection(
loop: asyncio.AbstractEventLoop,
key: ConnectionKey,
make_client_request: _RequestMaker,
) -> None:
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("http://host:80"), loop=loop)
conn = aiohttp.BaseConnector(limit=1)
conn._conns[key] = deque([(proto, loop.time())])
with mock.patch.object(
conn, "_create_connection", autospec=True, spec_set=True, return_value=proto
):
connection = await conn.connect(req, [], ClientTimeout())
assert 1 == len(conn._acquired)
await conn.close()
assert 0 == len(conn._acquired)
assert conn.closed
proto.close.assert_called_with()
assert not connection.closed
connection.close()
assert connection.closed
async def test_default_force_close(loop: asyncio.AbstractEventLoop) -> None:
connector = aiohttp.BaseConnector()
assert not connector.force_close
await connector.close()
async def test_limit_property(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector(limit=15)
assert 15 == conn.limit
await conn.close()
async def test_limit_per_host_property(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector(limit_per_host=15)
assert 15 == conn.limit_per_host
await conn.close()
async def test_limit_property_default(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector()
assert conn.limit == 100
await conn.close()
async def test_limit_per_host_property_default(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector()
assert conn.limit_per_host == 0
await conn.close()
async def test_force_close_and_explicit_keep_alive(
loop: asyncio.AbstractEventLoop,
) -> None:
aiohttp.BaseConnector(force_close=True)
aiohttp.BaseConnector(force_close=True, keepalive_timeout=None)
with pytest.raises(ValueError):
aiohttp.BaseConnector(keepalive_timeout=30, force_close=True)
async def test_error_on_connection(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector(limit=1, limit_per_host=10)
req = mock.Mock()
req.connection_key = key
proto = create_mocked_conn(loop)
i = 0
fut = loop.create_future()
exc = OSError()
async def create_connection(
req: object, traces: object, timeout: object
) -> ResponseHandler:
nonlocal i
i += 1
if i == 1:
await fut
raise exc
elif i == 2:
return proto
assert False
with mock.patch.object(conn, "_create_connection", create_connection):
t1 = loop.create_task(conn.connect(req, [], ClientTimeout()))
t2 = loop.create_task(conn.connect(req, [], ClientTimeout()))
await asyncio.sleep(0)
assert not t1.done()
assert not t2.done()
assert len(conn._acquired_per_host[key]) == 1
fut.set_result(None)
with pytest.raises(OSError):
await t1
ret = await t2
assert len(conn._acquired_per_host[key]) == 1
assert ret._key == key
assert ret.protocol == proto
assert proto in conn._acquired
ret.release()
await conn.close()
async def test_cancelled_waiter(loop: asyncio.AbstractEventLoop) -> None:
conn = aiohttp.BaseConnector(limit=1)
req = mock.Mock()
req.connection_key = "key"
proto = create_mocked_conn(loop)
async def create_connection(req: object, traces: object, timeout: object) -> ResponseHandler:
await asyncio.sleep(1)
return proto
with mock.patch.object(conn, "_create_connection", create_connection):
conn._acquired.add(proto)
conn2 = loop.create_task(conn.connect(req, [], ClientTimeout()))
await asyncio.sleep(0)
conn2.cancel()
with pytest.raises(asyncio.CancelledError):
await conn2
await conn.close()
async def test_error_on_connection_with_cancelled_waiter(
loop: asyncio.AbstractEventLoop, key: ConnectionKey
) -> None:
conn = aiohttp.BaseConnector(limit=1, limit_per_host=10)
req = mock.Mock()
req.connection_key = key
proto = create_mocked_conn()
i = 0
fut1 = loop.create_future()
fut2 = loop.create_future()
exc = OSError()
async def create_connection(
req: object, traces: object, timeout: object
) -> ResponseHandler:
nonlocal i
i += 1
if i == 1:
await fut1
raise exc
if i == 2:
await fut2
elif i == 3:
return proto
assert False
with mock.patch.object(conn, "_create_connection", create_connection):
t1 = loop.create_task(conn.connect(req, [], ClientTimeout()))
t2 = loop.create_task(conn.connect(req, [], ClientTimeout()))
t3 = loop.create_task(conn.connect(req, [], ClientTimeout()))
await asyncio.sleep(0)
assert not t1.done()
assert not t2.done()
assert len(conn._acquired_per_host[key]) == 1
fut1.set_result(None)
fut2.cancel()
with pytest.raises(OSError):
await t1
with pytest.raises(asyncio.CancelledError):
await t2
ret = await t3
assert len(conn._acquired_per_host[key]) == 1
assert ret._key == key
assert ret.protocol == proto
assert proto in conn._acquired
ret.release()
await conn.close()
async def test_tcp_connector(
aiohttp_client: AiohttpClient, loop: asyncio.AbstractEventLoop
) -> None:
async def handler(request: web.Request) -> web.Response:
return web.Response()
app = web.Application()
app.router.add_get("/", handler)
client = await aiohttp_client(app)
r = await client.get("/")
assert r.status == 200
@pytest.mark.skipif(not hasattr(socket, "AF_UNIX"), reason="requires UNIX sockets")
async def test_unix_connector_not_found( # type: ignore[misc]
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
connector = aiohttp.UnixConnector("/" + uuid.uuid4().hex)
req = make_client_request("GET", URL("http://www.python.org"), loop=loop)
with pytest.raises(aiohttp.ClientConnectorError):
await connector.connect(req, [], ClientTimeout())
@pytest.mark.skipif(not hasattr(socket, "AF_UNIX"), reason="requires UNIX sockets")
async def test_unix_connector_permission( # type: ignore[misc]
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
m = mock.AsyncMock(side_effect=PermissionError())
with mock.patch.object(loop, "create_unix_connection", m):
connector = aiohttp.UnixConnector("/" + uuid.uuid4().hex)
req = make_client_request("GET", URL("http://www.python.org"), loop=loop)
with pytest.raises(aiohttp.ClientConnectorError):
await connector.connect(req, [], ClientTimeout())
@pytest.mark.skipif(
platform.system() != "Windows", reason="Proactor Event loop present only in Windows"
)
async def test_named_pipe_connector_wrong_loop(
selector_loop: asyncio.AbstractEventLoop, pipe_name: str
) -> None:
with pytest.raises(RuntimeError):
aiohttp.NamedPipeConnector(pipe_name)
@pytest.mark.skipif(
platform.system() != "Windows", reason="Proactor Event loop present only in Windows"
)
async def test_named_pipe_connector_not_found( # type: ignore[misc]
proactor_loop: asyncio.AbstractEventLoop,
pipe_name: str,
make_client_request: _RequestMaker,
) -> None:
asyncio.set_event_loop(proactor_loop)
connector = aiohttp.NamedPipeConnector(pipe_name)
req = make_client_request("GET", URL("http://www.python.org"), loop=proactor_loop)
with pytest.raises(aiohttp.ClientConnectorError):
await connector.connect(req, [], ClientTimeout())
@pytest.mark.skipif(
platform.system() != "Windows", reason="Proactor Event loop present only in Windows"
)
async def test_named_pipe_connector_permission( # type: ignore[misc]
proactor_loop: asyncio.AbstractEventLoop,
pipe_name: str,
make_client_request: _RequestMaker,
) -> None:
m = mock.AsyncMock(side_effect=PermissionError())
with mock.patch.object(proactor_loop, "create_pipe_connection", m):
asyncio.set_event_loop(proactor_loop)
connector = aiohttp.NamedPipeConnector(pipe_name)
req = make_client_request(
"GET", URL("http://www.python.org"), loop=proactor_loop
)
with pytest.raises(aiohttp.ClientConnectorError):
await connector.connect(req, [], ClientTimeout())
async def test_default_use_dns_cache() -> None:
conn = aiohttp.TCPConnector()
assert conn.use_dns_cache
await conn.close()
async def test_resolver_not_called_with_address_is_ip(
loop: asyncio.AbstractEventLoop, make_client_request: _RequestMaker
) -> None:
resolver = mock.MagicMock()
connector = aiohttp.TCPConnector(resolver=resolver)
req = make_client_request(
"GET",
URL(f"http://127.0.0.1:{unused_port()}"),
loop=loop,
response_class=mock.Mock(),
)
with pytest.raises(OSError):
await connector.connect(req, [], ClientTimeout())
resolver.resolve.assert_not_called()
await connector.close()
async def test_tcp_connector_raise_connector_ssl_error(
aiohttp_server: AiohttpServer, ssl_ctx: ssl.SSLContext
) -> None:
async def handler(request: web.Request) -> NoReturn:
assert False
app = web.Application()
app.router.add_get("/", handler)
srv = await aiohttp_server(app, ssl=ssl_ctx)
port = unused_port()
conn = aiohttp.TCPConnector(local_addr=("127.0.0.1", port))
session = aiohttp.ClientSession(connector=conn)
url = srv.make_url("/")
err = aiohttp.ClientConnectorCertificateError
with pytest.raises(err) as ctx:
await session.get(url)
assert isinstance(ctx.value, aiohttp.ClientConnectorCertificateError)
assert isinstance(ctx.value.certificate_error, ssl.SSLError)
await session.close()
await conn.close()
@pytest.mark.parametrize(
"host",
(
pytest.param("127.0.0.1", id="ip address"),
pytest.param("localhost", id="domain name"),
pytest.param("localhost.", id="fully-qualified domain name"),
pytest.param(
"localhost...", id="fully-qualified domain name with multiple trailing dots"
),
pytest.param("príklad.localhost.", id="idna fully-qualified domain name"),
),
)
async def test_tcp_connector_do_not_raise_connector_ssl_error(
aiohttp_server: AiohttpServer,
ssl_ctx: ssl.SSLContext,
client_ssl_ctx: ssl.SSLContext,
host: str,
) -> None:
async def handler(request: web.Request) -> web.Response:
return web.Response()
app = web.Application()
app.router.add_get("/", handler)
srv = await aiohttp_server(app, ssl=ssl_ctx)
port = unused_port()
conn = aiohttp.TCPConnector(local_addr=("127.0.0.1", port))
# resolving something.localhost with the real DNS resolver does not work on macOS, so we have a stub.
async def _resolve_host(
host: str, port: int, traces: object = None
) -> list[ResolveResult]:
return [
{
"hostname": host,
"host": "127.0.0.1",
"port": port,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
},
{
"hostname": host,
"host": "::1",
"port": port,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
},
]
with mock.patch.object(
conn, "_resolve_host", autospec=True, spec_set=True, side_effect=_resolve_host
):
session = aiohttp.ClientSession(connector=conn)
url = srv.make_url("/")
r = await session.get(url.with_host(host), ssl=client_ssl_ctx)
r.release()
first_conn = next(iter(conn._conns.values()))[0][0]
assert first_conn.transport is not None
try:
_sslcontext = first_conn.transport._ssl_protocol._sslcontext # type: ignore[attr-defined]
except AttributeError:
_sslcontext = first_conn.transport._sslcontext # type: ignore[attr-defined]
assert _sslcontext is client_ssl_ctx
r.close()
await session.close()
await conn.close()
async def test_tcp_connector_uses_provided_local_addr(
aiohttp_server: AiohttpServer,
) -> None:
async def handler(request: web.Request) -> web.Response:
return web.Response()
app = web.Application()
app.router.add_get("/", handler)
srv = await aiohttp_server(app)
port = unused_port()
conn = aiohttp.TCPConnector(local_addr=("127.0.0.1", port))
session = aiohttp.ClientSession(connector=conn)
url = srv.make_url("/")
r = await session.get(url)
r.release()
first_conn = next(iter(conn._conns.values()))[0][0]
assert first_conn.transport is not None
assert first_conn.transport.get_extra_info("sockname") == ("127.0.0.1", port)
r.close()
await session.close()
await conn.close()
async def test_unix_connector(
unix_server: Callable[[web.Application], Awaitable[None]], unix_sockname: str
) -> None:
async def handler(request: web.Request) -> web.Response:
return web.Response()
app = web.Application()
app.router.add_get("/", handler)
await unix_server(app)
url = "http://127.0.0.1/"
connector = aiohttp.UnixConnector(unix_sockname)
assert unix_sockname == connector.path
assert connector.allowed_protocol_schema_set == {
"",
"http",
"https",
"ws",
"wss",
"unix",
}
session = ClientSession(connector=connector)
r = await session.get(url)
assert r.status == 200
r.close()
await session.close()
@pytest.mark.skipif(
platform.system() != "Windows", reason="Proactor Event loop present only in Windows"
)
async def test_named_pipe_connector(
proactor_loop: asyncio.AbstractEventLoop,
named_pipe_server: Callable[[web.Application], Awaitable[None]],
pipe_name: str,
) -> None:
async def handler(request: web.Request) -> web.Response:
return web.Response()
app = web.Application()
app.router.add_get("/", handler)
await named_pipe_server(app)
url = "http://this-does-not-matter.com"
connector = aiohttp.NamedPipeConnector(pipe_name)
assert pipe_name == connector.path
assert connector.allowed_protocol_schema_set == {
"",
"http",
"https",
"ws",
"wss",
"npipe",
}
session = ClientSession(connector=connector)
r = await session.get(url)
assert r.status == 200
r.close()
await session.close()
class TestDNSCacheTable:
host1 = ("localhost", 80)
host2 = ("foo", 80)
result1: ResolveResult = {
"hostname": "localhost",
"host": "127.0.0.1",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
result2: ResolveResult = {
"hostname": "foo",
"host": "127.0.0.2",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
@pytest.fixture
def dns_cache_table(self) -> _DNSCacheTable:
return _DNSCacheTable()
def test_next_addrs_basic(self, dns_cache_table: _DNSCacheTable) -> None:
dns_cache_table.add(self.host1, [self.result1])
dns_cache_table.add(self.host2, [self.result2])
addrs = dns_cache_table.next_addrs(self.host1)
assert addrs == [self.result1]
addrs = dns_cache_table.next_addrs(self.host2)
assert addrs == [self.result2]
with pytest.raises(KeyError):
dns_cache_table.next_addrs(("no-such-host", 80))
def test_remove(self, dns_cache_table: _DNSCacheTable) -> None:
dns_cache_table.add(self.host1, [self.result1])
dns_cache_table.remove(self.host1)
with pytest.raises(KeyError):
dns_cache_table.next_addrs(self.host1)
def test_clear(self, dns_cache_table: _DNSCacheTable) -> None:
dns_cache_table.add(self.host1, [self.result1])
dns_cache_table.clear()
with pytest.raises(KeyError):
dns_cache_table.next_addrs(self.host1)
def test_not_expired_ttl_None(self, dns_cache_table: _DNSCacheTable) -> None:
dns_cache_table.add(self.host1, [self.result1])
assert not dns_cache_table.expired(self.host1)
def test_not_expired_ttl(self) -> None:
dns_cache_table = _DNSCacheTable(ttl=0.1)
dns_cache_table.add(self.host1, [self.result1])
assert not dns_cache_table.expired(self.host1)
def test_expired_ttl(self, monkeypatch: pytest.MonkeyPatch) -> None:
dns_cache_table = _DNSCacheTable(ttl=1)
monkeypatch.setattr("aiohttp.connector.monotonic", lambda: 1)
dns_cache_table.add(self.host1, [self.result1])
monkeypatch.setattr("aiohttp.connector.monotonic", lambda: 2)
assert not dns_cache_table.expired(self.host1)
monkeypatch.setattr("aiohttp.connector.monotonic", lambda: 3)
assert dns_cache_table.expired(self.host1)
def test_never_expire(self, monkeypatch: pytest.MonkeyPatch) -> None:
dns_cache_table = _DNSCacheTable(ttl=None)
monkeypatch.setattr("aiohttp.connector.monotonic", lambda: 1)
dns_cache_table.add(self.host1, [self.result1])
monkeypatch.setattr("aiohttp.connector.monotonic", lambda: 10000000)
assert not dns_cache_table.expired(self.host1)
def test_always_expire(self, monkeypatch: pytest.MonkeyPatch) -> None:
dns_cache_table = _DNSCacheTable(ttl=0)
monkeypatch.setattr("aiohttp.connector.monotonic", lambda: 1)
dns_cache_table.add(self.host1, [self.result1])
monkeypatch.setattr("aiohttp.connector.monotonic", lambda: 1.00001)
assert dns_cache_table.expired(self.host1)
def test_next_addrs(self, dns_cache_table: _DNSCacheTable) -> None:
result3: ResolveResult = {
"hostname": "foo",
"host": "127.0.0.3",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
dns_cache_table.add(self.host2, [self.result1, self.result2, result3])
# Each call to next_addrs returns the hosts using
# a round-robin strategy.
addrs = dns_cache_table.next_addrs(self.host2)
assert addrs == [self.result1, self.result2, result3]
addrs = dns_cache_table.next_addrs(self.host2)
assert addrs == [self.result2, result3, self.result1]
addrs = dns_cache_table.next_addrs(self.host2)
assert addrs == [result3, self.result1, self.result2]
addrs = dns_cache_table.next_addrs(self.host2)
assert addrs == [self.result1, self.result2, result3]
def test_next_addrs_single(self, dns_cache_table: _DNSCacheTable) -> None:
dns_cache_table.add(self.host2, [self.result1])
addrs = dns_cache_table.next_addrs(self.host2)
assert addrs == [self.result1]
addrs = dns_cache_table.next_addrs(self.host2)
assert addrs == [self.result1]
async def test_connector_cache_trace_race() -> None:
class DummyTracer(Trace):
def __init__(self) -> None:
"""Dummy"""
async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None:
connector._cached_hosts.remove(("", 0))
token: ResolveResult = {
"hostname": "localhost",
"host": "127.0.0.1",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
connector = TCPConnector()
connector._cached_hosts.add(("", 0), [token])
traces = [DummyTracer()]
assert await connector._resolve_host("", 0, traces) == [token]
await connector.close()
async def test_connector_throttle_trace_race(loop: asyncio.AbstractEventLoop) -> None:
key = ("", 0)
token: ResolveResult = {
"hostname": "localhost",
"host": "127.0.0.1",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
class DummyTracer(Trace):
def __init__(self) -> None:
"""Dummy"""
async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None:
futures = connector._throttle_dns_futures.pop(key)
for fut in futures:
fut.set_result(None)
connector._cached_hosts.add(key, [token])
connector = TCPConnector()
connector._throttle_dns_futures[key] = set()
traces = [DummyTracer()]
assert await connector._resolve_host("", 0, traces) == [token]
await connector.close()
async def test_connector_resolve_in_case_of_trace_cache_miss_exception(
loop: asyncio.AbstractEventLoop,
) -> None:
token: ResolveResult = {
"hostname": "localhost",
"host": "127.0.0.1",
"port": 80,
"family": socket.AF_INET,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
request_count = 0
class DummyTracer(Trace):
def __init__(self) -> None:
"""Dummy"""
async def send_dns_cache_hit(self, *args: object, **kwargs: object) -> None:
"""Dummy send_dns_cache_hit"""
async def send_dns_resolvehost_start(
self, *args: object, **kwargs: object
) -> None:
"""Dummy send_dns_resolvehost_start"""
async def send_dns_resolvehost_end(
self, *args: object, **kwargs: object
) -> None:
"""Dummy send_dns_resolvehost_end"""
async def send_dns_cache_miss(self, *args: object, **kwargs: object) -> None:
nonlocal request_count
request_count += 1
if request_count <= 1:
raise Exception("first attempt")
async def resolve_response() -> list[ResolveResult]:
await asyncio.sleep(0)
return [token]
with mock.patch("aiohttp.connector.DefaultResolver") as m_resolver:
m_resolver().resolve.return_value = resolve_response()
m_resolver().close = mock.AsyncMock()
connector = TCPConnector()
traces = [DummyTracer()]
with pytest.raises(Exception):
await connector._resolve_host("", 0, traces)
assert await connector._resolve_host("", 0, traces) == [token]
await connector.close()
async def test_connector_does_not_remove_needed_waiters(
loop: asyncio.AbstractEventLoop,
key: ConnectionKey,
make_client_request: _RequestMaker,
) -> None:
proto = create_mocked_conn(loop)
proto.is_connected.return_value = True
req = make_client_request("GET", URL("https://localhost:80"), loop=loop)
connection_key = req.connection_key
async def await_connection_and_check_waiters() -> None:
connection = await connector.connect(req, [], ClientTimeout())
try:
assert connection_key in connector._waiters
assert dummy_waiter in connector._waiters[connection_key]
finally:
connection.close()
async def allow_connection_and_add_dummy_waiter() -> None:
# `asyncio.gather` may execute the coroutines out of order,
# so skip one event loop cycle in that case.
if connection_key not in connector._waiters:
await asyncio.sleep(0)
list(connector._waiters[connection_key])[0].set_result(None)
del connector._waiters[connection_key]
connector._waiters[connection_key][dummy_waiter] = None
connector = aiohttp.BaseConnector()
with mock.patch.object(
connector,
"_available_connections",
autospec=True,
spec_set=True,
side_effect=[0, 1, 1, 1],
):
connector._conns[key] = deque([(proto, loop.time())])
with mock.patch.object(
connector,
"_create_connection",
autospec=True,
spec_set=True,
return_value=proto,
):
dummy_waiter = loop.create_future()
await asyncio.gather(
await_connection_and_check_waiters(),
allow_connection_and_add_dummy_waiter(),
)
await connector.close()
def test_connector_multiple_event_loop(make_client_request: _RequestMaker) -> None:
"""Test the connector with multiple event loops."""
async def async_connect() -> Literal[True]:
conn = aiohttp.TCPConnector()
loop = asyncio.get_running_loop()
req = make_client_request("GET", URL("https://127.0.0.1"), loop=loop)
with suppress(aiohttp.ClientConnectorError):
with mock.patch.object(
conn._loop,
"create_connection",
autospec=True,
spec_set=True,
side_effect=ssl.CertificateError,
):
await conn.connect(req, [], ClientTimeout())
return True
def test_connect() -> Literal[True]:
loop = asyncio.new_event_loop()
try:
return loop.run_until_complete(async_connect())
finally:
loop.close()
with futures.ThreadPoolExecutor() as executor:
res_list = [executor.submit(test_connect) for _ in range(2)]
raw_response_list = [res.result() for res in futures.as_completed(res_list)]
assert raw_response_list == [True, True]
async def test_tcp_connector_socket_factory(
loop: asyncio.AbstractEventLoop,
start_connection: mock.AsyncMock,
make_client_request: _RequestMaker,
) -> None:
"""Check that socket factory is called"""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
start_connection.return_value = s
local_addr = None
socket_factory: Callable[[AddrInfoType], socket.socket] = lambda _: s
happy_eyeballs_delay = 0.123
interleave = 3
conn = aiohttp.TCPConnector(
interleave=interleave,
local_addr=local_addr,
happy_eyeballs_delay=happy_eyeballs_delay,
socket_factory=socket_factory,
)
with mock.patch.object(
conn._loop,
"create_connection",
autospec=True,
spec_set=True,
return_value=(mock.Mock(), mock.Mock()),
):
host = "127.0.0.1"
port = 443
req = make_client_request("GET", URL(f"https://{host}:{port}"), loop=loop)
with closing(await conn.connect(req, [], ClientTimeout())):
pass
await conn.close()
start_connection.assert_called_with(
addr_infos=[
(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", (host, port))
],
local_addr_infos=local_addr,
happy_eyeballs_delay=happy_eyeballs_delay,
interleave=interleave,
loop=loop,
socket_factory=socket_factory,
)
def test_default_ssl_context_creation_without_ssl() -> None:
"""Verify _make_ssl_context does not raise when ssl is not available."""
with mock.patch.object(connector_module, "ssl", None):
assert connector_module._make_ssl_context(False) is None
assert connector_module._make_ssl_context(True) is None
def _acquired_connection(
conn: aiohttp.BaseConnector, proto: ResponseHandler, key: ConnectionKey
) -> Connection:
conn._acquired.add(proto)
conn._acquired_per_host[key].add(proto)
return Connection(conn, key, proto, conn._loop)
async def test_available_connections_with_limit_per_host(
key: ConnectionKey, other_host_key2: ConnectionKey
) -> None:
"""Verify expected values based on active connections with host limit."""
conn = aiohttp.BaseConnector(limit=3, limit_per_host=2)
assert conn._available_connections(key) == 2
assert conn._available_connections(other_host_key2) == 2
proto1 = create_mocked_conn()
connection1 = _acquired_connection(conn, proto1, key)
assert conn._available_connections(key) == 1
assert conn._available_connections(other_host_key2) == 2
proto2 = create_mocked_conn()
connection2 = _acquired_connection(conn, proto2, key)
assert conn._available_connections(key) == 0
assert conn._available_connections(other_host_key2) == 1
connection1.close()
assert conn._available_connections(key) == 1
assert conn._available_connections(other_host_key2) == 2
connection2.close()
other_proto1 = create_mocked_conn()
other_connection1 = _acquired_connection(conn, other_proto1, other_host_key2)
assert conn._available_connections(key) == 2
assert conn._available_connections(other_host_key2) == 1
other_connection1.close()
assert conn._available_connections(key) == 2
assert conn._available_connections(other_host_key2) == 2
@pytest.mark.parametrize("limit_per_host", [0, 10])
async def test_available_connections_without_limit_per_host( # type: ignore[misc]
key: ConnectionKey, other_host_key2: ConnectionKey, limit_per_host: int
) -> None:
"""Verify expected values based on active connections with higher host limit."""
conn = aiohttp.BaseConnector(limit=3, limit_per_host=limit_per_host)
assert conn._available_connections(key) == 3
assert conn._available_connections(other_host_key2) == 3
proto1 = create_mocked_conn()
connection1 = _acquired_connection(conn, proto1, key)
assert conn._available_connections(key) == 2
assert conn._available_connections(other_host_key2) == 2
proto2 = create_mocked_conn()
connection2 = _acquired_connection(conn, proto2, key)
assert conn._available_connections(key) == 1
assert conn._available_connections(other_host_key2) == 1
connection1.close()
assert conn._available_connections(key) == 2
assert conn._available_connections(other_host_key2) == 2
connection2.close()
other_proto1 = create_mocked_conn()
other_connection1 = _acquired_connection(conn, other_proto1, other_host_key2)
assert conn._available_connections(key) == 2
assert conn._available_connections(other_host_key2) == 2
other_connection1.close()
assert conn._available_connections(key) == 3
assert conn._available_connections(other_host_key2) == 3
async def test_available_connections_no_limits(
key: ConnectionKey, other_host_key2: ConnectionKey
) -> None:
"""Verify expected values based on active connections with no limits."""
# No limits is a special case where available connections should always be 1.
conn = aiohttp.BaseConnector(limit=0, limit_per_host=0)
assert conn._available_connections(key) == 1
assert conn._available_connections(other_host_key2) == 1
proto1 = create_mocked_conn()
connection1 = _acquired_connection(conn, proto1, key)
assert conn._available_connections(key) == 1
assert conn._available_connections(other_host_key2) == 1
connection1.close()
assert conn._available_connections(key) == 1
assert conn._available_connections(other_host_key2) == 1
async def test_connect_tunnel_connection_release(
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test _ConnectTunnelConnection.release() does not pool the connection."""
connector = mock.create_autospec(
aiohttp.BaseConnector, spec_set=True, instance=True
)
key = mock.create_autospec(ConnectionKey, spec_set=True, instance=True)
protocol = mock.create_autospec(ResponseHandler, spec_set=True, instance=True)
# Create a connect tunnel connection
conn = _ConnectTunnelConnection(connector, key, protocol, loop)
# Verify protocol is set
assert conn._protocol is protocol
# Release should do nothing (not pool the connection)
conn.release()
# Protocol should still be there (not released to pool)
assert conn._protocol is protocol
# Connector._release should NOT have been called
connector._release.assert_not_called()
# Clean up to avoid resource warning
conn.close()
|
./temp_repos/aiohttp/aiohttp/connector.py
|
./temp_repos/aiohttp/tests/test_connector.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Connection'.
Context:
- Class Name: Connection
- Dependencies to Mock: path, key, limit, loop, ttl, keepalive_timeout, force_close, connector, protocol, closed_future, limit_per_host
- Key Imports: collections, contextlib, tracing, helpers, typing, aiohappyeyeballs, ssl, client_proto, asyncio, itertools
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
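A minimal sketch of what such a test could look like, assuming the Connection(connector, key, protocol, loop) constructor used in the tests above; the exact arguments _release() receives are an implementation detail, so only the call itself is asserted here.
import asyncio
import unittest
from unittest import mock

from aiohttp.connector import Connection


class TestConnection(unittest.TestCase):
    def setUp(self) -> None:
        # Arrange: mock every collaborator; only the event loop is real.
        # Assumes the Connection(connector, key, protocol, loop) shape
        # used in the tests above.
        self.loop = asyncio.new_event_loop()
        self.connector = mock.MagicMock()
        self.key = mock.MagicMock()
        self.protocol = mock.MagicMock()

    def tearDown(self) -> None:
        self.loop.close()

    def test_release_returns_protocol_to_connector(self) -> None:
        # Arrange
        conn = Connection(self.connector, self.key, self.protocol, self.loop)
        # Act
        conn.release()
        # Assert: the protocol went back to the connector and the
        # connection no longer owns it.
        self.connector._release.assert_called_once()
        self.assertTrue(conn.closed)

    def test_close_after_release_is_a_noop(self) -> None:
        # Arrange
        conn = Connection(self.connector, self.key, self.protocol, self.loop)
        conn.release()
        self.connector._release.reset_mock()
        # Act: closing an already-released connection is the failure
        # path; it must not release the protocol a second time.
        conn.close()
        # Assert
        self.connector._release.assert_not_called()


if __name__ == "__main__":
    unittest.main()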
|
Connection
|
python
|
"""Low level HTTP server."""
import asyncio
import warnings
from collections.abc import Awaitable, Callable
from typing import Any, Generic, TypeVar, overload
from .abc import AbstractStreamWriter
from .http_parser import RawRequestMessage
from .streams import StreamReader
from .web_protocol import RequestHandler
from .web_request import BaseRequest
from .web_response import StreamResponse
__all__ = ("Server",)
_Request = TypeVar("_Request", bound=BaseRequest)
_RequestFactory = Callable[
[
RawRequestMessage,
StreamReader,
"RequestHandler[_Request]",
AbstractStreamWriter,
"asyncio.Task[None]",
],
_Request,
]
class Server(Generic[_Request]):
request_factory: _RequestFactory[_Request]
@overload
def __init__(
self: "Server[BaseRequest]",
handler: Callable[[_Request], Awaitable[StreamResponse]],
*,
debug: bool | None = None,
handler_cancellation: bool = False,
**kwargs: Any, # TODO(PY311): Use Unpack to define kwargs from RequestHandler
) -> None: ...
@overload
def __init__(
self,
handler: Callable[[_Request], Awaitable[StreamResponse]],
*,
request_factory: _RequestFactory[_Request] | None,
debug: bool | None = None,
handler_cancellation: bool = False,
**kwargs: Any,
) -> None: ...
def __init__(
self,
handler: Callable[[_Request], Awaitable[StreamResponse]],
*,
request_factory: _RequestFactory[_Request] | None = None,
debug: bool | None = None,
handler_cancellation: bool = False,
**kwargs: Any,
) -> None:
if debug is not None:
warnings.warn(
"debug argument is no-op since 4.0 and scheduled for removal in 5.0",
DeprecationWarning,
stacklevel=2,
)
self._loop = asyncio.get_running_loop()
self._connections: dict[RequestHandler[_Request], asyncio.Transport] = {}
self._kwargs = kwargs
# requests_count is the total number of requests processed by the
# server over its lifetime.
self.requests_count = 0
self.request_handler = handler
self.request_factory = request_factory or self._make_request # type: ignore[assignment]
self.handler_cancellation = handler_cancellation
@property
def connections(self) -> list[RequestHandler[_Request]]:
return list(self._connections.keys())
def connection_made(
self, handler: RequestHandler[_Request], transport: asyncio.Transport
) -> None:
self._connections[handler] = transport
def connection_lost(
self, handler: RequestHandler[_Request], exc: BaseException | None = None
) -> None:
if handler in self._connections:
if handler._task_handler:
handler._task_handler.add_done_callback(
lambda f: self._connections.pop(handler, None)
)
else:
del self._connections[handler]
def _make_request(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: RequestHandler[BaseRequest],
writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
) -> BaseRequest:
return BaseRequest(message, payload, protocol, writer, task, self._loop)
def pre_shutdown(self) -> None:
for conn in self._connections:
conn.close()
async def shutdown(self, timeout: float | None = None) -> None:
coros = (conn.shutdown(timeout) for conn in self._connections)
await asyncio.gather(*coros)
self._connections.clear()
def __call__(self) -> RequestHandler[_Request]:
try:
return RequestHandler(self, loop=self._loop, **self._kwargs)
except TypeError:
# Failsafe creation: remove all custom handler_args
kwargs = {
k: v
for k, v in self._kwargs.items()
if k in ["debug", "access_log_class"]
}
return RequestHandler(self, loop=self._loop, **kwargs)
|
import asyncio
import socket
from contextlib import suppress
from typing import NoReturn
from unittest import mock
import pytest
from aiohttp import client, web
from aiohttp.http_exceptions import BadHttpMethod, BadStatusLine
from aiohttp.pytest_plugin import AiohttpClient, AiohttpRawServer
async def test_simple_server(
aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient
) -> None:
async def handler(request: web.BaseRequest) -> web.Response:
return web.Response(text=str(request.rel_url))
server = await aiohttp_raw_server(handler)
cli = await aiohttp_client(server)
resp = await cli.get("/path/to")
assert resp.status == 200
txt = await resp.text()
assert txt == "/path/to"
async def test_unsupported_upgrade(
aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient
) -> None:
# don't fail if a client probes for an unsupported protocol upgrade
# https://github.com/aio-libs/aiohttp/issues/6446#issuecomment-999032039
async def handler(request: web.BaseRequest) -> web.Response:
return web.Response(body=await request.read())
upgrade_headers = {"Connection": "Upgrade", "Upgrade": "unsupported_proto"}
server = await aiohttp_raw_server(handler)
cli = await aiohttp_client(server)
test_data = b"Test"
resp = await cli.post("/path/to", data=test_data, headers=upgrade_headers)
assert resp.status == 200
data = await resp.read()
assert data == test_data
async def test_raw_server_not_http_exception(
aiohttp_raw_server: AiohttpRawServer,
aiohttp_client: AiohttpClient,
loop: asyncio.AbstractEventLoop,
) -> None:
# disable debug mode not to print traceback
loop.set_debug(False)
exc = RuntimeError("custom runtime error")
async def handler(request: web.BaseRequest) -> NoReturn:
raise exc
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
resp = await cli.get("/path/to")
assert resp.status == 500
assert resp.headers["Content-Type"].startswith("text/plain")
txt = await resp.text()
assert txt.startswith("500 Internal Server Error")
assert "Traceback" not in txt
logger.exception.assert_called_with(
"Error handling request from %s", cli.host, exc_info=exc
)
async def test_raw_server_logs_invalid_method_with_loop_debug(
aiohttp_raw_server: AiohttpRawServer,
aiohttp_client: AiohttpClient,
loop: asyncio.AbstractEventLoop,
) -> None:
exc = BadHttpMethod(b"\x16\x03\x03\x01F\x01".decode(), "error")
async def handler(request: web.BaseRequest) -> NoReturn:
raise exc
loop = asyncio.get_event_loop()
loop.set_debug(True)
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
resp = await cli.get("/path/to")
assert resp.status == 500
assert resp.headers["Content-Type"].startswith("text/plain")
txt = await resp.text()
assert "Traceback (most recent call last):\n" in txt
# BadHttpMethod should be logged as debug
# on the first request since the client may
# be probing for TLS/SSL support which is
# expected to fail
logger.debug.assert_called_with(
"Error handling request from %s", cli.host, exc_info=exc
)
logger.debug.reset_mock()
# Now make another connection to the server
# to make sure that the exception is logged
# at debug on a second fresh connection
cli2 = await aiohttp_client(server)
resp = await cli2.get("/path/to")
assert resp.status == 500
assert resp.headers["Content-Type"].startswith("text/plain")
# BadHttpMethod should be logged as debug
# on the first request since the client may
# be probing for TLS/SSL support which is
# expected to fail
logger.debug.assert_called_with(
"Error handling request from %s", cli.host, exc_info=exc
)
async def test_raw_server_logs_invalid_method_without_loop_debug(
aiohttp_raw_server: AiohttpRawServer,
aiohttp_client: AiohttpClient,
loop: asyncio.AbstractEventLoop,
) -> None:
exc = BadHttpMethod(b"\x16\x03\x03\x01F\x01".decode(), "error")
async def handler(request: web.BaseRequest) -> NoReturn:
raise exc
loop = asyncio.get_event_loop()
loop.set_debug(False)
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
resp = await cli.get("/path/to")
assert resp.status == 500
assert resp.headers["Content-Type"].startswith("text/plain")
txt = await resp.text()
assert "Traceback (most recent call last):\n" not in txt
# BadHttpMethod should be logged as debug
# on the first request since the client may
# be probing for TLS/SSL support which is
# expected to fail
logger.debug.assert_called_with(
"Error handling request from %s", cli.host, exc_info=exc
)
async def test_raw_server_logs_invalid_method_second_request(
aiohttp_raw_server: AiohttpRawServer,
aiohttp_client: AiohttpClient,
loop: asyncio.AbstractEventLoop,
) -> None:
exc = BadHttpMethod(b"\x16\x03\x03\x01F\x01".decode(), "error")
request_count = 0
async def handler(request: web.BaseRequest) -> web.Response:
nonlocal request_count
request_count += 1
if request_count == 2:
raise exc
return web.Response()
loop = asyncio.get_event_loop()
loop.set_debug(False)
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
resp = await cli.get("/path/to")
assert resp.status == 200
resp = await cli.get("/path/to")
assert resp.status == 500
assert resp.headers["Content-Type"].startswith("text/plain")
# BadHttpMethod should be logged as an exception
# if it's not the first request, since we know
# the client was already speaking HTTP
logger.exception.assert_called_with(
"Error handling request from %s", cli.host, exc_info=exc
)
async def test_raw_server_logs_bad_status_line_as_exception(
aiohttp_raw_server: AiohttpRawServer,
aiohttp_client: AiohttpClient,
loop: asyncio.AbstractEventLoop,
) -> None:
exc = BadStatusLine(b"\x16\x03\x03\x01F\x01".decode(), "error")
async def handler(request: web.BaseRequest) -> NoReturn:
raise exc
loop = asyncio.get_event_loop()
loop.set_debug(False)
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
resp = await cli.get("/path/to")
assert resp.status == 500
assert resp.headers["Content-Type"].startswith("text/plain")
txt = await resp.text()
assert "Traceback (most recent call last):\n" not in txt
logger.exception.assert_called_with(
"Error handling request from %s", cli.host, exc_info=exc
)
async def test_raw_server_handler_timeout(
aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient
) -> None:
loop = asyncio.get_event_loop()
loop.set_debug(True)
exc = asyncio.TimeoutError("error")
async def handler(request: web.BaseRequest) -> NoReturn:
raise exc
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
resp = await cli.get("/path/to")
assert resp.status == 504
await resp.text()
logger.debug.assert_called_with("Request handler timed out.", exc_info=exc)
async def test_raw_server_do_not_swallow_exceptions(
aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient
) -> None:
async def handler(request: web.BaseRequest) -> NoReturn:
raise asyncio.CancelledError()
loop = asyncio.get_event_loop()
loop.set_debug(True)
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
with pytest.raises(client.ServerDisconnectedError):
await cli.get("/path/to")
logger.debug.assert_called_with("Ignored premature client disconnection")
async def test_raw_server_does_not_swallow_base_exceptions(
aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient
) -> None:
class UnexpectedException(BaseException):
"""Dummy base exception."""
async def handler(request: web.BaseRequest) -> NoReturn:
raise UnexpectedException()
loop = asyncio.get_event_loop()
loop.set_debug(True)
server = await aiohttp_raw_server(handler)
cli = await aiohttp_client(server)
with pytest.raises(client.ServerDisconnectedError):
await cli.get("/path/to", timeout=client.ClientTimeout(10))
async def test_raw_server_cancelled_in_write_eof(
aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient
) -> None:
class MyResponse(web.Response):
async def write_eof(self, data: bytes = b"") -> NoReturn:
raise asyncio.CancelledError("error")
async def handler(request: web.BaseRequest) -> MyResponse:
resp = MyResponse(text=str(request.rel_url))
return resp
loop = asyncio.get_event_loop()
loop.set_debug(True)
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
with pytest.raises(client.ServerDisconnectedError):
await cli.get("/path/to")
logger.debug.assert_called_with("Ignored premature client disconnection")
async def test_raw_server_not_http_exception_debug(
aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient
) -> None:
exc = RuntimeError("custom runtime error")
async def handler(request: web.BaseRequest) -> NoReturn:
raise exc
loop = asyncio.get_event_loop()
loop.set_debug(True)
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
resp = await cli.get("/path/to")
assert resp.status == 500
assert resp.headers["Content-Type"].startswith("text/plain")
txt = await resp.text()
assert "Traceback (most recent call last):\n" in txt
logger.exception.assert_called_with(
"Error handling request from %s", cli.host, exc_info=exc
)
async def test_raw_server_html_exception(
aiohttp_raw_server: AiohttpRawServer,
aiohttp_client: AiohttpClient,
loop: asyncio.AbstractEventLoop,
) -> None:
# disable debug mode not to print traceback
loop.set_debug(False)
exc = RuntimeError("custom runtime error")
async def handler(request: web.BaseRequest) -> NoReturn:
raise exc
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
resp = await cli.get("/path/to", headers={"Accept": "text/html"})
assert resp.status == 500
assert resp.headers["Content-Type"].startswith("text/html")
txt = await resp.text()
assert txt == (
"<html><head><title>500 Internal Server Error</title></head><body>\n"
"<h1>500 Internal Server Error</h1>\n"
"Server got itself in trouble\n"
"</body></html>\n"
)
logger.exception.assert_called_with(
"Error handling request from %s", cli.host, exc_info=exc
)
async def test_raw_server_html_exception_debug(
aiohttp_raw_server: AiohttpRawServer, aiohttp_client: AiohttpClient
) -> None:
exc = RuntimeError("custom runtime error")
async def handler(request: web.BaseRequest) -> NoReturn:
raise exc
loop = asyncio.get_event_loop()
loop.set_debug(True)
logger = mock.Mock()
server = await aiohttp_raw_server(handler, logger=logger)
cli = await aiohttp_client(server)
resp = await cli.get("/path/to", headers={"Accept": "text/html"})
assert resp.status == 500
assert resp.headers["Content-Type"].startswith("text/html")
txt = await resp.text()
assert txt.startswith(
"<html><head><title>500 Internal Server Error</title></head><body>\n"
"<h1>500 Internal Server Error</h1>\n"
"<h2>Traceback:</h2>\n"
"<pre>Traceback (most recent call last):\n"
)
logger.exception.assert_called_with(
"Error handling request from %s", cli.host, exc_info=exc
)
async def test_handler_cancellation(unused_port_socket: socket.socket) -> None:
event = asyncio.Event()
sock = unused_port_socket
port = sock.getsockname()[1]
async def on_request(request: web.Request) -> web.Response:
try:
await asyncio.sleep(10)
except asyncio.CancelledError:
event.set()
raise
else:
raise web.HTTPInternalServerError()
app = web.Application()
app.router.add_route("GET", "/", on_request)
runner = web.AppRunner(app, handler_cancellation=True)
await runner.setup()
site = web.SockSite(runner, sock=sock)
await site.start()
assert runner.server is not None
try:
assert runner.server.handler_cancellation, "Flag was not propagated"
async with client.ClientSession(
timeout=client.ClientTimeout(total=0.15)
) as sess:
with pytest.raises(asyncio.TimeoutError):
await sess.get(f"http://127.0.0.1:{port}/")
with suppress(asyncio.TimeoutError):
await asyncio.wait_for(event.wait(), timeout=1)
assert event.is_set(), "Request handler hasn't been cancelled"
finally:
await asyncio.gather(runner.shutdown(), site.stop())
async def test_no_handler_cancellation(unused_port_socket: socket.socket) -> None:
timeout_event = asyncio.Event()
done_event = asyncio.Event()
sock = unused_port_socket
port = sock.getsockname()[1]
started = False
async def on_request(request: web.Request) -> web.Response:
nonlocal started
started = True
await asyncio.wait_for(timeout_event.wait(), timeout=5)
done_event.set()
return web.Response()
app = web.Application()
app.router.add_route("GET", "/", on_request)
runner = web.AppRunner(app)
await runner.setup()
site = web.SockSite(runner, sock=sock)
await site.start()
try:
async with client.ClientSession(
timeout=client.ClientTimeout(total=0.2)
) as sess:
with pytest.raises(asyncio.TimeoutError):
await sess.get(f"http://127.0.0.1:{port}/")
await asyncio.sleep(0.1)
timeout_event.set()
with suppress(asyncio.TimeoutError):
await asyncio.wait_for(done_event.wait(), timeout=1)
assert started
assert done_event.is_set()
finally:
await asyncio.gather(runner.shutdown(), site.stop())
|
./temp_repos/aiohttp/aiohttp/web_server.py
|
./temp_repos/aiohttp/tests/test_web_server.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Server'.
Context:
- Class Name: Server
- Dependencies to Mock: handler
- Key Imports: http_parser, streams, web_request, web_response, warnings, typing, collections.abc, asyncio, web_protocol, abc
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
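A minimal sketch against the Server API shown above (connection_made/connection_lost/shutdown), with the handler dependency mocked; the protocol objects are hypothetical stand-ins for RequestHandler instances.
import unittest
from unittest import mock

from aiohttp.web_server import Server


class TestServer(unittest.IsolatedAsyncioTestCase):
    async def test_connection_made_and_lost(self) -> None:
        # Arrange: the request handler dependency is mocked out, and the
        # protocol is a hypothetical stand-in for a RequestHandler.
        handler = mock.AsyncMock()
        server = Server(handler)
        protocol = mock.MagicMock()
        protocol._task_handler = None  # no in-flight request task
        transport = mock.MagicMock()
        # Act
        server.connection_made(protocol, transport)
        # Assert
        self.assertIn(protocol, server.connections)
        # Act: losing the connection removes it again.
        server.connection_lost(protocol, exc=None)
        # Assert
        self.assertNotIn(protocol, server.connections)

    async def test_shutdown_closes_tracked_connections(self) -> None:
        # Arrange
        handler = mock.AsyncMock()
        server = Server(handler)
        protocol = mock.MagicMock()
        protocol.shutdown = mock.AsyncMock()
        server.connection_made(protocol, mock.MagicMock())
        # Act
        await server.shutdown(timeout=0.1)
        # Assert: every tracked connection was asked to shut down and
        # the registry was cleared.
        protocol.shutdown.assert_awaited_once_with(0.1)
        self.assertEqual(server.connections, [])


if __name__ == "__main__":
    unittest.main()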
|
Server
|
python
|
import asyncio
import logging
import warnings
from collections.abc import (
AsyncIterator,
Awaitable,
Callable,
Iterable,
Iterator,
Mapping,
MutableMapping,
Sequence,
)
from functools import lru_cache, partial, update_wrapper
from typing import Any, TypeVar, cast, final, overload
from aiosignal import Signal
from frozenlist import FrozenList
from . import hdrs
from .helpers import AppKey
from .log import web_logger
from .typedefs import Handler, Middleware
from .web_exceptions import NotAppKeyWarning
from .web_middlewares import _fix_request_current_app
from .web_request import Request
from .web_response import StreamResponse
from .web_routedef import AbstractRouteDef
from .web_urldispatcher import (
AbstractResource,
AbstractRoute,
Domain,
MaskDomain,
MatchedSubAppResource,
PrefixedSubAppResource,
SystemRoute,
UrlDispatcher,
)
__all__ = ("Application", "CleanupError")
_AppSignal = Signal["Application"]
_RespPrepareSignal = Signal[Request, StreamResponse]
_Middlewares = FrozenList[Middleware]
_MiddlewaresHandlers = Sequence[Middleware]
_Subapps = list["Application"]
_T = TypeVar("_T")
_U = TypeVar("_U")
_Resource = TypeVar("_Resource", bound=AbstractResource)
def _build_middlewares(
handler: Handler, apps: tuple["Application", ...]
) -> Callable[[Request], Awaitable[StreamResponse]]:
"""Apply middlewares to handler."""
# The slice is to reverse the order of the apps
# so they are applied in the order they were added
for app in apps[::-1]:
assert app.pre_frozen, "middleware handlers are not ready"
for m in app._middlewares_handlers:
handler = update_wrapper(partial(m, handler=handler), handler)
return handler
_cached_build_middleware = lru_cache(maxsize=1024)(_build_middlewares)
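# A minimal standalone sketch (illustrative names, not aiohttp API) of
# the wrapping order implemented above: middlewares are applied in
# reverse, so the first-added middleware becomes the outermost wrapper.
if __name__ == "__main__":  # demonstration only; not executed on import

    async def _demo_handler(request: str) -> str:
        return f"response to {request}"

    def _demo_mw(label: str) -> Any:
        async def mw(request: str, handler: Any) -> str:
            inner = await handler(request)
            return f"{label}({inner})"

        return mw

    _demo: Any = _demo_handler
    # _prepare_middleware yields middlewares in reverse-added order, so
    # adding [first, second] wraps with "second" first; "first" ends up
    # outermost, exactly as _build_middlewares does above.
    for _m in (_demo_mw("second"), _demo_mw("first")):
        _demo = update_wrapper(partial(_m, handler=_demo), _demo)

    print(asyncio.run(_demo("req")))  # -> first(second(response to req))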
@final
class Application(MutableMapping[str | AppKey[Any], Any]):
__slots__ = (
"logger",
"_router",
"_loop",
"_handler_args",
"_middlewares",
"_middlewares_handlers",
"_run_middlewares",
"_state",
"_frozen",
"_pre_frozen",
"_subapps",
"_on_response_prepare",
"_on_startup",
"_on_shutdown",
"_on_cleanup",
"_client_max_size",
"_cleanup_ctx",
)
def __init__(
self,
*,
logger: logging.Logger = web_logger,
middlewares: Iterable[Middleware] = (),
handler_args: Mapping[str, Any] | None = None,
client_max_size: int = 1024**2,
debug: Any = ..., # mypy doesn't support ellipsis
) -> None:
if debug is not ...:
warnings.warn(
"debug argument is no-op since 4.0 and scheduled for removal in 5.0",
DeprecationWarning,
stacklevel=2,
)
self._router = UrlDispatcher()
self._handler_args = handler_args
self.logger = logger
self._middlewares: _Middlewares = FrozenList(middlewares)
# initialized on freezing
self._middlewares_handlers: _MiddlewaresHandlers = tuple()
# initialized on freezing
self._run_middlewares: bool | None = None
self._state: dict[AppKey[Any] | str, object] = {}
self._frozen = False
self._pre_frozen = False
self._subapps: _Subapps = []
self._on_response_prepare: _RespPrepareSignal = Signal(self)
self._on_startup: _AppSignal = Signal(self)
self._on_shutdown: _AppSignal = Signal(self)
self._on_cleanup: _AppSignal = Signal(self)
self._cleanup_ctx = CleanupContext()
self._on_startup.append(self._cleanup_ctx._on_startup)
self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
self._client_max_size = client_max_size
def __init_subclass__(cls: type["Application"]) -> None:
raise TypeError(
f"Inheritance class {cls.__name__} from web.Application is forbidden"
)
# MutableMapping API
def __eq__(self, other: object) -> bool:
return self is other
@overload # type: ignore[override]
def __getitem__(self, key: AppKey[_T]) -> _T: ...
@overload
def __getitem__(self, key: str) -> Any: ...
def __getitem__(self, key: str | AppKey[_T]) -> Any:
return self._state[key]
def _check_frozen(self) -> None:
if self._frozen:
raise RuntimeError(
"Changing state of started or joined application is forbidden"
)
@overload # type: ignore[override]
def __setitem__(self, key: AppKey[_T], value: _T) -> None: ...
@overload
def __setitem__(self, key: str, value: Any) -> None: ...
def __setitem__(self, key: str | AppKey[_T], value: Any) -> None:
self._check_frozen()
if not isinstance(key, AppKey):
warnings.warn(
"It is recommended to use web.AppKey instances for keys.\n"
+ "https://docs.aiohttp.org/en/stable/web_advanced.html"
+ "#application-s-config",
category=NotAppKeyWarning,
stacklevel=2,
)
self._state[key] = value
def __delitem__(self, key: str | AppKey[_T]) -> None:
self._check_frozen()
del self._state[key]
def __len__(self) -> int:
return len(self._state)
def __iter__(self) -> Iterator[str | AppKey[Any]]:
return iter(self._state)
def __hash__(self) -> int:
return id(self)
@overload # type: ignore[override]
def get(self, key: AppKey[_T], default: None = ...) -> _T | None: ...
@overload
def get(self, key: AppKey[_T], default: _U) -> _T | _U: ...
@overload
def get(self, key: str, default: Any = ...) -> Any: ...
def get(self, key: str | AppKey[_T], default: Any = None) -> Any:
return self._state.get(key, default)
########
def _set_loop(self, loop: asyncio.AbstractEventLoop | None) -> None:
warnings.warn(
"_set_loop() is no-op since 4.0 and scheduled for removal in 5.0",
DeprecationWarning,
stacklevel=2,
)
@property
def pre_frozen(self) -> bool:
return self._pre_frozen
def pre_freeze(self) -> None:
if self._pre_frozen:
return
self._pre_frozen = True
self._middlewares.freeze()
self._router.freeze()
self._on_response_prepare.freeze()
self._cleanup_ctx.freeze()
self._on_startup.freeze()
self._on_shutdown.freeze()
self._on_cleanup.freeze()
self._middlewares_handlers = tuple(self._prepare_middleware())
# If neither the current app nor any subapp has middlewares, avoid
# running all of the machinery that middlewares imply, which includes
# a middleware hardcoded per app that sets up the current_app
# attribute. If no middlewares are configured, the handler receives
# the proper current_app without needing any of this code.
self._run_middlewares = True if self.middlewares else False
for subapp in self._subapps:
subapp.pre_freeze()
self._run_middlewares = self._run_middlewares or subapp._run_middlewares
@property
def frozen(self) -> bool:
return self._frozen
def freeze(self) -> None:
if self._frozen:
return
self.pre_freeze()
self._frozen = True
for subapp in self._subapps:
subapp.freeze()
@property
def debug(self) -> bool:
warnings.warn(
"debug property is deprecated since 4.0 and scheduled for removal in 5.0",
DeprecationWarning,
stacklevel=2,
)
return asyncio.get_event_loop().get_debug()
def _reg_subapp_signals(self, subapp: "Application") -> None:
def reg_handler(signame: str) -> None:
subsig = getattr(subapp, signame)
async def handler(app: "Application") -> None:
await subsig.send(subapp)
appsig = getattr(self, signame)
appsig.append(handler)
reg_handler("on_startup")
reg_handler("on_shutdown")
reg_handler("on_cleanup")
def add_subapp(self, prefix: str, subapp: "Application") -> PrefixedSubAppResource:
if not isinstance(prefix, str):
raise TypeError("Prefix must be str")
prefix = prefix.rstrip("/")
if not prefix:
raise ValueError("Prefix cannot be empty")
factory = partial(PrefixedSubAppResource, prefix, subapp)
return self._add_subapp(factory, subapp)
def _add_subapp(
self, resource_factory: Callable[[], _Resource], subapp: "Application"
) -> _Resource:
if self.frozen:
raise RuntimeError("Cannot add sub application to frozen application")
if subapp.frozen:
raise RuntimeError("Cannot add frozen application")
resource = resource_factory()
self.router.register_resource(resource)
self._reg_subapp_signals(subapp)
self._subapps.append(subapp)
subapp.pre_freeze()
return resource
def add_domain(self, domain: str, subapp: "Application") -> MatchedSubAppResource:
if not isinstance(domain, str):
raise TypeError("Domain must be str")
elif "*" in domain:
rule: Domain = MaskDomain(domain)
else:
rule = Domain(domain)
factory = partial(MatchedSubAppResource, rule, subapp)
return self._add_subapp(factory, subapp)
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> list[AbstractRoute]:
return self.router.add_routes(routes)
@property
def on_response_prepare(self) -> _RespPrepareSignal:
return self._on_response_prepare
@property
def on_startup(self) -> _AppSignal:
return self._on_startup
@property
def on_shutdown(self) -> _AppSignal:
return self._on_shutdown
@property
def on_cleanup(self) -> _AppSignal:
return self._on_cleanup
@property
def cleanup_ctx(self) -> "CleanupContext":
return self._cleanup_ctx
@property
def router(self) -> UrlDispatcher:
return self._router
@property
def middlewares(self) -> _Middlewares:
return self._middlewares
async def startup(self) -> None:
"""Causes on_startup signal
Should be called in the event loop along with the request handler.
"""
await self.on_startup.send(self)
async def shutdown(self) -> None:
"""Causes on_shutdown signal
Should be called before cleanup()
"""
await self.on_shutdown.send(self)
async def cleanup(self) -> None:
"""Causes on_cleanup signal
Should be called after shutdown()
"""
if self.on_cleanup.frozen:
await self.on_cleanup.send(self)
else:
# If an exception occurs in startup, ensure cleanup contexts are completed.
await self._cleanup_ctx._on_cleanup(self)
def _prepare_middleware(self) -> Iterator[Middleware]:
yield from reversed(self._middlewares)
yield _fix_request_current_app(self)
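    # Note: middlewares are yielded in reverse registration order, with the
    # current_app fixer last, so that as the handler is wrapped step by step
    # the first-registered middleware ends up outermost at request time.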
async def _handle(self, request: Request) -> StreamResponse:
match_info = await self._router.resolve(request)
match_info.add_app(self)
match_info.freeze()
request._match_info = match_info
if request.headers.get(hdrs.EXPECT):
resp = await match_info.expect_handler(request)
await request.writer.drain()
if resp is not None:
return resp
handler = match_info.handler
if self._run_middlewares:
            # If it's a SystemRoute, don't cache the built middlewares, since
            # they are constructed anew for every MatchInfoError (a new
            # handler is made each time).
if isinstance(match_info.route, SystemRoute):
handler = _build_middlewares(handler, match_info.apps)
else:
handler = _cached_build_middleware(handler, match_info.apps)
return await handler(request)
def __call__(self) -> "Application":
"""gunicorn compatibility"""
return self
def __repr__(self) -> str:
return f"<Application 0x{id(self):x}>"
def __bool__(self) -> bool:
return True
class CleanupError(RuntimeError):
@property
def exceptions(self) -> list[BaseException]:
return cast(list[BaseException], self.args[1])
_CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
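# CleanupContext stores one async generator per registered callback; each
# callback must yield exactly once.  _on_startup advances every generator to
# its yield (running the setup half) and keeps the iterator; _on_cleanup then
# resumes the stored iterators in reverse (LIFO) order, collecting exceptions
# and re-raising them -- wrapped in CleanupError when more than one occurred.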
class CleanupContext(_CleanupContextBase):
def __init__(self) -> None:
super().__init__()
self._exits: list[AsyncIterator[None]] = []
async def _on_startup(self, app: Application) -> None:
for cb in self:
it = cb(app).__aiter__()
await it.__anext__()
self._exits.append(it)
async def _on_cleanup(self, app: Application) -> None:
errors = []
for it in reversed(self._exits):
try:
await it.__anext__()
except StopAsyncIteration:
pass
except (Exception, asyncio.CancelledError) as exc:
errors.append(exc)
else:
errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
if errors:
if len(errors) == 1:
raise errors[0]
else:
raise CleanupError("Multiple errors on cleanup stage", errors)
|
import asyncio
import sys
from collections.abc import AsyncIterator, Callable, Iterator
from typing import NoReturn
from unittest import mock
import pytest
from aiohttp import log, web
from aiohttp.pytest_plugin import AiohttpClient
from aiohttp.typedefs import Handler
async def test_app_ctor() -> None:
app = web.Application()
assert app.logger is log.web_logger
def test_app_call() -> None:
app = web.Application()
assert app is app()
async def test_app_register_on_finish() -> None:
app = web.Application()
cb1 = mock.AsyncMock(return_value=None)
cb2 = mock.AsyncMock(return_value=None)
app.on_cleanup.append(cb1)
app.on_cleanup.append(cb2)
app.freeze()
await app.cleanup()
cb1.assert_called_once_with(app)
cb2.assert_called_once_with(app)
async def test_app_register_coro() -> None:
app = web.Application()
fut = asyncio.get_event_loop().create_future()
async def cb(app: web.Application) -> None:
await asyncio.sleep(0.001)
fut.set_result(123)
app.on_cleanup.append(cb)
app.freeze()
await app.cleanup()
assert fut.done()
assert 123 == fut.result()
def test_logging() -> None:
logger = mock.Mock()
app = web.Application()
app.logger = logger
assert app.logger is logger
async def test_on_shutdown() -> None:
app = web.Application()
called = False
async def on_shutdown(app_param: web.Application) -> None:
nonlocal called
assert app is app_param
called = True
app.on_shutdown.append(on_shutdown)
app.freeze()
await app.shutdown()
assert called
async def test_on_startup() -> None:
app = web.Application()
long_running1_called = False
long_running2_called = False
all_long_running_called = False
async def long_running1(app_param: web.Application) -> None:
nonlocal long_running1_called
assert app is app_param
long_running1_called = True
async def long_running2(app_param: web.Application) -> None:
nonlocal long_running2_called
assert app is app_param
long_running2_called = True
async def on_startup_all_long_running(app_param: web.Application) -> None:
nonlocal all_long_running_called
assert app is app_param
all_long_running_called = True
await asyncio.gather(long_running1(app_param), long_running2(app_param))
app.on_startup.append(on_startup_all_long_running)
app.freeze()
await app.startup()
assert long_running1_called
assert long_running2_called
assert all_long_running_called
def test_appkey() -> None:
key = web.AppKey("key", str)
app = web.Application()
app[key] = "value"
assert app[key] == "value"
assert len(app) == 1
del app[key]
assert len(app) == 0
def test_appkey_repr_concrete() -> None:
key = web.AppKey("key", int)
assert repr(key) in (
"<AppKey(__channelexec__.key, type=int)>", # pytest-xdist
"<AppKey(__main__.key, type=int)>",
)
key2 = web.AppKey("key", web.Request)
assert repr(key2) in (
# pytest-xdist:
"<AppKey(__channelexec__.key, type=aiohttp.web_request.Request)>",
"<AppKey(__main__.key, type=aiohttp.web_request.Request)>",
)
def test_appkey_repr_nonconcrete() -> None:
key = web.AppKey("key", Iterator[int])
if sys.version_info < (3, 11):
assert repr(key) in (
# pytest-xdist:
"<AppKey(__channelexec__.key, type=collections.abc.Iterator)>",
"<AppKey(__main__.key, type=collections.abc.Iterator)>",
)
else:
assert repr(key) in (
# pytest-xdist:
"<AppKey(__channelexec__.key, type=collections.abc.Iterator[int])>",
"<AppKey(__main__.key, type=collections.abc.Iterator[int])>",
)
def test_appkey_repr_annotated() -> None:
key = web.AppKey[Iterator[int]]("key")
if sys.version_info < (3, 11):
assert repr(key) in (
# pytest-xdist:
"<AppKey(__channelexec__.key, type=collections.abc.Iterator)>",
"<AppKey(__main__.key, type=collections.abc.Iterator)>",
)
else:
assert repr(key) in (
# pytest-xdist:
"<AppKey(__channelexec__.key, type=collections.abc.Iterator[int])>",
"<AppKey(__main__.key, type=collections.abc.Iterator[int])>",
)
def test_app_str_keys() -> None:
app = web.Application()
with pytest.warns(
UserWarning, match=r"web_advanced\.html#application-s-config"
) as checker:
app["key"] = "value"
    # Check that the warning is emitted at the call site (stacklevel=2)
assert checker[0].filename == __file__
assert app["key"] == "value"
def test_app_get() -> None:
key = web.AppKey("key", int)
app = web.Application()
assert app.get(key, "foo") == "foo"
app[key] = 5
assert app.get(key, "foo") == 5
def test_app_freeze() -> None:
app = web.Application()
subapp = mock.Mock()
subapp._middlewares = ()
app._subapps.append(subapp)
app.freeze()
assert subapp.freeze.called
app.freeze()
assert len(subapp.freeze.call_args_list) == 1
def test_equality() -> None:
app1 = web.Application()
app2 = web.Application()
assert app1 == app1
assert app1 != app2
def test_app_run_middlewares() -> None:
root = web.Application()
sub = web.Application()
root.add_subapp("/sub", sub)
root.freeze()
assert root._run_middlewares is False
async def middleware(request: web.Request, handler: Handler) -> web.StreamResponse:
assert False
root = web.Application(middlewares=[middleware])
sub = web.Application()
root.add_subapp("/sub", sub)
root.freeze()
assert root._run_middlewares is True
root = web.Application()
sub = web.Application(middlewares=[middleware])
root.add_subapp("/sub", sub)
root.freeze()
assert root._run_middlewares is True
def test_subapp_pre_frozen_after_adding() -> None:
app = web.Application()
subapp = web.Application()
app.add_subapp("/prefix", subapp)
assert subapp.pre_frozen
assert not subapp.frozen
def test_app_inheritance() -> None:
with pytest.raises(TypeError):
class A(web.Application): # type: ignore[misc]
pass
def test_app_custom_attr() -> None:
app = web.Application()
with pytest.raises(AttributeError):
app.custom = None # type: ignore[attr-defined]
async def test_cleanup_ctx() -> None:
app = web.Application()
out = []
def f(num: int) -> Callable[[web.Application], AsyncIterator[None]]:
async def inner(app: web.Application) -> AsyncIterator[None]:
out.append("pre_" + str(num))
yield None
out.append("post_" + str(num))
return inner
app.cleanup_ctx.append(f(1))
app.cleanup_ctx.append(f(2))
app.freeze()
await app.startup()
assert out == ["pre_1", "pre_2"]
await app.cleanup()
assert out == ["pre_1", "pre_2", "post_2", "post_1"]
async def test_cleanup_ctx_exception_on_startup() -> None:
app = web.Application()
out = []
exc = Exception("fail")
def f(
num: int, fail: bool = False
) -> Callable[[web.Application], AsyncIterator[None]]:
async def inner(app: web.Application) -> AsyncIterator[None]:
out.append("pre_" + str(num))
if fail:
raise exc
yield None
out.append("post_" + str(num))
return inner
app.cleanup_ctx.append(f(1))
app.cleanup_ctx.append(f(2, True))
app.cleanup_ctx.append(f(3))
app.freeze()
with pytest.raises(Exception) as ctx:
await app.startup()
assert ctx.value is exc
assert out == ["pre_1", "pre_2"]
await app.cleanup()
assert out == ["pre_1", "pre_2", "post_1"]
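    # ctx 2 raised before its yield and ctx 3 never ran its setup, so cleanup
    # only unwinds ctx 1 -- hence no "post_2"/"post_3" in `out`.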
async def test_cleanup_ctx_exception_on_cleanup() -> None:
app = web.Application()
out = []
exc = Exception("fail")
def f(
num: int, fail: bool = False
) -> Callable[[web.Application], AsyncIterator[None]]:
async def inner(app: web.Application) -> AsyncIterator[None]:
out.append("pre_" + str(num))
yield None
out.append("post_" + str(num))
if fail:
raise exc
return inner
app.cleanup_ctx.append(f(1))
app.cleanup_ctx.append(f(2, True))
app.cleanup_ctx.append(f(3))
app.freeze()
await app.startup()
assert out == ["pre_1", "pre_2", "pre_3"]
with pytest.raises(Exception) as ctx:
await app.cleanup()
assert ctx.value is exc
assert out == ["pre_1", "pre_2", "pre_3", "post_3", "post_2", "post_1"]
async def test_cleanup_ctx_cleanup_after_exception() -> None:
app = web.Application()
ctx_state = None
async def success_ctx(app: web.Application) -> AsyncIterator[None]:
nonlocal ctx_state
ctx_state = "START"
yield
ctx_state = "CLEAN"
async def fail_ctx(app: web.Application) -> AsyncIterator[NoReturn]:
raise Exception()
yield # type: ignore[unreachable] # pragma: no cover
app.cleanup_ctx.append(success_ctx)
app.cleanup_ctx.append(fail_ctx)
runner = web.AppRunner(app)
try:
with pytest.raises(Exception):
await runner.setup()
finally:
await runner.cleanup()
assert ctx_state == "CLEAN"
@pytest.mark.parametrize("exc_cls", (Exception, asyncio.CancelledError))
async def test_cleanup_ctx_exception_on_cleanup_multiple(
exc_cls: type[BaseException],
) -> None:
app = web.Application()
out = []
def f(
num: int, fail: bool = False
) -> Callable[[web.Application], AsyncIterator[None]]:
async def inner(app: web.Application) -> AsyncIterator[None]:
out.append("pre_" + str(num))
yield None
out.append("post_" + str(num))
if fail:
raise exc_cls("fail_" + str(num))
return inner
app.cleanup_ctx.append(f(1))
app.cleanup_ctx.append(f(2, True))
app.cleanup_ctx.append(f(3, True))
app.freeze()
await app.startup()
assert out == ["pre_1", "pre_2", "pre_3"]
with pytest.raises(web.CleanupError) as ctx:
await app.cleanup()
exc = ctx.value
assert len(exc.exceptions) == 2
assert str(exc.exceptions[0]) == "fail_3"
assert str(exc.exceptions[1]) == "fail_2"
assert out == ["pre_1", "pre_2", "pre_3", "post_3", "post_2", "post_1"]
async def test_cleanup_ctx_multiple_yields() -> None:
app = web.Application()
out = []
def f(num: int) -> Callable[[web.Application], AsyncIterator[None]]:
async def inner(app: web.Application) -> AsyncIterator[None]:
out.append("pre_" + str(num))
yield None
out.append("post_" + str(num))
yield None
return inner
app.cleanup_ctx.append(f(1))
app.freeze()
await app.startup()
assert out == ["pre_1"]
with pytest.raises(RuntimeError) as ctx:
await app.cleanup()
assert "has more than one 'yield'" in str(ctx.value)
assert out == ["pre_1", "post_1"]
async def test_subapp_chained_config_dict_visibility(
aiohttp_client: AiohttpClient,
) -> None:
key1 = web.AppKey("key1", str)
key2 = web.AppKey("key2", str)
async def main_handler(request: web.Request) -> web.Response:
assert request.config_dict[key1] == "val1"
assert key2 not in request.config_dict
return web.Response(status=200)
root = web.Application()
root[key1] = "val1"
root.add_routes([web.get("/", main_handler)])
async def sub_handler(request: web.Request) -> web.Response:
assert request.config_dict[key1] == "val1"
assert request.config_dict[key2] == "val2"
return web.Response(status=201)
sub = web.Application()
sub[key2] = "val2"
sub.add_routes([web.get("/", sub_handler)])
root.add_subapp("/sub", sub)
client = await aiohttp_client(root)
resp = await client.get("/")
assert resp.status == 200
resp = await client.get("/sub/")
assert resp.status == 201
async def test_subapp_chained_config_dict_overriding(
aiohttp_client: AiohttpClient,
) -> None:
key = web.AppKey("key", str)
async def main_handler(request: web.Request) -> web.Response:
assert request.config_dict[key] == "val1"
return web.Response(status=200)
root = web.Application()
root[key] = "val1"
root.add_routes([web.get("/", main_handler)])
async def sub_handler(request: web.Request) -> web.Response:
assert request.config_dict[key] == "val2"
return web.Response(status=201)
sub = web.Application()
sub[key] = "val2"
sub.add_routes([web.get("/", sub_handler)])
root.add_subapp("/sub", sub)
client = await aiohttp_client(root)
resp = await client.get("/")
assert resp.status == 200
resp = await client.get("/sub/")
assert resp.status == 201
async def test_subapp_on_startup(aiohttp_client: AiohttpClient) -> None:
subapp = web.Application()
startup = web.AppKey("startup", bool)
cleanup = web.AppKey("cleanup", bool)
startup_called = False
async def on_startup(app: web.Application) -> None:
nonlocal startup_called
startup_called = True
app[startup] = True
subapp.on_startup.append(on_startup)
ctx_pre_called = False
ctx_post_called = False
async def cleanup_ctx(app: web.Application) -> AsyncIterator[None]:
nonlocal ctx_pre_called, ctx_post_called
ctx_pre_called = True
app[cleanup] = True
yield None
ctx_post_called = True
subapp.cleanup_ctx.append(cleanup_ctx)
shutdown_called = False
async def on_shutdown(app: web.Application) -> None:
nonlocal shutdown_called
shutdown_called = True
subapp.on_shutdown.append(on_shutdown)
cleanup_called = False
async def on_cleanup(app: web.Application) -> None:
nonlocal cleanup_called
cleanup_called = True
subapp.on_cleanup.append(on_cleanup)
app = web.Application()
app.add_subapp("/subapp", subapp)
assert not startup_called
assert not ctx_pre_called
assert not ctx_post_called
assert not shutdown_called
assert not cleanup_called
assert subapp.on_startup.frozen
assert subapp.cleanup_ctx.frozen
assert subapp.on_shutdown.frozen
assert subapp.on_cleanup.frozen
assert subapp.router.frozen
client = await aiohttp_client(app)
assert startup_called
assert ctx_pre_called # type: ignore[unreachable]
assert not ctx_post_called
assert not shutdown_called
assert not cleanup_called
await client.close()
assert startup_called
assert ctx_pre_called
assert ctx_post_called
assert shutdown_called
assert cleanup_called
@pytest.mark.filterwarnings(r"ignore:.*web\.AppKey:UserWarning")
def test_app_iter() -> None:
app = web.Application()
b = web.AppKey("b", str)
c = web.AppKey("c", str)
app["a"] = "0"
app[b] = "1"
app[c] = "2"
app["d"] = "4"
assert sorted(list(app)) == [b, c, "a", "d"]
def test_app_forbid_nonslot_attr() -> None:
app = web.Application()
with pytest.raises(AttributeError):
app.unknow_attr # type: ignore[attr-defined]
with pytest.raises(AttributeError):
app.unknow_attr = 1 # type: ignore[attr-defined]
def test_forbid_changing_frozen_app() -> None:
app = web.Application()
app.freeze()
with pytest.raises(RuntimeError):
app["key"] = "value"
def test_app_boolean() -> None:
app = web.Application()
assert app
|
./temp_repos/aiohttp/aiohttp/web_app.py
|
./temp_repos/aiohttp/tests/test_web_app.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'Application'.
Context:
- Class Name: Application
- Dependencies to Mock: None detected
- Key Imports: web_exceptions, frozenlist, helpers, web_request, log, logging, warnings, typedefs, web_middlewares, functools
Requirements:
1. Use the 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
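For illustration only, a minimal sketch of such a test (hedged: the test
class and method names below are assumptions, not part of the target code):

import asyncio
import unittest
from unittest import mock

from aiohttp import web


class TestApplication(unittest.TestCase):
    def test_frozen_app_rejects_mutation(self) -> None:
        # Arrange: a frozen application
        app = web.Application()
        app.freeze()
        # Act / Assert: mutating a frozen app must raise
        with self.assertRaises(RuntimeError):
            app[web.AppKey("k", str)] = "v"

    def test_cleanup_signal_called(self) -> None:
        # Arrange: register a mocked cleanup callback
        app = web.Application()
        cb = mock.AsyncMock(return_value=None)
        app.on_cleanup.append(cb)
        app.freeze()
        # Act: run the cleanup stage
        asyncio.run(app.cleanup())
        # Assert: the callback was invoked exactly once with the app
        cb.assert_called_once_with(app)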
|
Application
|
python
|
"""Http related parsers and protocol."""
import asyncio
import sys
from typing import ( # noqa
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Iterable,
List,
NamedTuple,
Optional,
Union,
)
from multidict import CIMultiDict
from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
from .client_exceptions import ClientConnectionResetError
from .compression_utils import ZLibCompressor
from .helpers import NO_EXTENSIONS
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
MIN_PAYLOAD_FOR_WRITELINES = 2048
IS_PY313_BEFORE_313_2 = (3, 13, 0) <= sys.version_info < (3, 13, 2)
IS_PY_BEFORE_312_9 = sys.version_info < (3, 12, 9)
SKIP_WRITELINES = IS_PY313_BEFORE_313_2 or IS_PY_BEFORE_312_9
# writelines is not safe for use
# on Python 3.12+ until 3.12.9
# or on Python 3.13+ until 3.13.2,
# and on older versions it is not any faster than write.
# CVE-2024-12254: https://github.com/python/cpython/pull/127656
class HttpVersion(NamedTuple):
major: int
minor: int
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)
_T_OnChunkSent = Optional[
Callable[
[Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]],
Awaitable[None],
]
]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
class StreamWriter(AbstractStreamWriter):
length: int | None = None
chunked: bool = False
_eof: bool = False
_compress: ZLibCompressor | None = None
def __init__(
self,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
on_chunk_sent: _T_OnChunkSent = None,
on_headers_sent: _T_OnHeadersSent = None,
) -> None:
self._protocol = protocol
self.loop = loop
self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
self._on_headers_sent: _T_OnHeadersSent = on_headers_sent
self._headers_buf: bytes | None = None
self._headers_written: bool = False
@property
def transport(self) -> asyncio.Transport | None:
return self._protocol.transport
@property
def protocol(self) -> BaseProtocol:
return self._protocol
def enable_chunking(self) -> None:
self.chunked = True
def enable_compression(
self, encoding: str = "deflate", strategy: int | None = None
) -> None:
self._compress = ZLibCompressor(encoding=encoding, strategy=strategy)
def _write(
self, chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]
) -> None:
size = len(chunk)
self.buffer_size += size
self.output_size += size
transport = self._protocol.transport
if transport is None or transport.is_closing():
raise ClientConnectionResetError("Cannot write to closing transport")
transport.write(chunk)
def _writelines(
self,
chunks: Iterable[
Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]
],
) -> None:
size = 0
for chunk in chunks:
size += len(chunk)
self.buffer_size += size
self.output_size += size
transport = self._protocol.transport
if transport is None or transport.is_closing():
raise ClientConnectionResetError("Cannot write to closing transport")
if SKIP_WRITELINES or size < MIN_PAYLOAD_FOR_WRITELINES:
transport.write(b"".join(chunks))
else:
transport.writelines(chunks)
def _write_chunked_payload(
self, chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]
) -> None:
"""Write a chunk with proper chunked encoding."""
chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii")
self._writelines((chunk_len_pre, chunk, b"\r\n"))
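    # Wire format (RFC 9112 chunked coding): "<size-in-hex>\r\n<data>\r\n",
    # with the final "0\r\n\r\n" terminator emitted by write_eof()/set_eof().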
def _send_headers_with_payload(
self,
chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"],
is_eof: bool,
) -> None:
"""Send buffered headers with payload, coalescing into single write."""
# Mark headers as written
self._headers_written = True
headers_buf = self._headers_buf
self._headers_buf = None
if TYPE_CHECKING:
# Safe because callers (write() and write_eof()) only invoke this method
# after checking that self._headers_buf is truthy
assert headers_buf is not None
if not self.chunked:
# Non-chunked: coalesce headers with body
if chunk:
self._writelines((headers_buf, chunk))
else:
self._write(headers_buf)
return
# Coalesce headers with chunked data
if chunk:
chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii")
if is_eof:
self._writelines((headers_buf, chunk_len_pre, chunk, b"\r\n0\r\n\r\n"))
else:
self._writelines((headers_buf, chunk_len_pre, chunk, b"\r\n"))
elif is_eof:
self._writelines((headers_buf, b"0\r\n\r\n"))
else:
self._write(headers_buf)
async def write(
self,
chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"],
*,
drain: bool = True,
LIMIT: int = 0x10000,
) -> None:
"""
Writes chunk of data to a stream.
write_eof() indicates end of stream.
writer can't be used after write_eof() method being called.
write() return drain future.
"""
if self._on_chunk_sent is not None:
await self._on_chunk_sent(chunk)
if isinstance(chunk, memoryview):
if chunk.nbytes != len(chunk):
# just reshape it
chunk = chunk.cast("c")
if self._compress is not None:
chunk = await self._compress.compress(chunk)
if not chunk:
return
if self.length is not None:
chunk_len = len(chunk)
if self.length >= chunk_len:
self.length = self.length - chunk_len
else:
chunk = chunk[: self.length]
self.length = 0
if not chunk:
return
# Handle buffered headers for small payload optimization
if self._headers_buf and not self._headers_written:
self._send_headers_with_payload(chunk, False)
if drain and self.buffer_size > LIMIT:
self.buffer_size = 0
await self.drain()
return
if chunk:
if self.chunked:
self._write_chunked_payload(chunk)
else:
self._write(chunk)
if drain and self.buffer_size > LIMIT:
self.buffer_size = 0
await self.drain()
async def write_headers(
self, status_line: str, headers: "CIMultiDict[str]"
) -> None:
"""Write headers to the stream."""
if self._on_headers_sent is not None:
await self._on_headers_sent(headers)
# status + headers
buf = _serialize_headers(status_line, headers)
self._headers_written = False
self._headers_buf = buf
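        # Headers are only buffered here; flushing is deferred so the first
        # body write (or EOF) can coalesce them into a single transport write.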
def send_headers(self) -> None:
"""Force sending buffered headers if not already sent."""
if not self._headers_buf or self._headers_written:
return
self._headers_written = True
headers_buf = self._headers_buf
self._headers_buf = None
if TYPE_CHECKING:
# Safe because we only enter this block when self._headers_buf is truthy
assert headers_buf is not None
self._write(headers_buf)
def set_eof(self) -> None:
"""Indicate that the message is complete."""
if self._eof:
return
# If headers haven't been sent yet, send them now
# This handles the case where there's no body at all
if self._headers_buf and not self._headers_written:
self._headers_written = True
headers_buf = self._headers_buf
self._headers_buf = None
if TYPE_CHECKING:
# Safe because we only enter this block when self._headers_buf is truthy
assert headers_buf is not None
# Combine headers and chunked EOF marker in a single write
if self.chunked:
self._writelines((headers_buf, b"0\r\n\r\n"))
else:
self._write(headers_buf)
elif self.chunked and self._headers_written:
# Headers already sent, just send the final chunk marker
self._write(b"0\r\n\r\n")
self._eof = True
async def write_eof(self, chunk: bytes = b"") -> None:
if self._eof:
return
if chunk and self._on_chunk_sent is not None:
await self._on_chunk_sent(chunk)
# Handle body/compression
if self._compress:
chunks: list[bytes] = []
chunks_len = 0
if chunk and (compressed_chunk := await self._compress.compress(chunk)):
chunks_len = len(compressed_chunk)
chunks.append(compressed_chunk)
flush_chunk = self._compress.flush()
chunks_len += len(flush_chunk)
chunks.append(flush_chunk)
assert chunks_len
# Send buffered headers with compressed data if not yet sent
if self._headers_buf and not self._headers_written:
self._headers_written = True
headers_buf = self._headers_buf
self._headers_buf = None
if self.chunked:
# Coalesce headers with compressed chunked data
chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii")
self._writelines(
(headers_buf, chunk_len_pre, *chunks, b"\r\n0\r\n\r\n")
)
else:
# Coalesce headers with compressed data
self._writelines((headers_buf, *chunks))
await self.drain()
self._eof = True
return
# Headers already sent, just write compressed data
if self.chunked:
chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii")
self._writelines((chunk_len_pre, *chunks, b"\r\n0\r\n\r\n"))
elif len(chunks) > 1:
self._writelines(chunks)
else:
self._write(chunks[0])
await self.drain()
self._eof = True
return
# No compression - send buffered headers if not yet sent
if self._headers_buf and not self._headers_written:
# Use helper to send headers with payload
self._send_headers_with_payload(chunk, True)
await self.drain()
self._eof = True
return
# Handle remaining body
if self.chunked:
if chunk:
# Write final chunk with EOF marker
self._writelines(
(f"{len(chunk):x}\r\n".encode("ascii"), chunk, b"\r\n0\r\n\r\n")
)
else:
self._write(b"0\r\n\r\n")
await self.drain()
self._eof = True
return
if chunk:
self._write(chunk)
await self.drain()
self._eof = True
async def drain(self) -> None:
"""Flush the write buffer.
The intended use is to write
await w.write(data)
await w.drain()
"""
protocol = self._protocol
if protocol.transport is not None and protocol._paused:
await protocol._drain_helper()
def _safe_header(string: str) -> str:
if "\r" in string or "\n" in string:
raise ValueError(
"Newline or carriage return detected in headers. "
"Potential header injection attack."
)
return string
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
return line.encode("utf-8")
_serialize_headers = _py_serialize_headers
try:
import aiohttp._http_writer as _http_writer # type: ignore[import-not-found]
_c_serialize_headers = _http_writer._serialize_headers
if not NO_EXTENSIONS:
_serialize_headers = _c_serialize_headers
except ImportError:
pass
|
# Tests for aiohttp/http_writer.py
import array
import asyncio
import zlib
from collections.abc import Generator, Iterable
from typing import Any
from unittest import mock
import pytest
from multidict import CIMultiDict
from aiohttp import ClientConnectionResetError, hdrs, http
from aiohttp.base_protocol import BaseProtocol
from aiohttp.compression_utils import ZLibBackend
from aiohttp.http_writer import _serialize_headers
@pytest.fixture
def enable_writelines() -> Generator[None, None, None]:
with mock.patch("aiohttp.http_writer.SKIP_WRITELINES", False):
yield
@pytest.fixture
def disable_writelines() -> Generator[None, None, None]:
with mock.patch("aiohttp.http_writer.SKIP_WRITELINES", True):
yield
@pytest.fixture
def force_writelines_small_payloads() -> Generator[None, None, None]:
with mock.patch("aiohttp.http_writer.MIN_PAYLOAD_FOR_WRITELINES", 1):
yield
@pytest.fixture
def buf() -> bytearray:
return bytearray()
@pytest.fixture
def transport(buf: bytearray) -> Any:
transport = mock.create_autospec(asyncio.Transport, spec_set=True, instance=True)
def write(chunk: bytes) -> None:
buf.extend(chunk)
def writelines(chunks: Iterable[bytes]) -> None:
for chunk in chunks:
buf.extend(chunk)
transport.write.side_effect = write
transport.writelines.side_effect = writelines
transport.is_closing.return_value = False
return transport
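# The transport fixture mirrors every write()/writelines() call into `buf`,
# so tests can assert on the exact bytes that would reach the socket.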
@pytest.fixture
def protocol(loop: asyncio.AbstractEventLoop, transport: asyncio.Transport) -> Any:
return mock.create_autospec(
BaseProtocol, spec_set=True, instance=True, transport=transport
)
def decompress(data: bytes) -> bytes:
d = ZLibBackend.decompressobj()
return d.decompress(data)
def decode_chunked(chunked: bytes | bytearray) -> bytes:
i = 0
out = b""
while i < len(chunked):
j = chunked.find(b"\r\n", i)
assert j != -1, "Malformed chunk"
size = int(chunked[i:j], 16)
if size == 0:
break
i = j + 2
out += chunked[i : i + size]
i += size + 2 # skip \r\n after the chunk
return out
def test_payloadwriter_properties(
transport: asyncio.Transport,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
) -> None:
writer = http.StreamWriter(protocol, loop)
assert writer.protocol == protocol
assert writer.transport == transport
async def test_write_headers_buffered_small_payload(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
headers = CIMultiDict({"Content-Length": "11", "Host": "example.com"})
# Write headers - should be buffered
await msg.write_headers("GET / HTTP/1.1", headers)
assert len(buf) == 0 # Headers not sent yet
# Write small body - should coalesce with headers
await msg.write(b"Hello World", drain=False)
# Verify content
assert b"GET / HTTP/1.1\r\n" in buf
assert b"Host: example.com\r\n" in buf
assert b"Content-Length: 11\r\n" in buf
assert b"\r\n\r\nHello World" in buf
async def test_write_headers_chunked_coalescing(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
headers = CIMultiDict({"Transfer-Encoding": "chunked", "Host": "example.com"})
# Write headers - should be buffered
await msg.write_headers("POST /upload HTTP/1.1", headers)
assert len(buf) == 0 # Headers not sent yet
# Write first chunk - should coalesce with headers
await msg.write(b"First chunk", drain=False)
# Verify content
assert b"POST /upload HTTP/1.1\r\n" in buf
assert b"Transfer-Encoding: chunked\r\n" in buf
# "b" is hex for 11 (length of "First chunk")
assert b"\r\n\r\nb\r\nFirst chunk\r\n" in buf
async def test_write_eof_with_buffered_headers(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
headers = CIMultiDict({"Content-Length": "9", "Host": "example.com"})
# Write headers - should be buffered
await msg.write_headers("POST /data HTTP/1.1", headers)
assert len(buf) == 0
# Call write_eof with body - should coalesce
await msg.write_eof(b"Last data")
# Verify content
assert b"POST /data HTTP/1.1\r\n" in buf
assert b"\r\n\r\nLast data" in buf
async def test_set_eof_sends_buffered_headers(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
headers = CIMultiDict({"Host": "example.com"})
# Write headers - should be buffered
await msg.write_headers("GET /empty HTTP/1.1", headers)
assert len(buf) == 0
# Call set_eof without body - headers should be sent
msg.set_eof()
# Headers should be sent
assert len(buf) > 0
assert b"GET /empty HTTP/1.1\r\n" in buf
async def test_write_payload_eof(
transport: asyncio.Transport,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
await msg.write(b"data1")
await msg.write(b"data2")
await msg.write_eof()
content = b"".join([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined]
assert b"data1data2" == content.split(b"\r\n\r\n", 1)[-1]
async def test_write_payload_chunked(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
await msg.write(b"data")
await msg.write_eof()
assert b"4\r\ndata\r\n0\r\n\r\n" == buf
async def test_write_payload_chunked_multiple(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
await msg.write(b"data1")
await msg.write(b"data2")
await msg.write_eof()
assert b"5\r\ndata1\r\n5\r\ndata2\r\n0\r\n\r\n" == buf
async def test_write_payload_length(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.length = 2
await msg.write(b"d")
await msg.write(b"ata")
await msg.write_eof()
content = b"".join([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined]
assert b"da" == content.split(b"\r\n\r\n", 1)[-1]
@pytest.mark.usefixtures("disable_writelines")
@pytest.mark.internal # Used for performance benchmarking
async def test_write_large_payload_deflate_compression_data_in_eof(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
await msg.write(b"data" * 4096)
assert transport.write.called # type: ignore[attr-defined]
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
transport.write.reset_mock() # type: ignore[attr-defined]
# This payload compresses to 20447 bytes
payload = b"".join(
[bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)]
)
await msg.write_eof(payload)
chunks.extend([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert zlib.decompress(content) == (b"data" * 4096) + payload
@pytest.mark.usefixtures("disable_writelines")
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_large_payload_deflate_compression_data_in_eof_all_zlib(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
await msg.write(b"data" * 4096)
# Behavior depends on zlib backend, isal compress() returns b'' initially
# and the entire compressed bytes at flush() for this data
backend_to_write_called = {
"isal.isal_zlib": False,
"zlib": True,
"zlib_ng.zlib_ng": True,
}
assert transport.write.called == backend_to_write_called[ZLibBackend.name] # type: ignore[attr-defined]
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
transport.write.reset_mock() # type: ignore[attr-defined]
# This payload compresses to 20447 bytes
payload = b"".join(
[bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)]
)
await msg.write_eof(payload)
chunks.extend([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert ZLibBackend.decompress(content) == (b"data" * 4096) + payload
@pytest.mark.usefixtures("enable_writelines")
@pytest.mark.internal # Used for performance benchmarking
async def test_write_large_payload_deflate_compression_data_in_eof_writelines(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
await msg.write(b"data" * 4096)
assert transport.write.called # type: ignore[attr-defined]
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
transport.write.reset_mock() # type: ignore[attr-defined]
assert not transport.writelines.called # type: ignore[attr-defined]
# This payload compresses to 20447 bytes
payload = b"".join(
[bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)]
)
await msg.write_eof(payload)
assert not transport.write.called # type: ignore[attr-defined]
assert transport.writelines.called # type: ignore[attr-defined]
chunks.extend(transport.writelines.mock_calls[0][1][0]) # type: ignore[attr-defined]
content = b"".join(chunks)
assert zlib.decompress(content) == (b"data" * 4096) + payload
@pytest.mark.usefixtures("enable_writelines")
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_large_payload_deflate_compression_data_in_eof_writelines_all_zlib(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
await msg.write(b"data" * 4096)
# Behavior depends on zlib backend, isal compress() returns b'' initially
# and the entire compressed bytes at flush() for this data
backend_to_write_called = {
"isal.isal_zlib": False,
"zlib": True,
"zlib_ng.zlib_ng": True,
}
assert transport.write.called == backend_to_write_called[ZLibBackend.name] # type: ignore[attr-defined]
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
transport.write.reset_mock() # type: ignore[attr-defined]
assert not transport.writelines.called # type: ignore[attr-defined]
# This payload compresses to 20447 bytes
payload = b"".join(
[bytes((*range(0, i), *range(i, 0, -1))) for i in range(255) for _ in range(64)]
)
await msg.write_eof(payload)
assert transport.writelines.called != transport.write.called # type: ignore[attr-defined]
if transport.writelines.called: # type: ignore[attr-defined]
chunks.extend(transport.writelines.mock_calls[0][1][0]) # type: ignore[attr-defined]
else: # transport.write.called: # type: ignore[attr-defined]
chunks.extend([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined]
content = b"".join(chunks)
assert ZLibBackend.decompress(content) == (b"data" * 4096) + payload
async def test_write_payload_chunked_filter(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
await msg.write(b"da")
await msg.write(b"ta")
await msg.write_eof()
content = b"".join([b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)]) # type: ignore[attr-defined]
content += b"".join([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined]
assert content.endswith(b"2\r\nda\r\n2\r\nta\r\n0\r\n\r\n")
async def test_write_payload_chunked_filter_multiple_chunks(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
await msg.write(b"da")
await msg.write(b"ta")
await msg.write(b"1d")
await msg.write(b"at")
await msg.write(b"a2")
await msg.write_eof()
content = b"".join([b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)]) # type: ignore[attr-defined]
content += b"".join([c[1][0] for c in list(transport.write.mock_calls)]) # type: ignore[attr-defined]
assert content.endswith(
b"2\r\nda\r\n2\r\nta\r\n2\r\n1d\r\n2\r\nat\r\n2\r\na2\r\n0\r\n\r\n"
)
@pytest.mark.internal # Used for performance benchmarking
async def test_write_payload_deflate_compression(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
COMPRESSED = b"x\x9cKI,I\x04\x00\x04\x00\x01\x9b"
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
await msg.write(b"data")
await msg.write_eof()
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert COMPRESSED == content.split(b"\r\n\r\n", 1)[-1]
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_payload_deflate_compression_all_zlib(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
await msg.write(b"data")
await msg.write_eof()
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert b"data" == decompress(content)
@pytest.mark.internal # Used for performance benchmarking
async def test_write_payload_deflate_compression_chunked(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
expected = b"2\r\nx\x9c\r\na\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n0\r\n\r\n"
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data")
await msg.write_eof()
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert content == expected
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_payload_deflate_compression_chunked_all_zlib(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data")
await msg.write_eof()
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert b"data" == decompress(decode_chunked(content))
@pytest.mark.usefixtures("enable_writelines")
@pytest.mark.usefixtures("force_writelines_small_payloads")
@pytest.mark.internal # Used for performance benchmarking
async def test_write_payload_deflate_compression_chunked_writelines(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
expected = b"2\r\nx\x9c\r\na\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n0\r\n\r\n"
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data")
await msg.write_eof()
chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert content == expected
@pytest.mark.usefixtures("enable_writelines")
@pytest.mark.usefixtures("force_writelines_small_payloads")
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_payload_deflate_compression_chunked_writelines_all_zlib(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data")
await msg.write_eof()
chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert b"data" == decompress(decode_chunked(content))
@pytest.mark.internal # Used for performance benchmarking
async def test_write_payload_deflate_and_chunked(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"da")
await msg.write(b"ta")
await msg.write_eof()
thing = b"2\r\nx\x9c\r\na\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n0\r\n\r\n"
assert thing == buf
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_payload_deflate_and_chunked_all_zlib(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"da")
await msg.write(b"ta")
await msg.write_eof()
assert b"data" == decompress(decode_chunked(buf))
@pytest.mark.internal # Used for performance benchmarking
async def test_write_payload_deflate_compression_chunked_data_in_eof(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
expected = b"2\r\nx\x9c\r\nd\r\nKI,IL\xcdK\x01\x00\x0b@\x02\xd2\r\n0\r\n\r\n"
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data")
await msg.write_eof(b"end")
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert content == expected
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_payload_deflate_compression_chunked_data_in_eof_all_zlib(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data")
await msg.write_eof(b"end")
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert b"dataend" == decompress(decode_chunked(content))
@pytest.mark.usefixtures("enable_writelines")
@pytest.mark.usefixtures("force_writelines_small_payloads")
@pytest.mark.internal # Used for performance benchmarking
async def test_write_payload_deflate_compression_chunked_data_in_eof_writelines(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
expected = b"2\r\nx\x9c\r\nd\r\nKI,IL\xcdK\x01\x00\x0b@\x02\xd2\r\n0\r\n\r\n"
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data")
await msg.write_eof(b"end")
chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert content == expected
@pytest.mark.usefixtures("enable_writelines")
@pytest.mark.usefixtures("force_writelines_small_payloads")
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_payload_deflate_compression_chunked_data_in_eof_writelines_all_zlib(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data")
await msg.write_eof(b"end")
chunks = [b"".join(c[1][0]) for c in list(transport.writelines.mock_calls)] # type: ignore[attr-defined]
assert all(chunks)
content = b"".join(chunks)
assert b"dataend" == decompress(decode_chunked(content))
@pytest.mark.internal # Used for performance benchmarking
async def test_write_large_payload_deflate_compression_chunked_data_in_eof(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data" * 4096)
# This payload compresses to 1111 bytes
payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)])
await msg.write_eof(payload)
compressed = []
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
chunked_body = b"".join(chunks)
split_body = chunked_body.split(b"\r\n")
while split_body:
if split_body.pop(0):
compressed.append(split_body.pop(0))
content = b"".join(compressed)
assert zlib.decompress(content) == (b"data" * 4096) + payload
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_large_payload_deflate_compression_chunked_data_in_eof_all_zlib(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data" * 4096)
# This payload compresses to 1111 bytes
payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)])
await msg.write_eof(payload)
compressed = []
chunks = [c[1][0] for c in list(transport.write.mock_calls)] # type: ignore[attr-defined]
chunked_body = b"".join(chunks)
split_body = chunked_body.split(b"\r\n")
while split_body:
if split_body.pop(0):
compressed.append(split_body.pop(0))
content = b"".join(compressed)
assert ZLibBackend.decompress(content) == (b"data" * 4096) + payload
@pytest.mark.usefixtures("enable_writelines")
@pytest.mark.usefixtures("force_writelines_small_payloads")
@pytest.mark.internal # Used for performance benchmarking
async def test_write_large_payload_deflate_compression_chunked_data_in_eof_writelines(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data" * 4096)
# This payload compresses to 1111 bytes
payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)])
await msg.write_eof(payload)
assert not transport.write.called # type: ignore[attr-defined]
chunks = []
for write_lines_call in transport.writelines.mock_calls: # type: ignore[attr-defined]
chunked_payload = list(write_lines_call[1][0])[1:]
chunked_payload.pop()
chunks.extend(chunked_payload)
assert all(chunks)
content = b"".join(chunks)
assert zlib.decompress(content) == (b"data" * 4096) + payload
@pytest.mark.usefixtures("enable_writelines")
@pytest.mark.usefixtures("force_writelines_small_payloads")
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_large_payload_deflate_compression_chunked_data_in_eof_writelines_all_zlib(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data" * 4096)
# This payload compresses to 1111 bytes
payload = b"".join([bytes((*range(0, i), *range(i, 0, -1))) for i in range(255)])
await msg.write_eof(payload)
assert not transport.write.called # type: ignore[attr-defined]
chunks = []
for write_lines_call in transport.writelines.mock_calls: # type: ignore[attr-defined]
chunked_payload = list(write_lines_call[1][0])[1:]
chunked_payload.pop()
chunks.extend(chunked_payload)
assert all(chunks)
content = b"".join(chunks)
assert ZLibBackend.decompress(content) == (b"data" * 4096) + payload
@pytest.mark.internal # Used for performance benchmarking
async def test_write_payload_deflate_compression_chunked_connection_lost(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data")
with (
pytest.raises(
ClientConnectionResetError, match="Cannot write to closing transport"
),
mock.patch.object(transport, "is_closing", return_value=True),
):
await msg.write_eof(b"end")
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_write_payload_deflate_compression_chunked_connection_lost_all_zlib(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
await msg.write(b"data")
with (
pytest.raises(
ClientConnectionResetError, match="Cannot write to closing transport"
),
mock.patch.object(transport, "is_closing", return_value=True),
):
await msg.write_eof(b"end")
async def test_write_payload_bytes_memoryview(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
mv = memoryview(b"abcd")
await msg.write(mv)
await msg.write_eof()
thing = b"abcd"
assert thing == buf
async def test_write_payload_short_ints_memoryview(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
payload = memoryview(array.array("H", [65, 66, 67]))
await msg.write(payload)
await msg.write_eof()
endians = (
(b"6\r\n\x00A\x00B\x00C\r\n0\r\n\r\n"),
(b"6\r\nA\x00B\x00C\x00\r\n0\r\n\r\n"),
)
assert buf in endians
async def test_write_payload_2d_shape_memoryview(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
mv = memoryview(b"ABCDEF")
payload = mv.cast("c", [3, 2])
await msg.write(payload)
await msg.write_eof()
thing = b"6\r\nABCDEF\r\n0\r\n\r\n"
assert thing == buf
async def test_write_payload_slicing_long_memoryview(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.length = 4
mv = memoryview(b"ABCDEF")
payload = mv.cast("c", [3, 2])
await msg.write(payload)
await msg.write_eof()
thing = b"ABCD"
assert thing == buf
async def test_write_drain(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
with mock.patch.object(msg, "drain", autospec=True, spec_set=True) as m:
await msg.write(b"1" * (64 * 1024 * 2), drain=False)
assert not m.called
await msg.write(b"1", drain=True)
assert m.called
assert msg.buffer_size == 0 # type: ignore[unreachable]
async def test_write_calls_callback(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
on_chunk_sent = mock.AsyncMock()
msg = http.StreamWriter(protocol, loop, on_chunk_sent=on_chunk_sent)
chunk = b"1"
await msg.write(chunk)
assert on_chunk_sent.called
assert on_chunk_sent.call_args == mock.call(chunk)
async def test_write_eof_calls_callback(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
on_chunk_sent = mock.AsyncMock()
msg = http.StreamWriter(protocol, loop, on_chunk_sent=on_chunk_sent)
chunk = b"1"
await msg.write_eof(chunk=chunk)
assert on_chunk_sent.called
assert on_chunk_sent.call_args == mock.call(chunk)
async def test_write_to_closing_transport(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
await msg.write(b"Before closing")
transport.is_closing.return_value = True # type: ignore[attr-defined]
with pytest.raises(ClientConnectionResetError):
await msg.write(b"After closing")
async def test_write_to_closed_transport(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that writing to a closed transport raises ClientConnectionResetError.
The StreamWriter checks to see if protocol.transport is None before
    writing to the transport. If it is None, it raises ClientConnectionResetError.
"""
msg = http.StreamWriter(protocol, loop)
await msg.write(b"Before transport close")
protocol.transport = None
with pytest.raises(
ClientConnectionResetError, match="Cannot write to closing transport"
):
await msg.write(b"After transport closed")
async def test_drain(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
await msg.drain()
assert protocol._drain_helper.called # type: ignore[attr-defined]
async def test_drain_no_transport(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg._protocol.transport = None
await msg.drain()
assert not protocol._drain_helper.called # type: ignore[attr-defined]
async def test_write_headers_prevents_injection(
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
status_line = "HTTP/1.1 200 OK"
wrong_headers = CIMultiDict({"Set-Cookie: abc=123\r\nContent-Length": "256"})
with pytest.raises(ValueError):
await msg.write_headers(status_line, wrong_headers)
wrong_headers = CIMultiDict({"Content-Length": "256\r\nSet-Cookie: abc=123"})
with pytest.raises(ValueError):
await msg.write_headers(status_line, wrong_headers)
async def test_set_eof_after_write_headers(
protocol: BaseProtocol,
transport: mock.Mock,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
status_line = "HTTP/1.1 200 OK"
good_headers = CIMultiDict({"Set-Cookie": "abc=123"})
# Write headers - should be buffered
await msg.write_headers(status_line, good_headers)
assert not transport.write.called # Headers are buffered
# set_eof should send the buffered headers
msg.set_eof()
assert transport.write.called
# Subsequent write_eof should do nothing
transport.write.reset_mock()
await msg.write_eof()
assert not transport.write.called
async def test_write_headers_does_not_write_immediately(
protocol: BaseProtocol,
transport: mock.Mock,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
status_line = "HTTP/1.1 200 OK"
headers = CIMultiDict({"Content-Type": "text/plain"})
# write_headers should buffer, not write immediately
await msg.write_headers(status_line, headers)
assert not transport.write.called
assert not transport.writelines.called
# Headers should be sent when set_eof is called
msg.set_eof()
assert transport.write.called
async def test_write_headers_with_compression_coalescing(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
headers = CIMultiDict({"Content-Encoding": "deflate", "Host": "example.com"})
# Write headers - should be buffered
await msg.write_headers("POST /data HTTP/1.1", headers)
assert len(buf) == 0
# Write compressed data via write_eof - should coalesce
await msg.write_eof(b"Hello World")
# Verify headers are present
assert b"POST /data HTTP/1.1\r\n" in buf
assert b"Content-Encoding: deflate\r\n" in buf
# Verify compressed data is present
# The data should contain headers + compressed payload
assert len(buf) > 50 # Should have headers + some compressed data
@pytest.mark.parametrize(
"char",
[
"\n",
"\r",
],
)
def test_serialize_headers_raises_on_new_line_or_carriage_return(char: str) -> None:
"""Verify serialize_headers raises on cr or nl in the headers."""
status_line = "HTTP/1.1 200 OK"
headers = CIMultiDict(
{
hdrs.CONTENT_TYPE: f"text/plain{char}",
}
)
with pytest.raises(
ValueError,
match=(
"Newline or carriage return detected in headers. "
"Potential header injection attack."
),
):
_serialize_headers(status_line, headers)
async def test_write_compressed_data_with_headers_coalescing(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that headers are coalesced with compressed data in write() method."""
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
headers = CIMultiDict({"Content-Encoding": "deflate", "Host": "example.com"})
# Write headers - should be buffered
await msg.write_headers("POST /data HTTP/1.1", headers)
assert len(buf) == 0
# Write compressed data - should coalesce with headers
await msg.write(b"Hello World")
# Headers and compressed data should be written together
assert b"POST /data HTTP/1.1\r\n" in buf
assert b"Content-Encoding: deflate\r\n" in buf
assert len(buf) > 50 # Headers + compressed data
async def test_write_compressed_chunked_with_headers_coalescing(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test headers coalescing with compressed chunked data."""
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
headers = CIMultiDict(
{"Content-Encoding": "deflate", "Transfer-Encoding": "chunked"}
)
# Write headers - should be buffered
await msg.write_headers("POST /data HTTP/1.1", headers)
assert len(buf) == 0
# Write compressed chunked data - should coalesce
await msg.write(b"Hello World")
# Check headers are present
assert b"POST /data HTTP/1.1\r\n" in buf
assert b"Transfer-Encoding: chunked\r\n" in buf
# Should have chunk size marker for compressed data
output = buf.decode("latin-1", errors="ignore")
assert "\r\n" in output # Should have chunk markers
async def test_write_multiple_compressed_chunks_after_headers_sent(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test multiple compressed writes after headers are already sent."""
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
headers = CIMultiDict({"Content-Encoding": "deflate"})
# Write headers and send them immediately by writing first chunk
await msg.write_headers("POST /data HTTP/1.1", headers)
assert len(buf) == 0 # Headers buffered
# Write first chunk - this will send headers + compressed data
await msg.write(b"First chunk of data that should compress")
len_after_first = len(buf)
assert len_after_first > 0 # Headers + first chunk written
# Write second chunk and force flush via EOF
await msg.write(b"Second chunk of data that should also compress well")
await msg.write_eof()
# After EOF, all compressed data should be flushed
final_len = len(buf)
assert final_len > len_after_first
async def test_write_eof_empty_compressed_with_buffered_headers(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test write_eof with no data but compression enabled and buffered headers."""
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
headers = CIMultiDict({"Content-Encoding": "deflate"})
# Write headers - should be buffered
await msg.write_headers("GET /data HTTP/1.1", headers)
assert len(buf) == 0
# Write EOF with no data - should still coalesce headers with compression flush
await msg.write_eof()
# Headers should be present
assert b"GET /data HTTP/1.1\r\n" in buf
assert b"Content-Encoding: deflate\r\n" in buf
# Should have compression flush data
assert len(buf) > 40
async def test_write_compressed_gzip_with_headers_coalescing(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test gzip compression with header coalescing."""
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("gzip")
headers = CIMultiDict({"Content-Encoding": "gzip"})
# Write headers - should be buffered
await msg.write_headers("POST /data HTTP/1.1", headers)
assert len(buf) == 0
# Write gzip compressed data via write_eof
await msg.write_eof(b"Test gzip compression")
# Verify coalescing happened
assert b"POST /data HTTP/1.1\r\n" in buf
assert b"Content-Encoding: gzip\r\n" in buf
# Gzip typically produces more overhead than deflate
assert len(buf) > 60
async def test_compression_with_content_length_constraint(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test compression respects content length constraints."""
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.length = 5 # Set small content length
headers = CIMultiDict({"Content-Length": "5"})
await msg.write_headers("POST /data HTTP/1.1", headers)
# Write some initial data to trigger headers to be sent
await msg.write(b"12345") # This matches our content length of 5
headers_and_first_chunk_len = len(buf)
# Try to write more data than content length allows
await msg.write(b"This is a longer message")
# The second write should not add any data since content length is exhausted
# After writing 5 bytes, length becomes 0, so additional writes are ignored
assert len(buf) == headers_and_first_chunk_len # No additional data written
async def test_write_compressed_zero_length_chunk(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test writing empty chunk with compression."""
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
await msg.write_headers("POST /data HTTP/1.1", CIMultiDict())
# Force headers to be sent by writing something
await msg.write(b"x") # Write something to trigger header send
buf.clear()
# Write empty chunk - compression may still produce output
await msg.write(b"")
# With compression, even empty input might produce small output
# due to compression state, but it should be minimal
assert len(buf) < 10 # Should be very small if anything
async def test_chunked_compressed_eof_coalescing(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test chunked compressed data with EOF marker coalescing."""
msg = http.StreamWriter(protocol, loop)
msg.enable_compression("deflate")
msg.enable_chunking()
headers = CIMultiDict(
{"Content-Encoding": "deflate", "Transfer-Encoding": "chunked"}
)
# Buffer headers
await msg.write_headers("POST /data HTTP/1.1", headers)
assert len(buf) == 0
# Write compressed chunked data with EOF
await msg.write_eof(b"Final compressed chunk")
# Should have headers
assert b"POST /data HTTP/1.1\r\n" in buf
# Should end with chunked EOF marker
assert buf.endswith(b"0\r\n\r\n")
# Should have chunk size in hex before the compressed data
output = buf
# Verify we have chunk markers - look for \r\n followed by hex digits
# The chunk size should be between the headers and the compressed data
assert b"\r\n\r\n" in output # End of headers
# After headers, we should have a hex chunk size
headers_end = output.find(b"\r\n\r\n") + 4
chunk_data = output[headers_end:]
# Should start with hex digits followed by \r\n
assert (
chunk_data[:10]
.strip()
.decode("ascii", errors="ignore")
.replace("\r\n", "")
.isalnum()
)
async def test_compression_different_strategies(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test compression with different strategies."""
# Test with best speed strategy (default)
msg1 = http.StreamWriter(protocol, loop)
msg1.enable_compression("deflate") # Default strategy
await msg1.write_headers("POST /fast HTTP/1.1", CIMultiDict())
await msg1.write_eof(b"Test data for compression test data for compression")
buf1_len = len(buf)
# Compression should produce output
assert buf1_len > 0
# Headers should be present
assert b"POST /fast HTTP/1.1\r\n" in buf
# Since we can't easily test different compression strategies
# (the compressor initialization might not support strategy parameter),
# we just verify that compression works
async def test_chunked_headers_single_write_with_set_eof(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that set_eof combines headers and chunked EOF in single write."""
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
# Write headers - should be buffered
headers = CIMultiDict({"Transfer-Encoding": "chunked", "Host": "example.com"})
await msg.write_headers("GET /test HTTP/1.1", headers)
assert len(buf) == 0 # Headers not sent yet
assert not transport.writelines.called # type: ignore[attr-defined] # No writelines calls yet
# Call set_eof - should send headers + chunked EOF in single write call
msg.set_eof()
# Should have exactly one write call (since payload is small, writelines falls back to write)
assert transport.write.call_count == 1 # type: ignore[attr-defined]
assert transport.writelines.call_count == 0 # type: ignore[attr-defined] # Not called for small payloads
# The write call should have the combined headers and chunked EOF marker
write_data = transport.write.call_args[0][0] # type: ignore[attr-defined]
assert write_data.startswith(b"GET /test HTTP/1.1\r\n")
assert b"Transfer-Encoding: chunked\r\n" in write_data
assert write_data.endswith(b"\r\n\r\n0\r\n\r\n") # Headers end + chunked EOF
# Verify final output
assert b"GET /test HTTP/1.1\r\n" in buf
assert b"Transfer-Encoding: chunked\r\n" in buf
assert buf.endswith(b"0\r\n\r\n")
async def test_send_headers_forces_header_write(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that send_headers() forces writing buffered headers."""
msg = http.StreamWriter(protocol, loop)
headers = CIMultiDict({"Content-Length": "10", "Host": "example.com"})
# Write headers (should be buffered)
await msg.write_headers("GET /test HTTP/1.1", headers)
assert len(buf) == 0 # Headers buffered
# Force send headers
msg.send_headers()
# Headers should now be written
assert b"GET /test HTTP/1.1\r\n" in buf
assert b"Content-Length: 10\r\n" in buf
assert b"Host: example.com\r\n" in buf
# Writing body should not resend headers
buf.clear()
await msg.write(b"0123456789")
assert b"GET /test" not in buf # Headers not repeated
assert buf == b"0123456789" # Just the body
async def test_send_headers_idempotent(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that send_headers() is idempotent and safe to call multiple times."""
msg = http.StreamWriter(protocol, loop)
headers = CIMultiDict({"Content-Length": "5", "Host": "example.com"})
# Write headers (should be buffered)
await msg.write_headers("GET /test HTTP/1.1", headers)
assert len(buf) == 0 # Headers buffered
# Force send headers
msg.send_headers()
headers_output = bytes(buf)
# Call send_headers again - should be no-op
msg.send_headers()
assert buf == headers_output # No additional output
# Call send_headers after headers already sent - should be no-op
await msg.write(b"hello")
msg.send_headers()
assert buf[len(headers_output) :] == b"hello" # Only body added
async def test_send_headers_no_buffered_headers(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that send_headers() is safe when no headers are buffered."""
msg = http.StreamWriter(protocol, loop)
# Call send_headers without writing headers first
msg.send_headers() # Should not crash
assert len(buf) == 0 # No output
async def test_write_drain_condition_with_small_buffer(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that drain is not called when buffer_size <= LIMIT."""
msg = http.StreamWriter(protocol, loop)
# Write headers first
await msg.write_headers("GET /test HTTP/1.1", CIMultiDict())
msg.send_headers() # Send headers to start with clean state
# Reset buffer size manually since send_headers doesn't do it
msg.buffer_size = 0
# Reset drain helper mock
protocol._drain_helper.reset_mock() # type: ignore[attr-defined]
# Write small amount of data with drain=True but buffer under limit
small_data = b"x" * 100 # Much less than LIMIT (2**16)
await msg.write(small_data, drain=True)
# Drain should NOT be called because buffer_size <= LIMIT
assert not protocol._drain_helper.called # type: ignore[attr-defined]
assert msg.buffer_size == 100
assert small_data in buf
async def test_write_drain_condition_with_large_buffer(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that drain is called only when drain=True AND buffer_size > LIMIT."""
msg = http.StreamWriter(protocol, loop)
# Write headers first
await msg.write_headers("GET /test HTTP/1.1", CIMultiDict())
msg.send_headers() # Send headers to start with clean state
# Reset buffer size manually since send_headers doesn't do it
msg.buffer_size = 0
# Reset drain helper mock
protocol._drain_helper.reset_mock() # type: ignore[attr-defined]
# Write large amount of data with drain=True
large_data = b"x" * (2**16 + 1) # Just over LIMIT
await msg.write(large_data, drain=True)
# Drain should be called because drain=True AND buffer_size > LIMIT
assert protocol._drain_helper.called # type: ignore[attr-defined]
assert msg.buffer_size == 0 # Buffer reset after drain
assert large_data in buf
async def test_write_no_drain_with_large_buffer(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that drain is not called when drain=False even with large buffer."""
msg = http.StreamWriter(protocol, loop)
# Write headers first
await msg.write_headers("GET /test HTTP/1.1", CIMultiDict())
msg.send_headers() # Send headers to start with clean state
# Reset buffer size manually since send_headers doesn't do it
msg.buffer_size = 0
# Reset drain helper mock
protocol._drain_helper.reset_mock() # type: ignore[attr-defined]
# Write large amount of data with drain=False
large_data = b"x" * (2**16 + 1) # Just over LIMIT
await msg.write(large_data, drain=False)
# Drain should NOT be called because drain=False
assert not protocol._drain_helper.called # type: ignore[attr-defined]
assert msg.buffer_size == (2**16 + 1) # Buffer not reset
assert large_data in buf
async def test_set_eof_idempotent(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test that set_eof() is idempotent and can be called multiple times safely."""
msg = http.StreamWriter(protocol, loop)
# Test 1: Multiple set_eof calls with buffered headers
headers = CIMultiDict({"Content-Length": "0"})
await msg.write_headers("GET /test HTTP/1.1", headers)
# First set_eof should send headers
msg.set_eof()
first_output = bytes(buf)  # snapshot; buf itself is mutated in place
assert b"GET /test HTTP/1.1\r\n" in first_output
assert b"Content-Length: 0\r\n" in first_output
# Second set_eof should be no-op
msg.set_eof()
assert bytes(buf) == first_output # No additional output
# Third set_eof should also be no-op
msg.set_eof()
assert bytes(buf) == first_output # Still no additional output
# Test 2: set_eof with chunked encoding
buf.clear()
msg2 = http.StreamWriter(protocol, loop)
msg2.enable_chunking()
headers2 = CIMultiDict({"Transfer-Encoding": "chunked"})
await msg2.write_headers("POST /data HTTP/1.1", headers2)
# First set_eof should send headers + chunked EOF
msg2.set_eof()
chunked_output = bytes(buf)  # snapshot; buf itself is mutated in place
assert b"POST /data HTTP/1.1\r\n" in buf
assert b"Transfer-Encoding: chunked\r\n" in buf
assert b"0\r\n\r\n" in buf # Chunked EOF marker
# Second set_eof should be no-op
msg2.set_eof()
assert buf == chunked_output # No additional output
# Test 3: set_eof after headers already sent
buf.clear()
msg3 = http.StreamWriter(protocol, loop)
headers3 = CIMultiDict({"Content-Length": "5"})
await msg3.write_headers("PUT /update HTTP/1.1", headers3)
# Send headers by writing some data
await msg3.write(b"hello")
headers_and_body = bytes(buf)  # snapshot; buf itself is mutated in place
# set_eof after headers sent should be no-op
msg3.set_eof()
assert buf == headers_and_body # No additional output
# Another set_eof should still be no-op
msg3.set_eof()
assert buf == headers_and_body # Still no additional output
async def test_non_chunked_write_empty_body(
buf: bytearray,
protocol: BaseProtocol,
transport: mock.Mock,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test non-chunked response with empty body."""
msg = http.StreamWriter(protocol, loop)
# Non-chunked response with Content-Length: 0
headers = CIMultiDict({"Content-Length": "0"})
await msg.write_headers("GET /empty HTTP/1.1", headers)
# Write empty body
await msg.write(b"")
# Check the output
assert b"GET /empty HTTP/1.1\r\n" in buf
assert b"Content-Length: 0\r\n" in buf
async def test_chunked_headers_sent_with_empty_chunk_not_eof(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test chunked encoding where headers are sent without data and not EOF."""
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
headers = CIMultiDict({"Transfer-Encoding": "chunked"})
await msg.write_headers("POST /upload HTTP/1.1", headers)
# This should trigger the else case in _send_headers_with_payload
# by having no chunk data and is_eof=False
await msg.write(b"")
# Headers should be sent alone
assert b"POST /upload HTTP/1.1\r\n" in buf
assert b"Transfer-Encoding: chunked\r\n" in buf
# Should not have any chunk markers yet
assert b"0\r\n" not in buf
async def test_chunked_set_eof_after_headers_sent(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test chunked encoding where set_eof is called after headers already sent."""
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
headers = CIMultiDict({"Transfer-Encoding": "chunked"})
await msg.write_headers("POST /data HTTP/1.1", headers)
# Send headers by writing some data
await msg.write(b"test data")
buf.clear() # Clear buffer to check only what set_eof writes
# This should trigger writing chunked EOF when headers already sent
msg.set_eof()
# Should only have the chunked EOF marker
assert buf == b"0\r\n\r\n"
@pytest.mark.usefixtures("enable_writelines")
@pytest.mark.usefixtures("force_writelines_small_payloads")
async def test_write_eof_chunked_with_data_using_writelines(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test write_eof with chunked data that uses writelines (line 336)."""
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
headers = CIMultiDict({"Transfer-Encoding": "chunked"})
await msg.write_headers("POST /data HTTP/1.1", headers)
# Send headers first
await msg.write(b"initial")
transport.writelines.reset_mock() # type: ignore[attr-defined]
# This should trigger writelines for final chunk with EOF
await msg.write_eof(b"final chunk data")
# Should have used writelines
assert transport.writelines.called # type: ignore[attr-defined]
# Get the data from writelines call
writelines_data = transport.writelines.call_args[0][0] # type: ignore[attr-defined]
combined = b"".join(writelines_data)
# Should have chunk size, data, and EOF marker
assert b"10\r\n" in combined # hex for 16 (length of "final chunk data")
assert b"final chunk data" in combined
assert b"0\r\n\r\n" in combined
async def test_send_headers_with_payload_chunked_eof_no_data(
buf: bytearray,
protocol: BaseProtocol,
transport: asyncio.Transport,
loop: asyncio.AbstractEventLoop,
) -> None:
"""Test _send_headers_with_payload with chunked, is_eof=True but no chunk data."""
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
headers = CIMultiDict({"Transfer-Encoding": "chunked"})
await msg.write_headers("GET /test HTTP/1.1", headers)
# This triggers the elif is_eof branch in _send_headers_with_payload
# by calling write_eof with empty chunk
await msg.write_eof(b"")
# Should have headers and chunked EOF marker together
assert b"GET /test HTTP/1.1\r\n" in buf
assert b"Transfer-Encoding: chunked\r\n" in buf
assert buf.endswith(b"0\r\n\r\n")
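# Editorial note: the chunked framing asserted throughout these tests follows
# RFC 9112 section 7.1: each chunk is serialized as "<size-in-hex>\r\n<data>\r\n"
# and the message ends with the zero-length chunk "0\r\n\r\n".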
|
./temp_repos/aiohttp/aiohttp/http_writer.py
|
./temp_repos/aiohttp/tests/test_http_writer.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'HttpVersion'.
Context:
- Class Name: HttpVersion
- Dependencies to Mock: on_headers_sent, on_chunk_sent, protocol, loop
- Key Imports: aiohttp._http_writer, client_exceptions, helpers, typing, multidict, base_protocol, asyncio, abc, compression_utils, sys
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
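A minimal sketch of such a test (editorial example, not part of the dataset record; it assumes HttpVersion is the simple major/minor named tuple exported by aiohttp, so the collaborators listed above only need lightweight mocks):

import unittest
from unittest import mock

from aiohttp import HttpVersion


class TestHttpVersion(unittest.TestCase):
    def test_fields_and_ordering(self) -> None:
        # Arrange
        version = HttpVersion(1, 1)
        # Act / Assert: named-tuple fields and tuple comparison semantics
        self.assertEqual(version.major, 1)
        self.assertEqual(version.minor, 1)
        self.assertEqual(version, (1, 1))
        self.assertGreater(HttpVersion(2, 0), version)

    def test_failure_on_missing_field(self) -> None:
        # Arrange / Act / Assert: constructing without both fields fails
        with self.assertRaises(TypeError):
            HttpVersion(1)  # type: ignore[call-arg]

    def test_with_mocked_protocol(self) -> None:
        # Arrange: protocol/loop are mocked; HttpVersion is a plain value object
        protocol = mock.MagicMock()
        protocol.version = HttpVersion(1, 0)
        # Act / Assert
        self.assertEqual(protocol.version, HttpVersion(1, 0))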
|
HttpVersion
|
python
|
"""
Digest authentication middleware for aiohttp client.
This middleware implements HTTP Digest Authentication according to RFC 7616,
providing a more secure alternative to Basic Authentication. It supports all
standard hash algorithms including MD5, SHA, SHA-256, SHA-512 and their session
variants, as well as both 'auth' and 'auth-int' quality of protection (qop) options.
"""
import hashlib
import os
import re
import time
from collections.abc import Callable
from typing import Final, Literal, TypedDict
from yarl import URL
from . import hdrs
from .client_exceptions import ClientError
from .client_middlewares import ClientHandlerType
from .client_reqrep import ClientRequest, ClientResponse
from .payload import Payload
class DigestAuthChallenge(TypedDict, total=False):
realm: str
nonce: str
qop: str
algorithm: str
opaque: str
domain: str
stale: str
DigestFunctions: dict[str, Callable[[bytes], "hashlib._Hash"]] = {
"MD5": hashlib.md5,
"MD5-SESS": hashlib.md5,
"SHA": hashlib.sha1,
"SHA-SESS": hashlib.sha1,
"SHA256": hashlib.sha256,
"SHA256-SESS": hashlib.sha256,
"SHA-256": hashlib.sha256,
"SHA-256-SESS": hashlib.sha256,
"SHA512": hashlib.sha512,
"SHA512-SESS": hashlib.sha512,
"SHA-512": hashlib.sha512,
"SHA-512-SESS": hashlib.sha512,
}
# Compile the regex pattern once at module level for performance
_HEADER_PAIRS_PATTERN = re.compile(
r'(\w+)\s*=\s*(?:"((?:[^"\\]|\\.)*)"|([^\s,]+))'
# | | | | | | | | | || |
# +----|--|-|-|--|----|------|----|--||-----|--> alphanumeric key
# +--|-|-|--|----|------|----|--||-----|--> maybe whitespace
# | | | | | | | || |
# +-|-|--|----|------|----|--||-----|--> = (delimiter)
# +-|--|----|------|----|--||-----|--> maybe whitespace
# | | | | | || |
# +--|----|------|----|--||-----|--> group quoted or unquoted
# | | | | || |
# +----|------|----|--||-----|--> if quoted...
# +------|----|--||-----|--> anything but " or \
# +----|--||-----|--> escaped characters allowed
# +--||-----|--> or can be empty string
# || |
# +|-----|--> if unquoted...
# +-----|--> anything but , or <space>
# +--> at least one char req'd
)
# RFC 7616: Challenge parameters to extract
CHALLENGE_FIELDS: Final[
tuple[
Literal["realm", "nonce", "qop", "algorithm", "opaque", "domain", "stale"], ...
]
] = (
"realm",
"nonce",
"qop",
"algorithm",
"opaque",
"domain",
"stale",
)
# Supported digest authentication algorithms
# Use a tuple of sorted keys for predictable documentation and error messages
SUPPORTED_ALGORITHMS: Final[tuple[str, ...]] = tuple(sorted(DigestFunctions.keys()))
# RFC 7616: Fields that require quoting in the Digest auth header
# These fields must be enclosed in double quotes in the Authorization header.
# Algorithm, qop, and nc are never quoted per RFC specifications.
# This frozen set is used by the template-based header construction to
# automatically determine which fields need quotes.
QUOTED_AUTH_FIELDS: Final[frozenset[str]] = frozenset(
{"username", "realm", "nonce", "uri", "response", "opaque", "cnonce"}
)
def escape_quotes(value: str) -> str:
"""Escape double quotes for HTTP header values."""
return value.replace('"', '\\"')
def unescape_quotes(value: str) -> str:
"""Unescape double quotes in HTTP header values."""
return value.replace('\\"', '"')
def parse_header_pairs(header: str) -> dict[str, str]:
"""
Parse key-value pairs from WWW-Authenticate or similar HTTP headers.
This function handles the complex format of WWW-Authenticate header values,
supporting both quoted and unquoted values, proper handling of commas in
quoted values, and whitespace variations per RFC 7616.
Examples of supported formats:
- key1="value1", key2=value2
- key1 = "value1" , key2="value, with, commas"
- key1=value1,key2="value2"
- realm="example.com", nonce="12345", qop="auth"
Args:
header: The header value string to parse
Returns:
Dictionary mapping parameter names to their values
"""
return {
stripped_key: unescape_quotes(quoted_val) if quoted_val else unquoted_val
for key, quoted_val, unquoted_val in _HEADER_PAIRS_PATTERN.findall(header)
if (stripped_key := key.strip())
}
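# Illustrative usage (editorial comment, not part of the original module):
#   parse_header_pairs('realm="example.com", nonce="abc", qop=auth')
#   -> {"realm": "example.com", "nonce": "abc", "qop": "auth"}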
class DigestAuthMiddleware:
"""
HTTP digest authentication middleware for aiohttp client.
This middleware intercepts 401 Unauthorized responses containing a Digest
authentication challenge, calculates the appropriate digest credentials,
and automatically retries the request with the proper Authorization header.
Features:
- Handles all aspects of Digest authentication handshake automatically
- Supports all standard hash algorithms:
- MD5, MD5-SESS
- SHA, SHA-SESS
- SHA256, SHA256-SESS, SHA-256, SHA-256-SESS
- SHA512, SHA512-SESS, SHA-512, SHA-512-SESS
- Supports 'auth' and 'auth-int' quality of protection modes
- Properly handles quoted strings and parameter parsing
- Includes replay attack protection with client nonce count tracking
- Supports preemptive authentication per RFC 7616 Section 3.6
Standards compliance:
- RFC 7616: HTTP Digest Access Authentication (primary reference)
- RFC 2617: HTTP Authentication (deprecated by RFC 7616)
- RFC 1945: Section 11.1 (username restrictions)
Implementation notes:
The core digest calculation is inspired by the implementation in
https://github.com/requests/requests/blob/v2.18.4/requests/auth.py
with added support for modern digest auth features and error handling.
"""
def __init__(
self,
login: str,
password: str,
preemptive: bool = True,
) -> None:
if login is None:
raise ValueError("None is not allowed as login value")
if password is None:
raise ValueError("None is not allowed as password value")
if ":" in login:
raise ValueError('A ":" is not allowed in username (RFC 1945#section-11.1)')
self._login_str: Final[str] = login
self._login_bytes: Final[bytes] = login.encode("utf-8")
self._password_bytes: Final[bytes] = password.encode("utf-8")
self._last_nonce_bytes = b""
self._nonce_count = 0
self._challenge: DigestAuthChallenge = {}
self._preemptive: bool = preemptive
# Set of URLs defining the protection space
self._protection_space: list[str] = []
async def _encode(self, method: str, url: URL, body: Payload | Literal[b""]) -> str:
"""
Build digest authorization header for the current challenge.
Args:
method: The HTTP method (GET, POST, etc.)
url: The request URL
body: The request body (used for qop=auth-int)
Returns:
A fully formatted Digest authorization header string
Raises:
ClientError: If the challenge is missing required parameters or
contains unsupported values
"""
challenge = self._challenge
if "realm" not in challenge:
raise ClientError(
"Malformed Digest auth challenge: Missing 'realm' parameter"
)
if "nonce" not in challenge:
raise ClientError(
"Malformed Digest auth challenge: Missing 'nonce' parameter"
)
# Empty realm values are allowed per RFC 7616 (SHOULD, not MUST, contain host name)
realm = challenge["realm"]
nonce = challenge["nonce"]
# Empty nonce values are not allowed as they are security-critical for replay protection
if not nonce:
raise ClientError(
"Security issue: Digest auth challenge contains empty 'nonce' value"
)
qop_raw = challenge.get("qop", "")
# Preserve original algorithm case for response while using uppercase for processing
algorithm_original = challenge.get("algorithm", "MD5")
algorithm = algorithm_original.upper()
opaque = challenge.get("opaque", "")
# Convert string values to bytes once
nonce_bytes = nonce.encode("utf-8")
realm_bytes = realm.encode("utf-8")
path = URL(url).path_qs
# Process QoP
qop = ""
qop_bytes = b""
if qop_raw:
valid_qops = {"auth", "auth-int"}.intersection(
{q.strip() for q in qop_raw.split(",") if q.strip()}
)
if not valid_qops:
raise ClientError(
f"Digest auth error: Unsupported Quality of Protection (qop) value(s): {qop_raw}"
)
qop = "auth-int" if "auth-int" in valid_qops else "auth"
qop_bytes = qop.encode("utf-8")
if algorithm not in DigestFunctions:
raise ClientError(
f"Digest auth error: Unsupported hash algorithm: {algorithm}. "
f"Supported algorithms: {', '.join(SUPPORTED_ALGORITHMS)}"
)
hash_fn: Final = DigestFunctions[algorithm]
def H(x: bytes) -> bytes:
"""RFC 7616 Section 3: Hash function H(data) = hex(hash(data))."""
return hash_fn(x).hexdigest().encode()
def KD(s: bytes, d: bytes) -> bytes:
"""RFC 7616 Section 3: KD(secret, data) = H(concat(secret, ":", data))."""
return H(b":".join((s, d)))
# Calculate A1 and A2
A1 = b":".join((self._login_bytes, realm_bytes, self._password_bytes))
A2 = f"{method.upper()}:{path}".encode()
if qop == "auth-int":
if isinstance(body, Payload): # will always be empty bytes unless Payload
entity_bytes = await body.as_bytes() # Get bytes from Payload
else:
entity_bytes = body
entity_hash = H(entity_bytes)
A2 = b":".join((A2, entity_hash))
HA1 = H(A1)
HA2 = H(A2)
# Nonce count handling
if nonce_bytes == self._last_nonce_bytes:
self._nonce_count += 1
else:
self._nonce_count = 1
self._last_nonce_bytes = nonce_bytes
ncvalue = f"{self._nonce_count:08x}"
ncvalue_bytes = ncvalue.encode("utf-8")
# Generate client nonce
cnonce = hashlib.sha1(
b"".join(
[
str(self._nonce_count).encode("utf-8"),
nonce_bytes,
time.ctime().encode("utf-8"),
os.urandom(8),
]
)
).hexdigest()[:16]
cnonce_bytes = cnonce.encode("utf-8")
# Special handling for session-based algorithms
if algorithm.upper().endswith("-SESS"):
HA1 = H(b":".join((HA1, nonce_bytes, cnonce_bytes)))
# Calculate the response digest
if qop:
noncebit = b":".join(
(nonce_bytes, ncvalue_bytes, cnonce_bytes, qop_bytes, HA2)
)
response_digest = KD(HA1, noncebit)
else:
response_digest = KD(HA1, b":".join((nonce_bytes, HA2)))
# Define a dict mapping of header fields to their values
# Group fields into always-present, optional, and qop-dependent
header_fields = {
# Always present fields
"username": escape_quotes(self._login_str),
"realm": escape_quotes(realm),
"nonce": escape_quotes(nonce),
"uri": path,
"response": response_digest.decode(),
"algorithm": algorithm_original,
}
# Optional fields
if opaque:
header_fields["opaque"] = escape_quotes(opaque)
# QoP-dependent fields
if qop:
header_fields["qop"] = qop
header_fields["nc"] = ncvalue
header_fields["cnonce"] = cnonce
# Build header using templates for each field type
pairs: list[str] = []
for field, value in header_fields.items():
if field in QUOTED_AUTH_FIELDS:
pairs.append(f'{field}="{value}"')
else:
pairs.append(f"{field}={value}")
return f"Digest {', '.join(pairs)}"
def _in_protection_space(self, url: URL) -> bool:
"""
Check if the given URL is within the current protection space.
According to RFC 7616, a URI is in the protection space if any URI
in the protection space is a prefix of it (after both have been made absolute).
"""
request_str = str(url)
for space_str in self._protection_space:
# Check if request starts with space URL
if not request_str.startswith(space_str):
continue
# Exact match or space ends with / (proper directory prefix)
if len(request_str) == len(space_str) or space_str[-1] == "/":
return True
# Check next char is / to ensure proper path boundary
if request_str[len(space_str)] == "/":
return True
return False
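# Editorial example of the prefix rule: with protection space
# ["https://example.com/api"], the URL "https://example.com/api/v1" is inside
# (the next character is "/"), while "https://example.com/apiv2" is not.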
def _authenticate(self, response: ClientResponse) -> bool:
"""
Takes the given response and tries digest-auth, if needed.
Returns true if the original request must be resent.
"""
if response.status != 401:
return False
auth_header = response.headers.get("www-authenticate", "")
if not auth_header:
return False # No authentication header present
method, sep, headers = auth_header.partition(" ")
if not sep:
# No space found in www-authenticate header
return False # Malformed auth header, missing scheme separator
if method.lower() != "digest":
# Not a digest auth challenge (could be Basic, Bearer, etc.)
return False
if not headers:
# We have a digest scheme but no parameters
return False # Malformed digest header, missing parameters
# We have a digest auth header with content
if not (header_pairs := parse_header_pairs(headers)):
# Failed to parse any key-value pairs
return False # Malformed digest header, no valid parameters
# Extract challenge parameters
self._challenge = {}
for field in CHALLENGE_FIELDS:
if value := header_pairs.get(field):
self._challenge[field] = value
# Update protection space based on domain parameter or default to origin
origin = response.url.origin()
if domain := self._challenge.get("domain"):
# Parse space-separated list of URIs
self._protection_space = []
for uri in domain.split():
# Remove quotes if present
uri = uri.strip('"')
if uri.startswith("/"):
# Path-absolute, relative to origin
self._protection_space.append(str(origin.join(URL(uri))))
else:
# Absolute URI
self._protection_space.append(str(URL(uri)))
else:
# No domain specified, protection space is entire origin
self._protection_space = [str(origin)]
# Return True only if we found at least one challenge parameter
return bool(self._challenge)
async def __call__(
self, request: ClientRequest, handler: ClientHandlerType
) -> ClientResponse:
"""Run the digest auth middleware."""
response = None
for retry_count in range(2):
# Apply authorization header if:
# 1. This is a retry after 401 (retry_count > 0), OR
# 2. Preemptive auth is enabled AND we have a challenge AND the URL is in protection space
if retry_count > 0 or (
self._preemptive
and self._challenge
and self._in_protection_space(request.url)
):
request.headers[hdrs.AUTHORIZATION] = await self._encode(
request.method, request.url, request.body
)
# Send the request
response = await handler(request)
# Check if we need to authenticate
if not self._authenticate(response):
break
# At this point, response is guaranteed to be defined
assert response is not None
return response
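# Example usage (editorial sketch; the URL is hypothetical, the middlewares=
# parameter is the same one exercised by the tests below):
#
#     import asyncio
#     from aiohttp import ClientSession
#
#     async def main() -> None:
#         auth = DigestAuthMiddleware("user", "pass")
#         async with ClientSession(middlewares=(auth,)) as session:
#             async with session.get("https://example.com/protected") as resp:
#                 print(resp.status)
#
#     asyncio.run(main())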
|
"""Test digest authentication middleware for aiohttp client."""
import io
import re
from collections.abc import Generator
from hashlib import md5, sha1
from typing import Literal
from unittest import mock
import pytest
from yarl import URL
from aiohttp import ClientSession, hdrs
from aiohttp.client_exceptions import ClientError
from aiohttp.client_middleware_digest_auth import (
DigestAuthChallenge,
DigestAuthMiddleware,
DigestFunctions,
escape_quotes,
parse_header_pairs,
unescape_quotes,
)
from aiohttp.client_reqrep import ClientResponse
from aiohttp.payload import BytesIOPayload
from aiohttp.pytest_plugin import AiohttpServer
from aiohttp.web import Application, Request, Response
@pytest.fixture
def digest_auth_mw() -> DigestAuthMiddleware:
return DigestAuthMiddleware("user", "pass")
@pytest.fixture
def basic_challenge() -> DigestAuthChallenge:
"""Return a basic digest auth challenge with required fields only."""
return DigestAuthChallenge(realm="test", nonce="abc")
@pytest.fixture
def complete_challenge() -> DigestAuthChallenge:
"""Return a complete digest auth challenge with all fields."""
return DigestAuthChallenge(
realm="test", nonce="abc", qop="auth", algorithm="MD5", opaque="xyz"
)
@pytest.fixture
def qop_challenge() -> DigestAuthChallenge:
"""Return a digest auth challenge with qop field."""
return DigestAuthChallenge(realm="test", nonce="abc", qop="auth")
@pytest.fixture
def no_qop_challenge() -> DigestAuthChallenge:
"""Return a digest auth challenge without qop."""
return DigestAuthChallenge(realm="test-realm", nonce="testnonce", algorithm="MD5")
@pytest.fixture
def auth_mw_with_challenge(
digest_auth_mw: DigestAuthMiddleware, complete_challenge: DigestAuthChallenge
) -> DigestAuthMiddleware:
"""Return a digest auth middleware with pre-set challenge."""
digest_auth_mw._challenge = complete_challenge
digest_auth_mw._last_nonce_bytes = complete_challenge["nonce"].encode("utf-8")
digest_auth_mw._nonce_count = 0
return digest_auth_mw
@pytest.fixture
def mock_sha1_digest() -> Generator[mock.MagicMock, None, None]:
"""Mock SHA1 to return a predictable value for testing."""
mock_digest = mock.MagicMock(spec=sha1())
mock_digest.hexdigest.return_value = "deadbeefcafebabe"
with mock.patch("hashlib.sha1", return_value=mock_digest) as patched:
yield patched
@pytest.fixture
def mock_md5_digest() -> Generator[mock.MagicMock, None, None]:
"""Mock MD5 to return a predictable value for testing."""
mock_digest = mock.MagicMock(spec=md5())
mock_digest.hexdigest.return_value = "abcdef0123456789"
with mock.patch("hashlib.md5", return_value=mock_digest) as patched:
yield patched
@pytest.mark.parametrize(
("response_status", "headers", "expected_result", "expected_challenge"),
[
# Valid digest with all fields
(
401,
{
"www-authenticate": 'Digest realm="test", nonce="abc", '
'qop="auth", opaque="xyz", algorithm=MD5'
},
True,
{
"realm": "test",
"nonce": "abc",
"qop": "auth",
"algorithm": "MD5",
"opaque": "xyz",
},
),
# Valid digest without opaque
(
401,
{"www-authenticate": 'Digest realm="test", nonce="abc", qop="auth"'},
True,
{"realm": "test", "nonce": "abc", "qop": "auth"},
),
# Non-401 status
(200, {}, False, {}), # No challenge should be set
],
)
async def test_authenticate_scenarios(
digest_auth_mw: DigestAuthMiddleware,
response_status: int,
headers: dict[str, str],
expected_result: bool,
expected_challenge: dict[str, str],
) -> None:
"""Test different authentication scenarios."""
response = mock.MagicMock(spec=ClientResponse)
response.status = response_status
response.headers = headers
result = digest_auth_mw._authenticate(response)
assert result == expected_result
if expected_result:
challenge_dict = dict(digest_auth_mw._challenge)
for key, value in expected_challenge.items():
assert challenge_dict[key] == value
@pytest.mark.parametrize(
("challenge", "expected_error"),
[
(
DigestAuthChallenge(),
"Malformed Digest auth challenge: Missing 'realm' parameter",
),
(
DigestAuthChallenge(nonce="abc"),
"Malformed Digest auth challenge: Missing 'realm' parameter",
),
(
DigestAuthChallenge(realm="test"),
"Malformed Digest auth challenge: Missing 'nonce' parameter",
),
(
DigestAuthChallenge(realm="test", nonce=""),
"Security issue: Digest auth challenge contains empty 'nonce' value",
),
],
)
async def test_encode_validation_errors(
digest_auth_mw: DigestAuthMiddleware,
challenge: DigestAuthChallenge,
expected_error: str,
) -> None:
"""Test validation errors when encoding digest auth headers."""
digest_auth_mw._challenge = challenge
with pytest.raises(ClientError, match=expected_error):
await digest_auth_mw._encode("GET", URL("http://example.com/resource"), b"")
async def test_encode_digest_with_md5(
auth_mw_with_challenge: DigestAuthMiddleware,
) -> None:
header = await auth_mw_with_challenge._encode(
"GET", URL("http://example.com/resource"), b""
)
assert header.startswith("Digest ")
assert 'username="user"' in header
assert "algorithm=MD5" in header
@pytest.mark.parametrize(
"algorithm", ["MD5-SESS", "SHA-SESS", "SHA-256-SESS", "SHA-512-SESS"]
)
async def test_encode_digest_with_sess_algorithms(
digest_auth_mw: DigestAuthMiddleware,
qop_challenge: DigestAuthChallenge,
algorithm: str,
) -> None:
"""Test that all session-based digest algorithms work correctly."""
# Create a modified challenge with the test algorithm
challenge = qop_challenge.copy()
challenge["algorithm"] = algorithm
digest_auth_mw._challenge = challenge
header = await digest_auth_mw._encode(
"GET", URL("http://example.com/resource"), b""
)
assert f"algorithm={algorithm}" in header
async def test_encode_unsupported_algorithm(
digest_auth_mw: DigestAuthMiddleware, basic_challenge: DigestAuthChallenge
) -> None:
"""Test that unsupported algorithm raises ClientError."""
# Create a modified challenge with an unsupported algorithm
challenge = basic_challenge.copy()
challenge["algorithm"] = "UNSUPPORTED"
digest_auth_mw._challenge = challenge
with pytest.raises(ClientError, match="Unsupported hash algorithm"):
await digest_auth_mw._encode("GET", URL("http://example.com/resource"), b"")
@pytest.mark.parametrize("algorithm", ["MD5", "MD5-SESS", "SHA-256"])
async def test_encode_algorithm_case_preservation_uppercase(
digest_auth_mw: DigestAuthMiddleware,
qop_challenge: DigestAuthChallenge,
algorithm: str,
) -> None:
"""Test that uppercase algorithm case is preserved in the response header."""
# Create a challenge with the specific algorithm case
challenge = qop_challenge.copy()
challenge["algorithm"] = algorithm
digest_auth_mw._challenge = challenge
header = await digest_auth_mw._encode(
"GET", URL("http://example.com/resource"), b""
)
# The algorithm in the response should match the exact case from the challenge
assert f"algorithm={algorithm}" in header
@pytest.mark.parametrize("algorithm", ["md5", "MD5-sess", "sha-256"])
async def test_encode_algorithm_case_preservation_lowercase(
digest_auth_mw: DigestAuthMiddleware,
qop_challenge: DigestAuthChallenge,
algorithm: str,
) -> None:
"""Test that lowercase/mixed-case algorithm is preserved in the response header."""
# Create a challenge with the specific algorithm case
challenge = qop_challenge.copy()
challenge["algorithm"] = algorithm
digest_auth_mw._challenge = challenge
header = await digest_auth_mw._encode(
"GET", URL("http://example.com/resource"), b""
)
# The algorithm in the response should match the exact case from the challenge
assert f"algorithm={algorithm}" in header
# Also verify it's not the uppercase version
assert f"algorithm={algorithm.upper()}" not in header
async def test_invalid_qop_rejected(
digest_auth_mw: DigestAuthMiddleware, basic_challenge: DigestAuthChallenge
) -> None:
"""Test that invalid Quality of Protection values are rejected."""
# Use bad QoP value to trigger error
challenge = basic_challenge.copy()
challenge["qop"] = "badvalue"
challenge["algorithm"] = "MD5"
digest_auth_mw._challenge = challenge
# This should raise an error about unsupported QoP
with pytest.raises(ClientError, match="Unsupported Quality of Protection"):
await digest_auth_mw._encode("GET", URL("http://example.com"), b"")
def compute_expected_digest(
algorithm: str,
username: str,
password: str,
realm: str,
nonce: str,
uri: str,
method: str,
qop: str,
nc: str,
cnonce: str,
body: str = "",
) -> str:
hash_fn = DigestFunctions[algorithm]
def H(x: str) -> str:
return hash_fn(x.encode()).hexdigest()
def KD(secret: str, data: str) -> str:
return H(f"{secret}:{data}")
A1 = f"{username}:{realm}:{password}"
HA1 = H(A1)
if algorithm.upper().endswith("-SESS"):
HA1 = H(f"{HA1}:{nonce}:{cnonce}")
A2 = f"{method}:{uri}"
if "auth-int" in qop:
entity_hash = H(body)
A2 = f"{A2}:{entity_hash}"
HA2 = H(A2)
if qop:
return KD(HA1, f"{nonce}:{nc}:{cnonce}:{qop}:{HA2}")
else:
return KD(HA1, f"{nonce}:{HA2}")
@pytest.mark.parametrize("qop", ["auth", "auth-int", "auth,auth-int", ""])
@pytest.mark.parametrize("algorithm", sorted(DigestFunctions.keys()))
@pytest.mark.parametrize(
("body", "body_str"),
[
(b"", ""), # Bytes case
(
BytesIOPayload(io.BytesIO(b"this is a body")),
"this is a body",
), # BytesIOPayload case
],
)
async def test_digest_response_exact_match(
qop: str,
algorithm: str,
body: Literal[b""] | BytesIOPayload,
body_str: str,
mock_sha1_digest: mock.MagicMock,
) -> None:
# Fixed input values
login = "user"
password = "pass"
realm = "example.com"
nonce = "abc123nonce"
cnonce = "deadbeefcafebabe"
nc = 1
ncvalue = f"{nc+1:08x}"
method = "GET"
uri = "/secret"
qop = "auth-int" if "auth-int" in qop else "auth"
# Create the auth object
auth = DigestAuthMiddleware(login, password)
auth._challenge = DigestAuthChallenge(
realm=realm, nonce=nonce, qop=qop, algorithm=algorithm
)
auth._last_nonce_bytes = nonce.encode("utf-8")
auth._nonce_count = nc
header = await auth._encode(method, URL(f"http://host{uri}"), body)
# Get expected digest
expected = compute_expected_digest(
algorithm=algorithm,
username=login,
password=password,
realm=realm,
nonce=nonce,
uri=uri,
method=method,
qop=qop,
nc=ncvalue,
cnonce=cnonce,
body=body_str,
)
# Check that the response digest is exactly correct
assert f'response="{expected}"' in header
@pytest.mark.parametrize(
("header", "expected_result"),
[
# Normal quoted values
(
'realm="example.com", nonce="12345", qop="auth"',
{"realm": "example.com", "nonce": "12345", "qop": "auth"},
),
# Unquoted values
(
"realm=example.com, nonce=12345, qop=auth",
{"realm": "example.com", "nonce": "12345", "qop": "auth"},
),
# Mixed quoted/unquoted with commas in quoted values
(
'realm="ex,ample", nonce=12345, qop="auth", domain="/test"',
{
"realm": "ex,ample",
"nonce": "12345",
"qop": "auth",
"domain": "/test",
},
),
# Header with scheme
(
'Digest realm="example.com", nonce="12345", qop="auth"',
{"realm": "example.com", "nonce": "12345", "qop": "auth"},
),
# No spaces after commas
(
'realm="test",nonce="123",qop="auth"',
{"realm": "test", "nonce": "123", "qop": "auth"},
),
# Extra whitespace
(
'realm = "test" , nonce = "123"',
{"realm": "test", "nonce": "123"},
),
# Escaped quotes
(
'realm="test\\"realm", nonce="123"',
{"realm": 'test"realm', "nonce": "123"},
),
# Single quotes (treated as regular chars)
(
"realm='test', nonce=123",
{"realm": "'test'", "nonce": "123"},
),
# Empty header
("", {}),
],
ids=[
"fully_quoted_header",
"unquoted_header",
"mixed_quoted_unquoted_with_commas",
"header_with_scheme",
"no_spaces_after_commas",
"extra_whitespace",
"escaped_quotes",
"single_quotes_as_regular_chars",
"empty_header",
],
)
def test_parse_header_pairs(header: str, expected_result: dict[str, str]) -> None:
"""Test parsing HTTP header pairs with various formats."""
result = parse_header_pairs(header)
assert result == expected_result
def test_digest_auth_middleware_callable(digest_auth_mw: DigestAuthMiddleware) -> None:
"""Test that DigestAuthMiddleware is callable."""
assert callable(digest_auth_mw)
def test_middleware_invalid_login() -> None:
"""Test that invalid login values raise errors."""
with pytest.raises(ValueError, match="None is not allowed as login value"):
DigestAuthMiddleware(None, "pass") # type: ignore[arg-type]
with pytest.raises(ValueError, match="None is not allowed as password value"):
DigestAuthMiddleware("user", None) # type: ignore[arg-type]
with pytest.raises(ValueError, match=r"A \":\" is not allowed in username"):
DigestAuthMiddleware("user:name", "pass")
async def test_escaping_quotes_in_auth_header() -> None:
"""Test that double quotes are properly escaped in auth header."""
auth = DigestAuthMiddleware('user"with"quotes', "pass")
auth._challenge = DigestAuthChallenge(
realm='realm"with"quotes',
nonce='nonce"with"quotes',
qop="auth",
algorithm="MD5",
opaque='opaque"with"quotes',
)
header = await auth._encode("GET", URL("http://example.com/path"), b"")
# Check that quotes are escaped in the header
assert 'username="user\\"with\\"quotes"' in header
assert 'realm="realm\\"with\\"quotes"' in header
assert 'nonce="nonce\\"with\\"quotes"' in header
assert 'opaque="opaque\\"with\\"quotes"' in header
async def test_template_based_header_construction(
auth_mw_with_challenge: DigestAuthMiddleware,
mock_sha1_digest: mock.MagicMock,
mock_md5_digest: mock.MagicMock,
) -> None:
"""Test that the template-based header construction works correctly."""
header = await auth_mw_with_challenge._encode(
"GET", URL("http://example.com/test"), b""
)
# Split the header into scheme and parameters
scheme, params_str = header.split(" ", 1)
assert scheme == "Digest"
# Parse the parameters into a dictionary
params = {
key: value[1:-1] if value.startswith('"') and value.endswith('"') else value
for key, value in (param.split("=", 1) for param in params_str.split(", "))
}
# Check all required fields are present
assert "username" in params
assert "realm" in params
assert "nonce" in params
assert "uri" in params
assert "response" in params
assert "algorithm" in params
assert "qop" in params
assert "nc" in params
assert "cnonce" in params
assert "opaque" in params
# Check that fields are quoted correctly
quoted_fields = [
"username",
"realm",
"nonce",
"uri",
"response",
"opaque",
"cnonce",
]
unquoted_fields = ["algorithm", "qop", "nc"]
# Re-check the original header for proper quoting
for field in quoted_fields:
assert f'{field}="{params[field]}"' in header
for field in unquoted_fields:
assert f"{field}={params[field]}" in header
# Check specific values
assert params["username"] == "user"
assert params["realm"] == "test"
assert params["algorithm"] == "MD5"
assert params["nc"] == "00000001" # nonce_count = 1 (incremented from 0)
assert params["uri"] == "/test" # path component of URL
@pytest.mark.parametrize(
("test_string", "expected_escaped", "description"),
[
('value"with"quotes', 'value\\"with\\"quotes', "Basic string with quotes"),
("", "", "Empty string"),
("no quotes", "no quotes", "String without quotes"),
('with"one"quote', 'with\\"one\\"quote', "String with one quoted segment"),
(
'many"quotes"in"string',
'many\\"quotes\\"in\\"string',
"String with multiple quoted segments",
),
('""', '\\"\\"', "Just double quotes"),
('"', '\\"', "Single double quote"),
('already\\"escaped', 'already\\\\"escaped', "Already escaped quotes"),
],
)
def test_quote_escaping_functions(
test_string: str, expected_escaped: str, description: str
) -> None:
"""Test that escape_quotes and unescape_quotes work correctly."""
# Test escaping
escaped = escape_quotes(test_string)
assert escaped == expected_escaped
# Test unescaping (should return to original)
unescaped = unescape_quotes(escaped)
assert unescaped == test_string
# Test that they're inverse operations
assert unescape_quotes(escape_quotes(test_string)) == test_string
async def test_middleware_retry_on_401(
aiohttp_server: AiohttpServer, digest_auth_mw: DigestAuthMiddleware
) -> None:
"""Test that the middleware retries on 401 errors."""
request_count = 0
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
if request_count == 1:
# First request returns 401 with digest challenge
challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5'
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
# Second request should have Authorization header
auth_header = request.headers.get(hdrs.AUTHORIZATION)
if auth_header and auth_header.startswith("Digest "):
# Return success response
return Response(text="OK")
# This branch should not be reached in the tests
assert False, "This branch should not be reached"
app = Application()
app.router.add_get("/", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
async with session.get(server.make_url("/")) as resp:
assert resp.status == 200
text_content = await resp.text()
assert text_content == "OK"
assert request_count == 2 # Initial request + retry with auth
async def test_digest_auth_no_qop(
aiohttp_server: AiohttpServer,
digest_auth_mw: DigestAuthMiddleware,
no_qop_challenge: DigestAuthChallenge,
mock_sha1_digest: mock.MagicMock,
) -> None:
"""Test digest auth with a server that doesn't provide a QoP parameter."""
request_count = 0
realm = no_qop_challenge["realm"]
nonce = no_qop_challenge["nonce"]
algorithm = no_qop_challenge["algorithm"]
username = "user"
password = "pass"
uri = "/"
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
if request_count == 1:
# First request returns 401 with digest challenge without qop
challenge = (
f'Digest realm="{realm}", nonce="{nonce}", algorithm={algorithm}'
)
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
# Second request should have Authorization header
auth_header = request.headers.get(hdrs.AUTHORIZATION)
assert auth_header and auth_header.startswith("Digest ")
# Successful auth should have no qop param
assert "qop=" not in auth_header
assert "nc=" not in auth_header
assert "cnonce=" not in auth_header
expected_digest = compute_expected_digest(
algorithm=algorithm,
username=username,
password=password,
realm=realm,
nonce=nonce,
uri=uri,
method="GET",
qop="", # This is the key part - explicitly setting qop=""
nc="", # Not needed for non-qop digest
cnonce="", # Not needed for non-qop digest
)
# We mock the cnonce, so we can check the expected digest
assert expected_digest in auth_header
return Response(text="OK")
app = Application()
app.router.add_get("/", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
async with session.get(server.make_url("/")) as resp:
assert resp.status == 200
text_content = await resp.text()
assert text_content == "OK"
assert request_count == 2 # Initial request + retry with auth
async def test_digest_auth_without_opaque(
aiohttp_server: AiohttpServer, digest_auth_mw: DigestAuthMiddleware
) -> None:
"""Test digest auth with a server that doesn't provide an opaque parameter."""
request_count = 0
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
if request_count == 1:
# First request returns 401 with digest challenge without opaque
challenge = (
'Digest realm="test-realm", nonce="testnonce", '
'qop="auth", algorithm=MD5'
)
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
# Second request should have Authorization header
auth_header = request.headers.get(hdrs.AUTHORIZATION)
assert auth_header and auth_header.startswith("Digest ")
# Successful auth should have no opaque param
assert "opaque=" not in auth_header
return Response(text="OK")
app = Application()
app.router.add_get("/", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
async with session.get(server.make_url("/")) as resp:
assert resp.status == 200
text_content = await resp.text()
assert text_content == "OK"
assert request_count == 2 # Initial request + retry with auth
@pytest.mark.parametrize(
"www_authenticate",
[
None,
"DigestWithoutSpace",
'Basic realm="test"',
"Digest ",
"Digest =invalid, format",
],
)
async def test_auth_header_no_retry(
aiohttp_server: AiohttpServer,
www_authenticate: str,
digest_auth_mw: DigestAuthMiddleware,
) -> None:
"""Test that middleware doesn't retry with invalid WWW-Authenticate headers."""
request_count = 0
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
# First (and only) request returns 401
headers = {}
if www_authenticate is not None:
headers["WWW-Authenticate"] = www_authenticate
# Build the 401 response manually instead of using a helper since
# we're specifically testing malformed headers
return Response(status=401, headers=headers, text="Unauthorized")
app = Application()
app.router.add_get("/", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
async with session.get(server.make_url("/")) as resp:
assert resp.status == 401
# No retry should happen
assert request_count == 1
async def test_direct_success_no_auth_needed(
aiohttp_server: AiohttpServer, digest_auth_mw: DigestAuthMiddleware
) -> None:
"""Test middleware with a direct 200 response with no auth challenge."""
request_count = 0
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
# Return success without auth challenge
return Response(text="OK")
app = Application()
app.router.add_get("/", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
async with session.get(server.make_url("/")) as resp:
text = await resp.text()
assert resp.status == 200
assert text == "OK"
# Verify only one request was made
assert request_count == 1
async def test_no_retry_on_second_401(
aiohttp_server: AiohttpServer, digest_auth_mw: DigestAuthMiddleware
) -> None:
"""Test digest auth does not retry on second 401."""
request_count = 0
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
# Always return 401 challenge
challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5'
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
app = Application()
app.router.add_get("/", handler)
server = await aiohttp_server(app)
# Create a session that uses the digest auth middleware
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
async with session.get(server.make_url("/")) as resp:
await resp.text()
assert resp.status == 401
# Verify we made exactly 2 requests (initial + 1 retry)
assert request_count == 2
async def test_preemptive_auth_disabled(
aiohttp_server: AiohttpServer,
) -> None:
"""Test that preemptive authentication can be disabled."""
digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=False)
request_count = 0
auth_headers = []
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
auth_headers.append(request.headers.get(hdrs.AUTHORIZATION))
if not request.headers.get(hdrs.AUTHORIZATION):
# Return 401 with digest challenge
challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5'
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
return Response(text="OK")
app = Application()
app.router.add_get("/", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
# First request will get 401 and store challenge
async with session.get(server.make_url("/")) as resp:
assert resp.status == 200
text = await resp.text()
assert text == "OK"
# Second request should NOT send auth preemptively (preemptive=False)
async with session.get(server.make_url("/")) as resp:
assert resp.status == 200
text = await resp.text()
assert text == "OK"
# With preemptive disabled, each request needs 401 challenge first
assert request_count == 4 # 2 requests * 2 (401 + retry)
assert auth_headers[0] is None # First request has no auth
assert auth_headers[1] is not None # Second request has auth after 401
assert auth_headers[2] is None # Third request has no auth (preemptive disabled)
assert auth_headers[3] is not None # Fourth request has auth after 401
async def test_preemptive_auth_with_stale_nonce(
aiohttp_server: AiohttpServer,
) -> None:
"""Test preemptive auth handles stale nonce responses correctly."""
digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=True)
request_count = 0
current_nonce = 0
async def handler(request: Request) -> Response:
nonlocal request_count, current_nonce
request_count += 1
auth_header = request.headers.get(hdrs.AUTHORIZATION)
if not auth_header:
# First request without auth
current_nonce = 1
challenge = f'Digest realm="test", nonce="nonce{current_nonce}", qop="auth", algorithm=MD5'
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
# For the second set of requests, always consider the first nonce stale
if request_count == 3 and current_nonce == 1:
# Stale nonce - request new auth with stale=true
current_nonce = 2
challenge = f'Digest realm="test", nonce="nonce{current_nonce}", qop="auth", algorithm=MD5, stale=true'
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized - Stale nonce",
)
return Response(text="OK")
app = Application()
app.router.add_get("/", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
# First request - will get 401, then retry with auth
async with session.get(server.make_url("/")) as resp:
assert resp.status == 200
text = await resp.text()
assert text == "OK"
# Second request - will use preemptive auth with nonce1, get 401 stale, retry with nonce2
async with session.get(server.make_url("/")) as resp:
assert resp.status == 200
text = await resp.text()
assert text == "OK"
# Verify the expected flow:
# Request 1: no auth -> 401
# Request 2: retry with auth -> 200
# Request 3: preemptive auth with old nonce -> 401 stale
# Request 4: retry with new nonce -> 200
assert request_count == 4
async def test_preemptive_auth_updates_nonce_count(
aiohttp_server: AiohttpServer,
) -> None:
"""Test that preemptive auth properly increments nonce count."""
digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=True)
request_count = 0
nonce_counts = []
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
auth_header = request.headers.get(hdrs.AUTHORIZATION)
if not auth_header:
# First request without auth
challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5'
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
# Extract nc (nonce count) from the auth header
nc_value = auth_header.split("nc=")[1].split(",")[0].strip()
nonce_counts.append(nc_value)
return Response(text="OK")
app = Application()
app.router.add_get("/", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
# Make multiple requests to see nonce count increment
for _ in range(3):
async with session.get(server.make_url("/")) as resp:
assert resp.status == 200
await resp.text()
# First request has no auth, then gets 401 and retries with nc=00000001
# Second and third requests use preemptive auth with nc=00000002 and nc=00000003
assert len(nonce_counts) == 3
assert nonce_counts[0] == "00000001"
assert nonce_counts[1] == "00000002"
assert nonce_counts[2] == "00000003"
async def test_preemptive_auth_respects_protection_space(
aiohttp_server: AiohttpServer,
) -> None:
"""Test that preemptive auth only applies to URLs within the protection space."""
digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=True)
request_count = 0
auth_headers = []
requested_paths = []
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
auth_headers.append(request.headers.get(hdrs.AUTHORIZATION))
requested_paths.append(request.path)
if not request.headers.get(hdrs.AUTHORIZATION):
# Return 401 with digest challenge including domain parameter
challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5, domain="/api /admin"'
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
return Response(text="OK")
app = Application()
app.router.add_get("/api/endpoint", handler)
app.router.add_get("/admin/panel", handler)
app.router.add_get("/public/page", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
# First request to /api/endpoint - should get 401 and retry with auth
async with session.get(server.make_url("/api/endpoint")) as resp:
assert resp.status == 200
# Second request to /api/endpoint - should use preemptive auth (in protection space)
async with session.get(server.make_url("/api/endpoint")) as resp:
assert resp.status == 200
# Third request to /admin/panel - should use preemptive auth (in protection space)
async with session.get(server.make_url("/admin/panel")) as resp:
assert resp.status == 200
# Fourth request to /public/page - should NOT use preemptive auth (outside protection space)
async with session.get(server.make_url("/public/page")) as resp:
assert resp.status == 200
# Verify auth headers
assert auth_headers[0] is None # First request to /api/endpoint - no auth
assert auth_headers[1] is not None # Retry with auth
assert (
auth_headers[2] is not None
) # Second request to /api/endpoint - preemptive auth
assert auth_headers[3] is not None # Request to /admin/panel - preemptive auth
assert auth_headers[4] is None # First request to /public/page - no preemptive auth
assert auth_headers[5] is not None # Retry with auth
# Verify paths
assert requested_paths == [
"/api/endpoint", # Initial request
"/api/endpoint", # Retry with auth
"/api/endpoint", # Second request with preemptive auth
"/admin/panel", # Request with preemptive auth
"/public/page", # Initial request (no preemptive auth)
"/public/page", # Retry with auth
]
async def test_preemptive_auth_with_absolute_domain_uris(
aiohttp_server: AiohttpServer,
) -> None:
"""Test preemptive auth with absolute URIs in domain parameter."""
digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=True)
request_count = 0
auth_headers = []
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
auth_headers.append(request.headers.get(hdrs.AUTHORIZATION))
if not request.headers.get(hdrs.AUTHORIZATION):
# Return 401 with digest challenge including absolute URI in domain
server_url = str(request.url.with_path("/protected"))
challenge = f'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5, domain="{server_url}"'
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
return Response(text="OK")
app = Application()
app.router.add_get("/protected/resource", handler)
app.router.add_get("/unprotected/resource", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
# First request to protected resource
async with session.get(server.make_url("/protected/resource")) as resp:
assert resp.status == 200
# Second request to protected resource - should use preemptive auth
async with session.get(server.make_url("/protected/resource")) as resp:
assert resp.status == 200
# Request to unprotected resource - should NOT use preemptive auth
async with session.get(server.make_url("/unprotected/resource")) as resp:
assert resp.status == 200
# Verify auth pattern
assert auth_headers[0] is None # First request - no auth
assert auth_headers[1] is not None # Retry with auth
assert auth_headers[2] is not None # Second request - preemptive auth
assert auth_headers[3] is None # Unprotected resource - no preemptive auth
assert auth_headers[4] is not None # Retry with auth
async def test_preemptive_auth_without_domain_uses_origin(
aiohttp_server: AiohttpServer,
) -> None:
"""Test that preemptive auth without domain parameter applies to entire origin."""
digest_auth_mw = DigestAuthMiddleware("user", "pass", preemptive=True)
request_count = 0
auth_headers = []
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
auth_headers.append(request.headers.get(hdrs.AUTHORIZATION))
if not request.headers.get(hdrs.AUTHORIZATION):
# Return 401 with digest challenge without domain parameter
challenge = 'Digest realm="test", nonce="abc123", qop="auth", algorithm=MD5'
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
return Response(text="OK")
app = Application()
app.router.add_get("/path1", handler)
app.router.add_get("/path2", handler)
server = await aiohttp_server(app)
async with ClientSession(middlewares=(digest_auth_mw,)) as session:
# First request
async with session.get(server.make_url("/path1")) as resp:
assert resp.status == 200
# Second request to different path - should still use preemptive auth
async with session.get(server.make_url("/path2")) as resp:
assert resp.status == 200
# Verify auth pattern
assert auth_headers[0] is None # First request - no auth
assert auth_headers[1] is not None # Retry with auth
assert (
auth_headers[2] is not None
) # Second request - preemptive auth (entire origin)
@pytest.mark.parametrize(
("status", "headers", "expected"),
[
(200, {}, False),
(401, {"www-authenticate": ""}, False),
(401, {"www-authenticate": "DigestWithoutSpace"}, False),
(401, {"www-authenticate": "Basic realm=test"}, False),
(401, {"www-authenticate": "Digest "}, False),
(401, {"www-authenticate": "Digest =invalid, format"}, False),
],
ids=[
"different_status_code",
"empty_www_authenticate_header",
"no_space_after_scheme",
"different_scheme",
"empty_parameters",
"malformed_parameters",
],
)
def test_authenticate_with_malformed_headers(
digest_auth_mw: DigestAuthMiddleware,
status: int,
headers: dict[str, str],
expected: bool,
) -> None:
"""Test _authenticate method with various edge cases."""
response = mock.MagicMock(spec=ClientResponse)
response.status = status
response.headers = headers
result = digest_auth_mw._authenticate(response)
assert result == expected
@pytest.mark.parametrize(
("protection_space_url", "request_url", "expected"),
[
# Exact match
("http://example.com/app1", "http://example.com/app1", True),
# Path with trailing slash should match
("http://example.com/app1", "http://example.com/app1/", True),
# Subpaths should match
("http://example.com/app1", "http://example.com/app1/resource", True),
("http://example.com/app1", "http://example.com/app1/sub/path", True),
# Should NOT match different paths that start with same prefix
("http://example.com/app1", "http://example.com/app1xx", False),
("http://example.com/app1", "http://example.com/app123", False),
# Protection space with trailing slash
("http://example.com/app1/", "http://example.com/app1/", True),
("http://example.com/app1/", "http://example.com/app1/resource", True),
(
"http://example.com/app1/",
"http://example.com/app1",
False,
), # No trailing slash
# Root protection space
("http://example.com/", "http://example.com/", True),
("http://example.com/", "http://example.com/anything", True),
("http://example.com/", "http://example.com", False), # No trailing slash
# Different origins should not match
("http://example.com/app1", "https://example.com/app1", False),
("http://example.com/app1", "http://other.com/app1", False),
("http://example.com:8080/app1", "http://example.com/app1", False),
],
ids=[
"exact_match",
"path_with_trailing_slash",
"subpath_match",
"deep_subpath_match",
"no_match_app1xx",
"no_match_app123",
"protection_with_slash_exact",
"protection_with_slash_subpath",
"protection_with_slash_no_match_without",
"root_protection_exact",
"root_protection_subpath",
"root_protection_no_match_without_slash",
"different_scheme",
"different_host",
"different_port",
],
)
def test_in_protection_space(
digest_auth_mw: DigestAuthMiddleware,
protection_space_url: str,
request_url: str,
expected: bool,
) -> None:
"""Test _in_protection_space method with various URL patterns."""
digest_auth_mw._protection_space = [protection_space_url]
result = digest_auth_mw._in_protection_space(URL(request_url))
assert result == expected
def test_in_protection_space_multiple_spaces(
digest_auth_mw: DigestAuthMiddleware,
) -> None:
"""Test _in_protection_space with multiple protection spaces."""
digest_auth_mw._protection_space = [
"http://example.com/api",
"http://example.com/admin/",
"http://example.com/secure/area",
]
# Test various URLs
assert digest_auth_mw._in_protection_space(URL("http://example.com/api")) is True
assert digest_auth_mw._in_protection_space(URL("http://example.com/api/v1")) is True
assert (
digest_auth_mw._in_protection_space(URL("http://example.com/admin/panel"))
is True
)
assert (
digest_auth_mw._in_protection_space(
URL("http://example.com/secure/area/resource")
)
is True
)
# These should not match
assert digest_auth_mw._in_protection_space(URL("http://example.com/apiv2")) is False
assert (
digest_auth_mw._in_protection_space(URL("http://example.com/admin")) is False
) # No trailing slash
assert (
digest_auth_mw._in_protection_space(URL("http://example.com/secure")) is False
)
assert digest_auth_mw._in_protection_space(URL("http://example.com/other")) is False
async def test_case_sensitive_algorithm_server(
aiohttp_server: AiohttpServer,
) -> None:
"""Test authentication with a server that requires exact algorithm case matching.
This simulates servers like Prusa printers that expect the algorithm
to be returned with the exact same case as sent in the challenge.
"""
digest_auth_mw = DigestAuthMiddleware("testuser", "testpass")
request_count = 0
auth_algorithms: list[str] = []
async def handler(request: Request) -> Response:
nonlocal request_count
request_count += 1
if not (auth_header := request.headers.get(hdrs.AUTHORIZATION)):
# Send a challenge whose algorithm mixes case ("MD5-sess"), like Prusa
challenge = 'Digest realm="Administrator", nonce="test123", qop="auth", algorithm="MD5-sess", opaque="xyz123"'
return Response(
status=401,
headers={"WWW-Authenticate": challenge},
text="Unauthorized",
)
# Extract algorithm from auth response
algo_match = re.search(r"algorithm=([^,\s]+)", auth_header)
assert algo_match is not None
auth_algorithms.append(algo_match.group(1))
# Case-sensitive server: only accept exact case match
assert "algorithm=MD5-sess" in auth_header
return Response(text="Success")
app = Application()
app.router.add_get("/api/test", handler)
server = await aiohttp_server(app)
async with (
ClientSession(middlewares=(digest_auth_mw,)) as session,
session.get(server.make_url("/api/test")) as resp,
):
assert resp.status == 200
text = await resp.text()
assert text == "Success"
# Verify the middleware preserved the exact algorithm case
assert request_count == 2 # Initial 401 + successful retry
assert len(auth_algorithms) == 1
assert auth_algorithms[0] == "MD5-sess" # Not "MD5-SESS"
|
./temp_repos/aiohttp/aiohttp/client_middleware_digest_auth.py
|
./temp_repos/aiohttp/tests/test_client_middleware_digest_auth.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'DigestAuthChallenge'.
Context:
- Class Name: DigestAuthChallenge
- Dependencies to Mock: password, preemptive, login
- Key Imports: yarl, time, client_middlewares, client_exceptions, typing, collections.abc, hashlib, client_reqrep, os, payload
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
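A minimal sketch of such a test follows. It assumes DigestAuthChallenge is the TypedDict of parsed challenge fields exercised by the middleware tests above; the field names and their optionality are assumptions, and the mock here only stands in for a consumer of the challenge.
import unittest
from unittest import mock

from aiohttp.client_middleware_digest_auth import DigestAuthChallenge


class TestDigestAuthChallenge(unittest.TestCase):
    def test_full_challenge_round_trips(self) -> None:
        # Arrange: a fully populated challenge (field names assumed)
        challenge: DigestAuthChallenge = {
            "realm": "test",
            "nonce": "abc123",
            "qop": "auth",
            "algorithm": "MD5",
            "opaque": "xyz",
        }
        # Act: hand the challenge to a mocked consumer
        consumer = mock.MagicMock()
        consumer.apply(challenge)
        # Assert: the consumer saw the challenge unchanged
        consumer.apply.assert_called_once_with(challenge)
        self.assertEqual(challenge["realm"], "test")

    def test_partial_challenge_missing_optional_fields(self) -> None:
        # Arrange: qop/opaque omitted, as in the no-qop tests above
        challenge: DigestAuthChallenge = {"realm": "r", "nonce": "n"}
        # Act / Assert: absent optional keys fail loudly on access
        self.assertNotIn("qop", challenge)
        with self.assertRaises(KeyError):
            challenge["qop"]


if __name__ == "__main__":
    unittest.main()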
|
DigestAuthChallenge
|
python
|
"""Async gunicorn worker for aiohttp.web"""
import asyncio
import inspect
import os
import re
import signal
import sys
from types import FrameType
from typing import Any, Optional
from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
from gunicorn.workers import base
from aiohttp import web
from .helpers import set_result
from .web_app import Application
from .web_log import AccessLogger
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]
__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker")
class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default
def __init__(self, *args: Any, **kw: Any) -> None:
super().__init__(*args, **kw)
self._task: asyncio.Task[None] | None = None
self.exit_code = 0
self._notify_waiter: asyncio.Future[bool] | None = None
def init_process(self) -> None:
# create new event_loop after fork
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
super().init_process()
def run(self) -> None:
self._task = self.loop.create_task(self._run())
try: # ignore all finalization problems
self.loop.run_until_complete(self._task)
except Exception:
self.log.exception("Exception in gunicorn worker")
self.loop.run_until_complete(self.loop.shutdown_asyncgens())
self.loop.close()
sys.exit(self.exit_code)
async def _run(self) -> None:
runner = None
if isinstance(self.wsgi, Application):
app = self.wsgi
elif inspect.iscoroutinefunction(self.wsgi) or (
sys.version_info < (3, 14) and asyncio.iscoroutinefunction(self.wsgi)
):
wsgi = await self.wsgi()
if isinstance(wsgi, web.AppRunner):
runner = wsgi
app = runner.app
else:
app = wsgi
else:
raise RuntimeError(
"wsgi app should be either Application or "
f"async function returning Application, got {self.wsgi}"
)
if runner is None:
access_log = self.log.access_log if self.cfg.accesslog else None
runner = web.AppRunner(
app,
logger=self.log,
keepalive_timeout=self.cfg.keepalive,
access_log=access_log,
access_log_format=self._get_valid_log_format(
self.cfg.access_log_format
),
shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
)
await runner.setup()
ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
assert runner is not None
server = runner.server
assert server is not None
for sock in self.sockets:
site = web.SockSite(
runner,
sock,
ssl_context=ctx,
)
await site.start()
# If our parent changed then we shut down.
pid = os.getpid()
try:
while self.alive: # type: ignore[has-type]
self.notify()
cnt = server.requests_count
if self.max_requests and cnt > self.max_requests:
self.alive = False
self.log.info("Max requests, shutting down: %s", self)
elif pid == os.getpid() and self.ppid != os.getppid():
self.alive = False
self.log.info("Parent changed, shutting down: %s", self)
else:
await self._wait_next_notify()
except BaseException:
pass
await runner.cleanup()
def _wait_next_notify(self) -> "asyncio.Future[bool]":
self._notify_waiter_done()
loop = self.loop
assert loop is not None
self._notify_waiter = waiter = loop.create_future()
self.loop.call_later(1.0, self._notify_waiter_done, waiter)
return waiter
def _notify_waiter_done(
self, waiter: Optional["asyncio.Future[bool]"] = None
) -> None:
if waiter is None:
waiter = self._notify_waiter
if waiter is not None:
set_result(waiter, True)
if waiter is self._notify_waiter:
self._notify_waiter = None
def init_signals(self) -> None:
# Set up signals through the event loop API.
self.loop.add_signal_handler(
signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
)
self.loop.add_signal_handler(
signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
)
self.loop.add_signal_handler(
signal.SIGINT, self.handle_quit, signal.SIGINT, None
)
self.loop.add_signal_handler(
signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
)
self.loop.add_signal_handler(
signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
)
self.loop.add_signal_handler(
signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
)
# Don't let SIGTERM and SIGUSR1 disturb active requests
# by interrupting system calls
signal.siginterrupt(signal.SIGTERM, False)
signal.siginterrupt(signal.SIGUSR1, False)
# Reset signals so Gunicorn doesn't swallow subprocess return codes
# See: https://github.com/aio-libs/aiohttp/issues/6130
def handle_quit(self, sig: int, frame: FrameType | None) -> None:
self.alive = False
# worker_int callback
self.cfg.worker_int(self)
# wakeup closing process
self._notify_waiter_done()
def handle_abort(self, sig: int, frame: FrameType | None) -> None:
self.alive = False
self.exit_code = 1
self.cfg.worker_abort(self)
sys.exit(1)
@staticmethod
def _create_ssl_context(cfg: Any) -> "SSLContext":
"""Creates SSLContext instance for usage in asyncio.create_server.
See ssl.SSLSocket.__init__ for more details.
"""
if ssl is None: # pragma: no cover
raise RuntimeError("SSL is not supported.")
ctx = ssl.SSLContext(cfg.ssl_version)
ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
ctx.verify_mode = cfg.cert_reqs
if cfg.ca_certs:
ctx.load_verify_locations(cfg.ca_certs)
if cfg.ciphers:
ctx.set_ciphers(cfg.ciphers)
return ctx
def _get_valid_log_format(self, source_format: str) -> str:
if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
return self.DEFAULT_AIOHTTP_LOG_FORMAT
elif re.search(r"%\([^\)]+\)", source_format):
raise ValueError(
"Gunicorn's style options in form of `%(name)s` are not "
"supported for the log formatting. Please use aiohttp's "
"format specification to configure access log formatting: "
"http://docs.aiohttp.org/en/stable/logging.html"
"#format-specification"
)
else:
return source_format
class GunicornUVLoopWebWorker(GunicornWebWorker):
def init_process(self) -> None:
import uvloop
# Setup uvloop policy, so that every
# asyncio.get_event_loop() will create an instance
# of uvloop event loop.
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
super().init_process()
|
# Tests for aiohttp/worker.py
import asyncio
import os
import socket
import ssl
from typing import TYPE_CHECKING
from unittest import mock
import pytest
from _pytest.fixtures import SubRequest
from aiohttp import web
if TYPE_CHECKING:
from aiohttp import worker as base_worker
else:
base_worker = pytest.importorskip("aiohttp.worker")
try:
import uvloop
except ImportError:
uvloop = None # type: ignore[assignment]
WRONG_LOG_FORMAT = '%a "%{Referrer}i" %(h)s %(l)s %s'
ACCEPTABLE_LOG_FORMAT = '%a "%{Referrer}i" %s'
class BaseTestWorker:
def __init__(self) -> None:
self.servers: dict[object, object] = {}
self.exit_code = 0
self._notify_waiter: asyncio.Future[bool] | None = None
self.cfg = mock.Mock()
self.cfg.graceful_timeout = 100
self.pid = "pid"
self.wsgi = web.Application()
class AsyncioWorker(BaseTestWorker, base_worker.GunicornWebWorker):
pass
PARAMS = [AsyncioWorker]
if uvloop is not None:
class UvloopWorker(BaseTestWorker, base_worker.GunicornUVLoopWebWorker):
pass
PARAMS.append(UvloopWorker)
@pytest.fixture(params=PARAMS)
def worker(
request: SubRequest, loop: asyncio.AbstractEventLoop
) -> base_worker.GunicornWebWorker:
asyncio.set_event_loop(loop)
ret = request.param()
ret.notify = mock.Mock()
return ret # type: ignore[no-any-return]
def test_init_process(worker: base_worker.GunicornWebWorker) -> None:
with mock.patch("aiohttp.worker.asyncio") as m_asyncio:
try:
worker.init_process()
except TypeError:
pass
assert m_asyncio.new_event_loop.called
assert m_asyncio.set_event_loop.called
def test_run(
worker: base_worker.GunicornWebWorker, loop: asyncio.AbstractEventLoop
) -> None:
worker.log = mock.Mock()
worker.cfg = mock.Mock()
worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT
worker.cfg.is_ssl = False
worker.cfg.graceful_timeout = 100
worker.sockets = []
worker.loop = loop
with pytest.raises(SystemExit):
worker.run()
worker.log.exception.assert_not_called()
assert loop.is_closed()
def test_run_async_factory(
worker: base_worker.GunicornWebWorker, loop: asyncio.AbstractEventLoop
) -> None:
worker.log = mock.Mock()
worker.cfg = mock.Mock()
worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT
worker.cfg.is_ssl = False
worker.cfg.graceful_timeout = 100
worker.sockets = []
app = worker.wsgi
async def make_app() -> web.Application:
return app # type: ignore[no-any-return]
worker.wsgi = make_app
worker.loop = loop
worker.alive = False
with pytest.raises(SystemExit):
worker.run()
worker.log.exception.assert_not_called()
assert loop.is_closed()
def test_run_not_app(
worker: base_worker.GunicornWebWorker, loop: asyncio.AbstractEventLoop
) -> None:
worker.log = mock.Mock()
worker.cfg = mock.Mock()
worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT
worker.loop = loop
worker.wsgi = "not-app"
worker.alive = False
with pytest.raises(SystemExit):
worker.run()
worker.log.exception.assert_called_with("Exception in gunicorn worker")
assert loop.is_closed()
def test_handle_abort(worker: base_worker.GunicornWebWorker) -> None:
with mock.patch("aiohttp.worker.sys") as m_sys:
worker.handle_abort(0, None)
assert not worker.alive
assert worker.exit_code == 1
m_sys.exit.assert_called_with(1)
def test__wait_next_notify(worker: base_worker.GunicornWebWorker) -> None:
worker.loop = mloop = mock.create_autospec(asyncio.AbstractEventLoop)
with mock.patch.object(worker, "_notify_waiter_done", autospec=True):
fut = worker._wait_next_notify()
assert worker._notify_waiter == fut
mloop.call_later.assert_called_with(1.0, worker._notify_waiter_done, fut)
def test__notify_waiter_done(worker: base_worker.GunicornWebWorker) -> None:
worker._notify_waiter = None
worker._notify_waiter_done()
assert worker._notify_waiter is None
waiter = worker._notify_waiter = mock.Mock()
worker._notify_waiter.done.return_value = False
worker._notify_waiter_done()
assert worker._notify_waiter is None
waiter.set_result.assert_called_with(True) # type: ignore[unreachable]
def test__notify_waiter_done_explicit_waiter(
worker: base_worker.GunicornWebWorker,
) -> None:
worker._notify_waiter = None
assert worker._notify_waiter is None
waiter = worker._notify_waiter = mock.Mock()
waiter.done.return_value = False
waiter2 = worker._notify_waiter = mock.Mock()
worker._notify_waiter_done(waiter)
assert worker._notify_waiter is waiter2
waiter.set_result.assert_called_with(True)
assert not waiter2.set_result.called
def test_init_signals(worker: base_worker.GunicornWebWorker) -> None:
worker.loop = mock.Mock()
worker.init_signals()
assert worker.loop.add_signal_handler.called
@pytest.mark.parametrize(
"source,result",
[
(ACCEPTABLE_LOG_FORMAT, ACCEPTABLE_LOG_FORMAT),
(
AsyncioWorker.DEFAULT_GUNICORN_LOG_FORMAT,
AsyncioWorker.DEFAULT_AIOHTTP_LOG_FORMAT,
),
],
)
def test__get_valid_log_format_ok(
worker: base_worker.GunicornWebWorker, source: str, result: str
) -> None:
assert result == worker._get_valid_log_format(source)
def test__get_valid_log_format_exc(worker: base_worker.GunicornWebWorker) -> None:
with pytest.raises(ValueError) as exc:
worker._get_valid_log_format(WRONG_LOG_FORMAT)
assert "%(name)s" in str(exc.value)
async def test__run_ok_parent_changed(
worker: base_worker.GunicornWebWorker,
loop: asyncio.AbstractEventLoop,
unused_port_socket: socket.socket,
) -> None:
worker.ppid = 0
worker.alive = True
sock = unused_port_socket
worker.sockets = [sock]
worker.log = mock.Mock()
worker.loop = loop
worker.max_requests = 0
worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT
worker.cfg.is_ssl = False
await worker._run()
worker.notify.assert_called_with()
worker.log.info.assert_called_with("Parent changed, shutting down: %s", worker)
async def test__run_exc(
worker: base_worker.GunicornWebWorker,
loop: asyncio.AbstractEventLoop,
unused_port_socket: socket.socket,
) -> None:
worker.ppid = os.getppid()
worker.alive = True
sock = unused_port_socket
worker.sockets = [sock]
worker.log = mock.Mock()
worker.loop = loop
worker.max_requests = 0
worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT
worker.cfg.is_ssl = False
def raiser() -> None:
waiter = worker._notify_waiter
worker.alive = False
assert waiter is not None
waiter.set_exception(RuntimeError())
loop.call_later(0.1, raiser)
await worker._run()
worker.notify.assert_called_with()
def test__create_ssl_context_without_certs_and_ciphers(
worker: base_worker.GunicornWebWorker,
tls_certificate_pem_path: str,
) -> None:
worker.cfg.ssl_version = ssl.PROTOCOL_TLS_CLIENT
worker.cfg.cert_reqs = ssl.CERT_OPTIONAL
worker.cfg.certfile = tls_certificate_pem_path
worker.cfg.keyfile = tls_certificate_pem_path
worker.cfg.ca_certs = None
worker.cfg.ciphers = None
ctx = worker._create_ssl_context(worker.cfg)
assert isinstance(ctx, ssl.SSLContext)
def test__create_ssl_context_with_ciphers(
worker: base_worker.GunicornWebWorker,
tls_certificate_pem_path: str,
) -> None:
worker.cfg.ssl_version = ssl.PROTOCOL_TLS_CLIENT
worker.cfg.cert_reqs = ssl.CERT_OPTIONAL
worker.cfg.certfile = tls_certificate_pem_path
worker.cfg.keyfile = tls_certificate_pem_path
worker.cfg.ca_certs = None
worker.cfg.ciphers = "3DES PSK"
ctx = worker._create_ssl_context(worker.cfg)
assert isinstance(ctx, ssl.SSLContext)
def test__create_ssl_context_with_ca_certs(
worker: base_worker.GunicornWebWorker,
tls_ca_certificate_pem_path: str,
tls_certificate_pem_path: str,
) -> None:
worker.cfg.ssl_version = ssl.PROTOCOL_TLS_CLIENT
worker.cfg.cert_reqs = ssl.CERT_OPTIONAL
worker.cfg.certfile = tls_certificate_pem_path
worker.cfg.keyfile = tls_certificate_pem_path
worker.cfg.ca_certs = tls_ca_certificate_pem_path
worker.cfg.ciphers = None
ctx = worker._create_ssl_context(worker.cfg)
assert isinstance(ctx, ssl.SSLContext)
|
./temp_repos/aiohttp/aiohttp/worker.py
|
./temp_repos/aiohttp/tests/test_worker.py
|
aiohttp
|
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'GunicornWebWorker'.
Context:
- Class Name: GunicornWebWorker
- Dependencies to Mock: None detected
- Key Imports: inspect, helpers, gunicorn.workers, web_app, web_log, gunicorn.config, typing, ssl, aiohttp, asyncio
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.
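A minimal sketch under these requirements is shown below, in unittest style. Constructing the worker via __new__ to skip gunicorn's base.Worker.__init__ is an assumption about acceptable test setup (the pytest suite above uses a similar shim), and gunicorn must be importable for aiohttp.worker to import.
import unittest
from unittest import mock

from aiohttp.worker import GunicornWebWorker


class TestGunicornWebWorker(unittest.TestCase):
    def _make_worker(self) -> GunicornWebWorker:
        # Arrange: bypass gunicorn's base.Worker.__init__ entirely
        worker = GunicornWebWorker.__new__(GunicornWebWorker)
        worker.cfg = mock.Mock()
        worker.log = mock.Mock()
        worker._notify_waiter = None
        return worker

    def test_get_valid_log_format_passes_aiohttp_style(self) -> None:
        worker = self._make_worker()
        # Act
        result = worker._get_valid_log_format('%a "%{Referrer}i" %s')
        # Assert: aiohttp-style formats are returned unchanged
        self.assertEqual(result, '%a "%{Referrer}i" %s')

    def test_get_valid_log_format_rejects_gunicorn_style(self) -> None:
        worker = self._make_worker()
        # Act / Assert: %(name)s placeholders raise ValueError
        with self.assertRaises(ValueError):
            worker._get_valid_log_format("%(h)s %(l)s")

    def test_handle_quit_marks_worker_dead(self) -> None:
        worker = self._make_worker()
        worker.alive = True
        # Act
        worker.handle_quit(15, None)
        # Assert: loop flag cleared and worker_int callback fired
        self.assertFalse(worker.alive)
        worker.cfg.worker_int.assert_called_once_with(worker)


if __name__ == "__main__":
    unittest.main()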
|
GunicornWebWorker
|
python
|
import abc
import asyncio
import re
import string
from contextlib import suppress
from enum import IntEnum
from re import Pattern
from typing import Any, ClassVar, Final, Generic, Literal, NamedTuple, TypeVar
from multidict import CIMultiDict, CIMultiDictProxy, istr
from yarl import URL
from . import hdrs
from .base_protocol import BaseProtocol
from .compression_utils import (
HAS_BROTLI,
HAS_ZSTD,
BrotliDecompressor,
ZLibDecompressor,
ZSTDDecompressor,
)
from .helpers import (
_EXC_SENTINEL,
DEBUG,
EMPTY_BODY_METHODS,
EMPTY_BODY_STATUS_CODES,
NO_EXTENSIONS,
BaseTimerContext,
set_exception,
)
from .http_exceptions import (
BadHttpMessage,
BadHttpMethod,
BadStatusLine,
ContentEncodingError,
ContentLengthError,
InvalidHeader,
InvalidURLError,
LineTooLong,
TransferEncodingError,
)
from .http_writer import HttpVersion, HttpVersion10
from .streams import EMPTY_PAYLOAD, StreamReader
from .typedefs import RawHeaders
__all__ = (
"HeadersParser",
"HttpParser",
"HttpRequestParser",
"HttpResponseParser",
"RawRequestMessage",
"RawResponseMessage",
)
_SEP = Literal[b"\r\n", b"\n"]
ASCIISET: Final[set[str]] = set(string.printable)
# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
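# e.g. "GET" and "M-SEARCH" are valid tokens, while "GET /" is not (SP is not a tchar)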
_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
class RawRequestMessage(NamedTuple):
method: str
path: str
version: HttpVersion
headers: CIMultiDictProxy[str]
raw_headers: RawHeaders
should_close: bool
compression: str | None
upgrade: bool
chunked: bool
url: URL
class RawResponseMessage(NamedTuple):
version: HttpVersion
code: int
reason: str
headers: CIMultiDictProxy[str]
raw_headers: RawHeaders
should_close: bool
compression: str | None
upgrade: bool
chunked: bool
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
class ParseState(IntEnum):
PARSE_NONE = 0
PARSE_LENGTH = 1
PARSE_CHUNKED = 2
PARSE_UNTIL_EOF = 3
class ChunkState(IntEnum):
PARSE_CHUNKED_SIZE = 0
PARSE_CHUNKED_CHUNK = 1
PARSE_CHUNKED_CHUNK_EOF = 2
PARSE_TRAILERS = 4
class HeadersParser:
def __init__(
self, max_line_size: int = 8190, max_field_size: int = 8190, lax: bool = False
) -> None:
self.max_line_size = max_line_size
self.max_field_size = max_field_size
self._lax = lax
def parse_headers(
self, lines: list[bytes]
) -> tuple["CIMultiDictProxy[str]", RawHeaders]:
headers: CIMultiDict[str] = CIMultiDict()
# note: "raw" does not mean inclusion of OWS before/after the field value
raw_headers = []
lines_idx = 0
line = lines[lines_idx]
line_count = len(lines)
while line:
# Parse initial header name : value pair.
try:
bname, bvalue = line.split(b":", 1)
except ValueError:
raise InvalidHeader(line) from None
if len(bname) == 0:
raise InvalidHeader(bname)
# https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"}
raise InvalidHeader(line)
bvalue = bvalue.lstrip(b" \t")
if len(bname) > self.max_field_size:
raise LineTooLong(
"request header name {}".format(
bname.decode("utf8", "backslashreplace")
),
str(self.max_field_size),
str(len(bname)),
)
name = bname.decode("utf-8", "surrogateescape")
if not TOKENRE.fullmatch(name):
raise InvalidHeader(bname)
header_length = len(bvalue)
# next line
lines_idx += 1
line = lines[lines_idx]
# consume continuation lines
continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t')
# Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
if continuation:
bvalue_lst = [bvalue]
while continuation:
header_length += len(line)
if header_length > self.max_field_size:
raise LineTooLong(
"request header field {}".format(
bname.decode("utf8", "backslashreplace")
),
str(self.max_field_size),
str(header_length),
)
bvalue_lst.append(line)
# next line
lines_idx += 1
if lines_idx < line_count:
line = lines[lines_idx]
if line:
continuation = line[0] in (32, 9) # (' ', '\t')
else:
line = b""
break
bvalue = b"".join(bvalue_lst)
else:
if header_length > self.max_field_size:
raise LineTooLong(
"request header field {}".format(
bname.decode("utf8", "backslashreplace")
),
str(self.max_field_size),
str(header_length),
)
bvalue = bvalue.strip(b" \t")
value = bvalue.decode("utf-8", "surrogateescape")
# https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
if "\n" in value or "\r" in value or "\x00" in value:
raise InvalidHeader(bvalue)
headers.add(name, value)
raw_headers.append((bname, bvalue))
return (CIMultiDictProxy(headers), tuple(raw_headers))
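# Example: HeadersParser().parse_headers([b"Host: example.com", b""]) returns
# a case-insensitive mapping {"Host": "example.com"} plus the raw pair
# ((b"Host", b"example.com"),).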
def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
"""Check if the upgrade header is supported."""
return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"}
class HttpParser(abc.ABC, Generic[_MsgT]):
lax: ClassVar[bool] = False
def __init__(
self,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
limit: int,
max_line_size: int = 8190,
max_field_size: int = 8190,
timer: BaseTimerContext | None = None,
code: int | None = None,
method: str | None = None,
payload_exception: type[BaseException] | None = None,
response_with_body: bool = True,
read_until_eof: bool = False,
auto_decompress: bool = True,
) -> None:
self.protocol = protocol
self.loop = loop
self.max_line_size = max_line_size
self.max_field_size = max_field_size
self.timer = timer
self.code = code
self.method = method
self.payload_exception = payload_exception
self.response_with_body = response_with_body
self.read_until_eof = read_until_eof
self._lines: list[bytes] = []
self._tail = b""
self._upgraded = False
self._payload = None
self._payload_parser: HttpPayloadParser | None = None
self._auto_decompress = auto_decompress
self._limit = limit
self._headers_parser = HeadersParser(max_line_size, max_field_size, self.lax)
@abc.abstractmethod
def parse_message(self, lines: list[bytes]) -> _MsgT: ...
@abc.abstractmethod
def _is_chunked_te(self, te: str) -> bool: ...
def feed_eof(self) -> _MsgT | None:
if self._payload_parser is not None:
self._payload_parser.feed_eof()
self._payload_parser = None
else:
# try to extract partial message
if self._tail:
self._lines.append(self._tail)
if self._lines:
if self._lines[-1] != b"":  # ensure a terminating blank line
self._lines.append(b"")
with suppress(Exception):
return self.parse_message(self._lines)
return None
def feed_data(
self,
data: bytes,
SEP: _SEP = b"\r\n",
EMPTY: bytes = b"",
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
METH_CONNECT: str = hdrs.METH_CONNECT,
SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
) -> tuple[list[tuple[_MsgT, StreamReader]], bool, bytes]:
messages = []
if self._tail:
data, self._tail = self._tail + data, b""
data_len = len(data)
start_pos = 0
loop = self.loop
should_close = False
while start_pos < data_len:
# read HTTP message (request/response line + headers), \r\n\r\n
# and split by lines
if self._payload_parser is None and not self._upgraded:
pos = data.find(SEP, start_pos)
# consume \r\n
if pos == start_pos and not self._lines:
start_pos = pos + len(SEP)
continue
if pos >= start_pos:
if should_close:
raise BadHttpMessage("Data after `Connection: close`")
# line found
line = data[start_pos:pos]
if SEP == b"\n": # For lax response parsing
line = line.rstrip(b"\r")
self._lines.append(line)
start_pos = pos + len(SEP)
# \r\n\r\n found
if self._lines[-1] == EMPTY:
try:
msg: _MsgT = self.parse_message(self._lines)
finally:
self._lines.clear()
def get_content_length() -> int | None:
# payload length
length_hdr = msg.headers.get(CONTENT_LENGTH)
if length_hdr is None:
return None
# Shouldn't allow +/- or other number formats.
# https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
# msg.headers is already stripped of leading/trailing wsp
if not DIGITS.fullmatch(length_hdr):
raise InvalidHeader(CONTENT_LENGTH)
return int(length_hdr)
length = get_content_length()
# do not support old websocket spec
if SEC_WEBSOCKET_KEY1 in msg.headers:
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
self._upgraded = msg.upgrade and _is_supported_upgrade(
msg.headers
)
method = getattr(msg, "method", self.method)
# code is only present on responses
code = getattr(msg, "code", 0)
assert self.protocol is not None
# calculate payload
empty_body = code in EMPTY_BODY_STATUS_CODES or bool(
method and method in EMPTY_BODY_METHODS
)
if not empty_body and (
((length is not None and length > 0) or msg.chunked)
and not self._upgraded
):
payload = StreamReader(
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
payload_parser = HttpPayloadParser(
payload,
length=length,
chunked=msg.chunked,
method=method,
compression=msg.compression,
code=self.code,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
lax=self.lax,
headers_parser=self._headers_parser,
)
if not payload_parser.done:
self._payload_parser = payload_parser
elif method == METH_CONNECT:
assert isinstance(msg, RawRequestMessage)
payload = StreamReader(
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
self._upgraded = True
self._payload_parser = HttpPayloadParser(
payload,
method=msg.method,
compression=msg.compression,
auto_decompress=self._auto_decompress,
lax=self.lax,
headers_parser=self._headers_parser,
)
elif not empty_body and length is None and self.read_until_eof:
payload = StreamReader(
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
payload_parser = HttpPayloadParser(
payload,
length=length,
chunked=msg.chunked,
method=method,
compression=msg.compression,
code=self.code,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
lax=self.lax,
headers_parser=self._headers_parser,
)
if not payload_parser.done:
self._payload_parser = payload_parser
else:
payload = EMPTY_PAYLOAD
messages.append((msg, payload))
should_close = msg.should_close
else:
self._tail = data[start_pos:]
data = EMPTY
break
# no parser, just store
elif self._payload_parser is None and self._upgraded:
assert not self._lines
break
# feed payload
elif data and start_pos < data_len:
assert not self._lines
assert self._payload_parser is not None
try:
eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
except BaseException as underlying_exc:
reraised_exc = underlying_exc
if self.payload_exception is not None:
reraised_exc = self.payload_exception(str(underlying_exc))
set_exception(
self._payload_parser.payload,
reraised_exc,
underlying_exc,
)
eof = True
data = b""
if isinstance(
underlying_exc, (InvalidHeader, TransferEncodingError)
):
raise
if eof:
start_pos = 0
data_len = len(data)
self._payload_parser = None
continue
else:
break
if data and start_pos < data_len:
data = data[start_pos:]
else:
data = EMPTY
return messages, self._upgraded, data
def parse_headers(
self, lines: list[bytes]
) -> tuple[
"CIMultiDictProxy[str]", RawHeaders, bool | None, str | None, bool, bool
]:
"""Parses RFC 5322 headers from a stream.
Line continuations are supported. Returns list of header name
and value pairs. Header name is in upper case.
"""
headers, raw_headers = self._headers_parser.parse_headers(lines)
close_conn = None
encoding = None
upgrade = False
chunked = False
# https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
# https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
singletons = (
hdrs.CONTENT_LENGTH,
hdrs.CONTENT_LOCATION,
hdrs.CONTENT_RANGE,
hdrs.CONTENT_TYPE,
hdrs.ETAG,
hdrs.HOST,
hdrs.MAX_FORWARDS,
hdrs.SERVER,
hdrs.TRANSFER_ENCODING,
hdrs.USER_AGENT,
)
bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
if bad_hdr is not None:
raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")
# keep-alive
conn = headers.get(hdrs.CONNECTION)
if conn:
v = conn.lower()
if v == "close":
close_conn = True
elif v == "keep-alive":
close_conn = False
# https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
elif v == "upgrade" and headers.get(hdrs.UPGRADE):
upgrade = True
# encoding
enc = headers.get(hdrs.CONTENT_ENCODING)
if enc:
enc = enc.lower()
if enc in ("gzip", "deflate", "br", "zstd"):
encoding = enc
# chunking
te = headers.get(hdrs.TRANSFER_ENCODING)
if te is not None:
if self._is_chunked_te(te):
chunked = True
if hdrs.CONTENT_LENGTH in headers:
raise BadHttpMessage(
"Transfer-Encoding can't be present with Content-Length",
)
return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
def set_upgraded(self, val: bool) -> None:
"""Set connection upgraded (to websocket) mode.
:param bool val: new state.
"""
self._upgraded = val
class HttpRequestParser(HttpParser[RawRequestMessage]):
"""Read request status line.
Exception .http_exceptions.BadStatusLine
could be raised in case of any errors in status line.
Returns RawRequestMessage.
"""
def parse_message(self, lines: list[bytes]) -> RawRequestMessage:
# request line
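# e.g. b"GET /path?x=1 HTTP/1.1"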
line = lines[0].decode("utf-8", "surrogateescape")
try:
method, path, version = line.split(" ", maxsplit=2)
except ValueError:
raise BadHttpMethod(line) from None
if len(path) > self.max_line_size:
raise LineTooLong(
"Status line is too long", str(self.max_line_size), str(len(path))
)
# method
if not TOKENRE.fullmatch(method):
raise BadHttpMethod(method)
# version
match = VERSRE.fullmatch(version)
if match is None:
raise BadStatusLine(line)
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
if method == "CONNECT":
# authority-form,
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
url = URL.build(authority=path, encoded=True)
elif path.startswith("/"):
# origin-form,
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
path_part, _hash_separator, url_fragment = path.partition("#")
path_part, _question_mark_separator, qs_part = path_part.partition("?")
# NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
# NOTE: parser does, otherwise it results into the same
# NOTE: HTTP Request-Line input producing different
# NOTE: `yarl.URL()` objects
url = URL.build(
path=path_part,
query_string=qs_part,
fragment=url_fragment,
encoded=True,
)
elif path == "*" and method == "OPTIONS":
# asterisk-form,
url = URL(path, encoded=True)
else:
# absolute-form for proxy maybe,
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
url = URL(path, encoded=True)
if url.scheme == "":
# not absolute-form
raise InvalidURLError(
path.encode(errors="surrogateescape").decode("latin1")
)
# read headers
(
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
) = self.parse_headers(lines[1:])
if close is None: # then the headers weren't set in the request
if version_o <= HttpVersion10: # HTTP/1.0 closes unless keep-alive is requested
close = True
else: # HTTP/1.1 stays open unless close is requested
close = False
return RawRequestMessage(
method,
path,
version_o,
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
url,
)
def _is_chunked_te(self, te: str) -> bool:
if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked":
return True
# https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
class HttpResponseParser(HttpParser[RawResponseMessage]):
"""Read response status line and headers.
BadStatusLine may be raised on any error in the status line.
Returns RawResponseMessage.
"""
# Lax mode should only be enabled on response parser.
lax = not DEBUG
def feed_data(
self,
data: bytes,
SEP: _SEP | None = None,
*args: Any,
**kwargs: Any,
) -> tuple[list[tuple[RawResponseMessage, StreamReader]], bool, bytes]:
if SEP is None:
SEP = b"\r\n" if DEBUG else b"\n"
return super().feed_data(data, SEP, *args, **kwargs)
def parse_message(self, lines: list[bytes]) -> RawResponseMessage:
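# status line, e.g. b"HTTP/1.1 200 OK"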
line = lines[0].decode("utf-8", "surrogateescape")
try:
version, status = line.split(maxsplit=1)
except ValueError:
raise BadStatusLine(line) from None
try:
status, reason = status.split(maxsplit=1)
except ValueError:
status = status.strip()
reason = ""
if len(reason) > self.max_line_size:
raise LineTooLong(
"Status line is too long", str(self.max_line_size), str(len(reason))
)
# version
match = VERSRE.fullmatch(version)
if match is None:
raise BadStatusLine(line)
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
# The status code is a three-digit ASCII number, no padding
if len(status) != 3 or not DIGITS.fullmatch(status):
raise BadStatusLine(line)
status_i = int(status)
# read headers
(
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
) = self.parse_headers(lines[1:])
if close is None:
if version_o <= HttpVersion10:
close = True
# https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
elif 100 <= status_i < 200 or status_i in {204, 304}:
close = False
elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
close = False
else:
# https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
close = True
return RawResponseMessage(
version_o,
status_i,
reason.strip(),
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
)
def _is_chunked_te(self, te: str) -> bool:
# https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
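# e.g. "gzip, chunked" -> True; "chunked, gzip" -> False (chunked must be last)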
return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked"
class HttpPayloadParser:
def __init__(
self,
payload: StreamReader,
length: int | None = None,
chunked: bool = False,
compression: str | None = None,
code: int | None = None,
method: str | None = None,
response_with_body: bool = True,
auto_decompress: bool = True,
lax: bool = False,
*,
headers_parser: HeadersParser,
) -> None:
self._length = 0
self._type = ParseState.PARSE_UNTIL_EOF
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
self._chunk_size = 0
self._chunk_tail = b""
self._auto_decompress = auto_decompress
self._lax = lax
self._headers_parser = headers_parser
self._trailer_lines: list[bytes] = []
self.done = False
# payload decompression wrapper
if response_with_body and compression and self._auto_decompress:
real_payload: StreamReader | DeflateBuffer = DeflateBuffer(
payload, compression
)
else:
real_payload = payload
# payload parser
if not response_with_body:
# don't parse payload if it's not expected to be received
self._type = ParseState.PARSE_NONE
real_payload.feed_eof()
self.done = True
elif chunked:
self._type = ParseState.PARSE_CHUNKED
elif length is not None:
self._type = ParseState.PARSE_LENGTH
self._length = length
if self._length == 0:
real_payload.feed_eof()
self.done = True
self.payload = real_payload
def feed_eof(self) -> None:
if self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_eof()
elif self._type == ParseState.PARSE_LENGTH:
raise ContentLengthError(
"Not enough data to satisfy content length header."
)
elif self._type == ParseState.PARSE_CHUNKED:
raise TransferEncodingError(
"Not enough data to satisfy transfer length header."
)
def feed_data(
self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
) -> tuple[bool, bytes]:
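# Example chunked stream handled below (sizes are hex):
#   b"4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n" -> payload b"Wikipedia", no trailers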
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
required = self._length
self._length = max(required - len(chunk), 0)
self.payload.feed_data(chunk[:required])
if self._length == 0:
self.payload.feed_eof()
return True, chunk[required:]
# Chunked transfer encoding parser
elif self._type == ParseState.PARSE_CHUNKED:
if self._chunk_tail:
chunk = self._chunk_tail + chunk
self._chunk_tail = b""
while chunk:
# read next chunk size
if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
pos = chunk.find(SEP)
if pos >= 0:
i = chunk.find(CHUNK_EXT, 0, pos)
if i >= 0:
size_b = chunk[:i] # strip chunk-extensions
# Verify no LF in the chunk-extension
if b"\n" in (ext := chunk[i:pos]):
exc = TransferEncodingError(
f"Unexpected LF in chunk-extension: {ext!r}"
)
set_exception(self.payload, exc)
raise exc
else:
size_b = chunk[:pos]
if self._lax: # Allow whitespace in lax mode.
size_b = size_b.strip()
if not re.fullmatch(HEXDIGITS, size_b):
exc = TransferEncodingError(
chunk[:pos].decode("ascii", "surrogateescape")
)
set_exception(self.payload, exc)
raise exc
size = int(bytes(size_b), 16)
chunk = chunk[pos + len(SEP) :]
if size == 0: # eof marker
self._chunk = ChunkState.PARSE_TRAILERS
if self._lax and chunk.startswith(b"\r"):
chunk = chunk[1:]
else:
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
self._chunk_size = size
self.payload.begin_http_chunk_receiving()
else:
self._chunk_tail = chunk
return False, b""
# read chunk and feed buffer
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
required = self._chunk_size
self._chunk_size = max(required - len(chunk), 0)
self.payload.feed_data(chunk[:required])
if self._chunk_size:
return False, b""
chunk = chunk[required:]
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
self.payload.end_http_chunk_receiving()
# toss the CRLF at the end of the chunk
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
if self._lax and chunk.startswith(b"\r"):
chunk = chunk[1:]
if chunk[: len(SEP)] == SEP:
chunk = chunk[len(SEP) :]
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
else:
self._chunk_tail = chunk
return False, b""
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
if pos < 0: # No line found
self._chunk_tail = chunk
return False, b""
line = chunk[:pos]
chunk = chunk[pos + len(SEP) :]
if SEP == b"\n": # For lax response parsing
line = line.rstrip(b"\r")
self._trailer_lines.append(line)
# \r\n\r\n found, end of stream
if self._trailer_lines[-1] == b"":
# Headers and trailers are defined the same way,
# so we reuse the HeadersParser here.
try:
trailers, raw_trailers = self._headers_parser.parse_headers(
self._trailer_lines
)
finally:
self._trailer_lines.clear()
self.payload.feed_eof()
return True, chunk
# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk)
return False, b""
class DeflateBuffer:
"""DeflateStream decompress stream and feed data into specified stream."""
def __init__(self, out: StreamReader, encoding: str | None) -> None:
self.out = out
self.size = 0
out.total_compressed_bytes = self.size
self.encoding = encoding
self._started_decoding = False
self.decompressor: BrotliDecompressor | ZLibDecompressor | ZSTDDecompressor
if encoding == "br":
if not HAS_BROTLI:
raise ContentEncodingError(
"Can not decode content-encoding: brotli (br). "
"Please install `Brotli`"
)
self.decompressor = BrotliDecompressor()
elif encoding == "zstd":
if not HAS_ZSTD:
raise ContentEncodingError(
"Can not decode content-encoding: zstandard (zstd). "
"Please install `backports.zstd`"
)
self.decompressor = ZSTDDecompressor()
else:
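            # "gzip" and "deflate" both decode via the zlib-based decompressor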
self.decompressor = ZLibDecompressor(encoding=encoding)
def set_exception(
self,
exc: type[BaseException] | BaseException,
exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
set_exception(self.out, exc, exc_cause)
def feed_data(self, chunk: bytes) -> None:
if not chunk:
return
self.size += len(chunk)
self.out.total_compressed_bytes = self.size
# RFC1950
# bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = window size.
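        # e.g. a well-formed zlib stream usually starts with 0x78 (b"x"):
        # low nibble 0x8 -> CM=8 (deflate), high nibble 0x7 -> 32 KiB window.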
if (
not self._started_decoding
and self.encoding == "deflate"
and chunk[0] & 0xF != 8
):
# Change the decoder to decompress incorrectly compressed data
# Actually we should issue a warning about non-RFC-compliant data.
self.decompressor = ZLibDecompressor(
encoding=self.encoding, suppress_deflate_header=True
)
try:
chunk = self.decompressor.decompress_sync(chunk)
except Exception:
raise ContentEncodingError(
"Can not decode content-encoding: %s" % self.encoding
)
self._started_decoding = True
if chunk:
self.out.feed_data(chunk)
def feed_eof(self) -> None:
chunk = self.decompressor.flush()
if chunk or self.size > 0:
self.out.feed_data(chunk)
# decompressor is not brotli unless encoding is "br"
if self.encoding == "deflate" and not self.decompressor.eof: # type: ignore[union-attr]
raise ContentEncodingError("deflate")
self.out.feed_eof()
def begin_http_chunk_receiving(self) -> None:
self.out.begin_http_chunk_receiving()
def end_http_chunk_receiving(self) -> None:
self.out.end_http_chunk_receiving()
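# Minimal usage sketch (illustrative, assuming an existing StreamReader
# `out` to receive the decoded bytes):
#     dbuf = DeflateBuffer(out, "gzip")
#     dbuf.feed_data(compressed_chunk)  # decompressed data lands in `out`
#     dbuf.feed_eof()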
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage
with suppress(ImportError):
if not NO_EXTENSIONS:
from ._http_parser import ( # type: ignore[import-not-found,no-redef]
HttpRequestParser,
HttpResponseParser,
RawRequestMessage,
RawResponseMessage,
)
HttpRequestParserC = HttpRequestParser
HttpResponseParserC = HttpResponseParser
RawRequestMessageC = RawRequestMessage
RawResponseMessageC = RawResponseMessage
# Tests for aiohttp/http_parser.py
import asyncio
import re
import sys
from collections.abc import Iterable
from contextlib import suppress
from typing import Any
from unittest import mock
from urllib.parse import quote
import pytest
from multidict import CIMultiDict
from yarl import URL
import aiohttp
from aiohttp import http_exceptions, streams
from aiohttp.base_protocol import BaseProtocol
from aiohttp.helpers import NO_EXTENSIONS
from aiohttp.http_parser import (
DeflateBuffer,
HeadersParser,
HttpParser,
HttpPayloadParser,
HttpRequestParser,
HttpRequestParserPy,
HttpResponseParser,
HttpResponseParserPy,
)
from aiohttp.http_writer import HttpVersion
try:
try:
import brotlicffi as brotli
except ImportError:
import brotli
except ImportError:
brotli = None
try:
if sys.version_info >= (3, 14):
import compression.zstd as zstandard # noqa: I900
else:
import backports.zstd as zstandard
except ImportError:
zstandard = None # type: ignore[assignment]
REQUEST_PARSERS = [HttpRequestParserPy]
RESPONSE_PARSERS = [HttpResponseParserPy]
with suppress(ImportError):
from aiohttp.http_parser import HttpRequestParserC, HttpResponseParserC
REQUEST_PARSERS.append(HttpRequestParserC)
RESPONSE_PARSERS.append(HttpResponseParserC)
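# Every test below is parametrized over all available implementations, so
# a failure reads e.g. test_parse[py-parser] vs test_parse[c-parser].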
@pytest.fixture
def protocol() -> Any:
return mock.create_autospec(BaseProtocol, spec_set=True, instance=True)
def _gen_ids(parsers: Iterable[type[HttpParser[Any]]]) -> list[str]:
return [
"py-parser" if parser.__module__ == "aiohttp.http_parser" else "c-parser"
for parser in parsers
]
@pytest.fixture(params=REQUEST_PARSERS, ids=_gen_ids(REQUEST_PARSERS))
def parser(
loop: asyncio.AbstractEventLoop,
protocol: BaseProtocol,
request: pytest.FixtureRequest,
) -> HttpRequestParser:
# Parser implementations
return request.param( # type: ignore[no-any-return]
protocol,
loop,
2**16,
max_line_size=8190,
max_field_size=8190,
)
@pytest.fixture(params=REQUEST_PARSERS, ids=_gen_ids(REQUEST_PARSERS))
def request_cls(request: pytest.FixtureRequest) -> type[HttpRequestParser]:
# Request Parser class
return request.param # type: ignore[no-any-return]
@pytest.fixture(params=RESPONSE_PARSERS, ids=_gen_ids(RESPONSE_PARSERS))
def response(
loop: asyncio.AbstractEventLoop,
protocol: BaseProtocol,
request: pytest.FixtureRequest,
) -> HttpResponseParser:
# Parser implementations
return request.param( # type: ignore[no-any-return]
protocol,
loop,
2**16,
max_line_size=8190,
max_field_size=8190,
read_until_eof=True,
)
@pytest.fixture(params=RESPONSE_PARSERS, ids=_gen_ids(RESPONSE_PARSERS))
def response_cls(request: pytest.FixtureRequest) -> type[HttpResponseParser]:
# Parser implementations
return request.param # type: ignore[no-any-return]
@pytest.mark.skipif(NO_EXTENSIONS, reason="C extensions disabled via NO_EXTENSIONS")
def test_c_parser_loaded() -> None:
assert "HttpRequestParserC" in dir(aiohttp.http_parser)
assert "HttpResponseParserC" in dir(aiohttp.http_parser)
assert "RawRequestMessageC" in dir(aiohttp.http_parser)
assert "RawResponseMessageC" in dir(aiohttp.http_parser)
def test_parse_headers(parser: HttpRequestParser) -> None:
text = b"""GET /test HTTP/1.1\r
test: a line\r
test2: data\r
\r
"""
messages, upgrade, tail = parser.feed_data(text)
assert len(messages) == 1
msg = messages[0][0]
assert list(msg.headers.items()) == [("test", "a line"), ("test2", "data")]
assert msg.raw_headers == ((b"test", b"a line"), (b"test2", b"data"))
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
def test_reject_obsolete_line_folding(parser: HttpRequestParser) -> None:
text = b"""GET /test HTTP/1.1\r
test: line\r
Content-Length: 48\r
test2: data\r
\r
"""
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
@pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.")
def test_invalid_character(
loop: asyncio.AbstractEventLoop,
protocol: BaseProtocol,
request: pytest.FixtureRequest,
) -> None:
parser = HttpRequestParserC(
protocol,
loop,
2**16,
max_line_size=8190,
max_field_size=8190,
)
text = b"POST / HTTP/1.1\r\nHost: localhost:8080\r\nSet-Cookie: abc\x01def\r\n\r\n"
error_detail = re.escape(
r""":
b'Set-Cookie: abc\x01def'
^"""
)
with pytest.raises(http_exceptions.BadHttpMessage, match=error_detail):
parser.feed_data(text)
@pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.")
def test_invalid_linebreak(
loop: asyncio.AbstractEventLoop,
protocol: BaseProtocol,
request: pytest.FixtureRequest,
) -> None:
parser = HttpRequestParserC(
protocol,
loop,
2**16,
max_line_size=8190,
max_field_size=8190,
)
text = b"GET /world HTTP/1.1\r\nHost: 127.0.0.1\n\r\n"
error_detail = re.escape(
r""":
b'Host: 127.0.0.1\n'
^"""
)
with pytest.raises(http_exceptions.BadHttpMessage, match=error_detail):
parser.feed_data(text)
def test_cve_2023_37276(parser: HttpRequestParser) -> None:
text = b"""POST / HTTP/1.1\r\nHost: localhost:8080\r\nX-Abc: \rxTransfer-Encoding: chunked\r\n\r\n"""
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
@pytest.mark.parametrize(
"rfc9110_5_6_2_token_delim",
r'"(),/:;<=>?@[\]{}',
)
def test_bad_header_name(
parser: HttpRequestParser, rfc9110_5_6_2_token_delim: str
) -> None:
text = f"POST / HTTP/1.1\r\nhead{rfc9110_5_6_2_token_delim}er: val\r\n\r\n".encode()
if rfc9110_5_6_2_token_delim == ":":
# Inserting colon into header just splits name/value earlier.
parser.feed_data(text)
return
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
@pytest.mark.parametrize(
"hdr",
(
"Content-Length: -5", # https://www.rfc-editor.org/rfc/rfc9110.html#name-content-length
"Content-Length: +256",
"Content-Length: \N{SUPERSCRIPT ONE}",
"Content-Length: \N{MATHEMATICAL DOUBLE-STRUCK DIGIT ONE}",
"Foo: abc\rdef", # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
"Bar: abc\ndef",
"Baz: abc\x00def",
"Foo : bar", # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
"Foo\t: bar",
"\xffoo: bar",
),
)
def test_bad_headers(parser: HttpRequestParser, hdr: str) -> None:
text = f"POST / HTTP/1.1\r\n{hdr}\r\n\r\n".encode()
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
def test_unpaired_surrogate_in_header_py(
loop: asyncio.AbstractEventLoop, protocol: BaseProtocol
) -> None:
parser = HttpRequestParserPy(
protocol,
loop,
2**16,
max_line_size=8190,
max_field_size=8190,
)
text = b"POST / HTTP/1.1\r\n\xff\r\n\r\n"
message = None
try:
parser.feed_data(text)
except http_exceptions.InvalidHeader as e:
message = e.message.encode("utf-8")
assert message is not None
def test_content_length_transfer_encoding(parser: HttpRequestParser) -> None:
text = (
b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\nTransfer-Encoding: a\r\n\r\n"
+ b"apple\r\n"
)
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
def test_bad_chunked(parser: HttpRequestParser) -> None:
"""Test that invalid chunked encoding doesn't allow content-length to be used."""
text = (
b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
+ b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
)
with pytest.raises(http_exceptions.BadHttpMessage, match="0_2e"):
parser.feed_data(text)
def test_whitespace_before_header(parser: HttpRequestParser) -> None:
text = b"GET / HTTP/1.1\r\n\tContent-Length: 1\r\n\r\nX"
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
def test_parse_headers_longline(parser: HttpRequestParser) -> None:
invalid_unicode_byte = b"\xd9"
header_name = b"Test" + invalid_unicode_byte + b"Header" + b"A" * 8192
text = b"GET /test HTTP/1.1\r\n" + header_name + b": test\r\n" + b"\r\n" + b"\r\n"
with pytest.raises((http_exceptions.LineTooLong, http_exceptions.BadHttpMessage)):
# FIXME: `LineTooLong` doesn't seem to actually be happening
parser.feed_data(text)
@pytest.fixture
def xfail_c_parser_status(request: pytest.FixtureRequest) -> None:
if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy):
return
request.node.add_marker(
pytest.mark.xfail(
reason="Regression test for Py parser. May match C behaviour later.",
raises=http_exceptions.BadStatusLine,
)
)
@pytest.mark.usefixtures("xfail_c_parser_status")
def test_parse_unusual_request_line(parser: HttpRequestParser) -> None:
text = b"#smol //a HTTP/1.3\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
assert len(messages) == 1
msg, _ = messages[0]
assert msg.compression is None
assert not msg.upgrade
assert msg.method == "#smol"
assert msg.path == "//a"
assert msg.version == (1, 3)
def test_parse(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
assert len(messages) == 1
msg, _ = messages[0]
assert msg.compression is None
assert not msg.upgrade
assert msg.method == "GET"
assert msg.path == "/test"
assert msg.version == (1, 1)
async def test_parse_body(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody"
messages, upgrade, tail = parser.feed_data(text)
assert len(messages) == 1
_, payload = messages[0]
body = await payload.read(4)
assert body == b"body"
async def test_parse_body_with_CRLF(parser: HttpRequestParser) -> None:
text = b"\r\nGET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody"
messages, upgrade, tail = parser.feed_data(text)
assert len(messages) == 1
_, payload = messages[0]
body = await payload.read(4)
assert body == b"body"
def test_parse_delayed(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\n"
messages, upgrade, tail = parser.feed_data(text)
assert len(messages) == 0
assert not upgrade
messages, upgrade, tail = parser.feed_data(b"\r\n")
assert len(messages) == 1
msg = messages[0][0]
assert msg.method == "GET"
def test_headers_multi_feed(parser: HttpRequestParser) -> None:
text1 = b"GET /test HTTP/1.1\r\n"
text2 = b"test: line"
text3 = b" continue\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text1)
assert len(messages) == 0
messages, upgrade, tail = parser.feed_data(text2)
assert len(messages) == 0
messages, upgrade, tail = parser.feed_data(text3)
assert len(messages) == 1
msg = messages[0][0]
assert list(msg.headers.items()) == [("test", "line continue")]
assert msg.raw_headers == ((b"test", b"line continue"),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
def test_headers_split_field(parser: HttpRequestParser) -> None:
text1 = b"GET /test HTTP/1.1\r\n"
text2 = b"t"
text3 = b"es"
text4 = b"t: value\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text1)
messages, upgrade, tail = parser.feed_data(text2)
messages, upgrade, tail = parser.feed_data(text3)
assert len(messages) == 0
messages, upgrade, tail = parser.feed_data(text4)
assert len(messages) == 1
msg = messages[0][0]
assert list(msg.headers.items()) == [("test", "value")]
assert msg.raw_headers == ((b"test", b"value"),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
def test_parse_headers_multi(parser: HttpRequestParser) -> None:
text = (
b"GET /test HTTP/1.1\r\n"
b"Set-Cookie: c1=cookie1\r\n"
b"Set-Cookie: c2=cookie2\r\n\r\n"
)
messages, upgrade, tail = parser.feed_data(text)
assert len(messages) == 1
msg = messages[0][0]
assert list(msg.headers.items()) == [
("Set-Cookie", "c1=cookie1"),
("Set-Cookie", "c2=cookie2"),
]
assert msg.raw_headers == (
(b"Set-Cookie", b"c1=cookie1"),
(b"Set-Cookie", b"c2=cookie2"),
)
assert not msg.should_close
assert msg.compression is None
def test_conn_default_1_0(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.0\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.should_close
def test_conn_default_1_1(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert not msg.should_close
def test_conn_close(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\nconnection: close\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.should_close
def test_conn_close_1_0(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.0\r\nconnection: close\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.should_close
def test_conn_keep_alive_1_0(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.0\r\nconnection: keep-alive\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert not msg.should_close
def test_conn_keep_alive_1_1(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\nconnection: keep-alive\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert not msg.should_close
def test_conn_other_1_0(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.0\r\nconnection: test\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.should_close
def test_conn_other_1_1(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\nconnection: test\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert not msg.should_close
def test_request_chunked(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg, payload = messages[0]
assert msg.chunked
assert not upgrade
assert isinstance(payload, streams.StreamReader)
def test_request_te_chunked_with_content_length(parser: HttpRequestParser) -> None:
text = (
b"GET /test HTTP/1.1\r\n"
b"content-length: 1234\r\n"
b"transfer-encoding: chunked\r\n\r\n"
)
with pytest.raises(
http_exceptions.BadHttpMessage,
match="Transfer-Encoding can't be present with Content-Length",
):
parser.feed_data(text)
def test_request_te_chunked123(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked123\r\n\r\n"
with pytest.raises(
http_exceptions.BadHttpMessage,
match="Request has invalid `Transfer-Encoding`",
):
parser.feed_data(text)
async def test_request_te_last_chunked(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: not, chunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
# https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
assert await messages[0][1].read() == b"Test"
def test_request_te_first_chunked(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked, not\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n"
# https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
with pytest.raises(
http_exceptions.BadHttpMessage,
match="nvalid `Transfer-Encoding`",
):
parser.feed_data(text)
def test_conn_upgrade(parser: HttpRequestParser) -> None:
text = (
b"GET /test HTTP/1.1\r\n"
b"connection: upgrade\r\n"
b"upgrade: websocket\r\n\r\n"
)
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert not msg.should_close
assert msg.upgrade
assert upgrade
def test_bad_upgrade(parser: HttpRequestParser) -> None:
"""Test not upgraded if missing Upgrade header."""
text = b"GET /test HTTP/1.1\r\nconnection: upgrade\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert not msg.upgrade
assert not upgrade
def test_compression_empty(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ncontent-encoding: \r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.compression is None
def test_compression_deflate(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ncontent-encoding: deflate\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.compression == "deflate"
def test_compression_gzip(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ncontent-encoding: gzip\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.compression == "gzip"
@pytest.mark.skipif(brotli is None, reason="brotli is not installed")
def test_compression_brotli(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ncontent-encoding: br\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.compression == "br"
@pytest.mark.skipif(zstandard is None, reason="zstandard is not installed")
def test_compression_zstd(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ncontent-encoding: zstd\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.compression == "zstd"
def test_compression_unknown(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ncontent-encoding: compress\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.compression is None
def test_url_connect(parser: HttpRequestParser) -> None:
text = b"CONNECT www.google.com HTTP/1.1\r\ncontent-length: 0\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg, payload = messages[0]
assert upgrade
assert msg.url == URL.build(authority="www.google.com")
def test_headers_connect(parser: HttpRequestParser) -> None:
text = b"CONNECT www.google.com HTTP/1.1\r\ncontent-length: 0\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg, payload = messages[0]
assert upgrade
assert isinstance(payload, streams.StreamReader)
def test_url_absolute(parser: HttpRequestParser) -> None:
text = (
b"GET https://www.google.com/path/to.html HTTP/1.1\r\n"
b"content-length: 0\r\n\r\n"
)
messages, upgrade, tail = parser.feed_data(text)
msg, payload = messages[0]
assert not upgrade
assert msg.method == "GET"
assert msg.url == URL("https://www.google.com/path/to.html")
def test_headers_old_websocket_key1(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\nSEC-WEBSOCKET-KEY1: line\r\n\r\n"
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
def test_headers_content_length_err_1(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ncontent-length: line\r\n\r\n"
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
def test_headers_content_length_err_2(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ncontent-length: -1\r\n\r\n"
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
_pad: dict[bytes, str] = {
b"": "empty",
# not a typo. Python likes triple zero
b"\000": "NUL",
b" ": "SP",
b" ": "SPSP",
# not a typo: both 0xa0 and 0x0a in case of 8-bit fun
b"\n": "LF",
b"\xa0": "NBSP",
b"\t ": "TABSP",
}
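# The matrix above produces requests such as (pre-SP padding, non-empty
# name, no post padding):
#     b"GET /test HTTP/1.1\r\n foo: value\r\n\r\n"
# RFC 9112 forbids whitespace around the field name, so every padded
# combination must be rejected.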
@pytest.mark.parametrize("hdr", [b"", b"foo"], ids=["name-empty", "with-name"])
@pytest.mark.parametrize("pad2", _pad.keys(), ids=["post-" + n for n in _pad.values()])
@pytest.mark.parametrize("pad1", _pad.keys(), ids=["pre-" + n for n in _pad.values()])
def test_invalid_header_spacing(
parser: HttpRequestParser, pad1: bytes, pad2: bytes, hdr: bytes
) -> None:
text = b"GET /test HTTP/1.1\r\n%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2)
if pad1 == pad2 == b"" and hdr != b"":
# one entry in param matrix is correct: non-empty name, not padded
parser.feed_data(text)
return
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
def test_empty_header_name(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\n:test\r\n\r\n"
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
def test_invalid_header(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ntest line\r\n\r\n"
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
def test_invalid_name(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ntest[]: line\r\n\r\n"
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(text)
@pytest.mark.parametrize("size", [40960, 8191])
def test_max_header_field_size(parser: HttpRequestParser, size: int) -> None:
name = b"t" * size
text = b"GET /test HTTP/1.1\r\n" + name + b":data\r\n\r\n"
match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading"
with pytest.raises(http_exceptions.LineTooLong, match=match):
parser.feed_data(text)
def test_max_header_field_size_under_limit(parser: HttpRequestParser) -> None:
name = b"t" * 8190
text = b"GET /test HTTP/1.1\r\n" + name + b":data\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.method == "GET"
assert msg.path == "/test"
assert msg.version == (1, 1)
assert msg.headers == CIMultiDict({name.decode(): "data"})
assert msg.raw_headers == ((name, b"data"),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
assert msg.url == URL("/test")
@pytest.mark.parametrize("size", [40960, 8191])
def test_max_header_value_size(parser: HttpRequestParser, size: int) -> None:
name = b"t" * size
text = b"GET /test HTTP/1.1\r\ndata:" + name + b"\r\n\r\n"
match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading"
with pytest.raises(http_exceptions.LineTooLong, match=match):
parser.feed_data(text)
def test_max_header_value_size_under_limit(parser: HttpRequestParser) -> None:
value = b"A" * 8190
text = b"GET /test HTTP/1.1\r\ndata:" + value + b"\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.method == "GET"
assert msg.path == "/test"
assert msg.version == (1, 1)
assert msg.headers == CIMultiDict({"data": value.decode()})
assert msg.raw_headers == ((b"data", value),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
assert msg.url == URL("/test")
@pytest.mark.parametrize("size", [40965, 8191])
def test_max_header_value_size_continuation(
response: HttpResponseParser, size: int
) -> None:
name = b"T" * (size - 5)
text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + name + b"\r\n\r\n"
match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading"
with pytest.raises(http_exceptions.LineTooLong, match=match):
response.feed_data(text)
def test_max_header_value_size_continuation_under_limit(
response: HttpResponseParser,
) -> None:
value = b"A" * 8185
text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + value + b"\r\n\r\n"
messages, upgrade, tail = response.feed_data(text)
msg = messages[0][0]
assert msg.code == 200
assert msg.reason == "Ok"
assert msg.version == (1, 1)
assert msg.headers == CIMultiDict({"data": "test " + value.decode()})
assert msg.raw_headers == ((b"data", b"test " + value),)
assert msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
def test_http_request_parser(parser: HttpRequestParser) -> None:
text = b"GET /path HTTP/1.1\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.method == "GET"
assert msg.path == "/path"
assert msg.version == (1, 1)
assert msg.headers == CIMultiDict()
assert msg.raw_headers == ()
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
assert msg.url == URL("/path")
def test_http_request_bad_status_line(parser: HttpRequestParser) -> None:
text = b"getpath \r\n\r\n"
with pytest.raises(http_exceptions.BadStatusLine) as exc_info:
parser.feed_data(text)
# Check for accidentally escaped message.
assert r"\n" not in exc_info.value.message
_num: dict[bytes, str] = {
# dangerous: accepted by Python int()
# unicodedata.category("\U0001D7D9") == 'Nd'
"\N{MATHEMATICAL DOUBLE-STRUCK DIGIT ONE}".encode(): "utf8digit",
# only added for interop tests, refused by Python int()
# unicodedata.category("\U000000B9") == 'No'
"\N{SUPERSCRIPT ONE}".encode(): "utf8number",
"\N{SUPERSCRIPT ONE}".encode("latin-1"): "latin1number",
}
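# Quick sanity check of the categories above (illustrative):
#     int("\N{MATHEMATICAL DOUBLE-STRUCK DIGIT ONE}") == 1   # Nd: accepted
#     int("\N{SUPERSCRIPT ONE}")                             # No: ValueError
# hence only the first entry is dangerous for a naive int()-based parser.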
@pytest.mark.parametrize("nonascii_digit", _num.keys(), ids=_num.values())
def test_http_request_bad_status_line_number(
parser: HttpRequestParser, nonascii_digit: bytes
) -> None:
text = b"GET /digit HTTP/1." + nonascii_digit + b"\r\n\r\n"
with pytest.raises(http_exceptions.BadStatusLine):
parser.feed_data(text)
def test_http_request_bad_status_line_separator(parser: HttpRequestParser) -> None:
# single code point, old, multibyte NFKC, multibyte NFKD
utf8sep = "\N{ARABIC LIGATURE SALLALLAHOU ALAYHE WASALLAM}".encode()
text = b"GET /ligature HTTP/1" + utf8sep + b"1\r\n\r\n"
with pytest.raises(http_exceptions.BadStatusLine):
parser.feed_data(text)
def test_http_request_bad_status_line_whitespace(parser: HttpRequestParser) -> None:
text = b"GET\n/path\fHTTP/1.1\r\n\r\n"
with pytest.raises(http_exceptions.BadStatusLine):
parser.feed_data(text)
def test_http_request_message_after_close(parser: HttpRequestParser) -> None:
text = b"GET / HTTP/1.1\r\nConnection: close\r\n\r\nInvalid\r\n\r\n"
with pytest.raises(
http_exceptions.BadHttpMessage, match="Data after `Connection: close`"
):
parser.feed_data(text)
def test_http_request_upgrade(parser: HttpRequestParser) -> None:
text = (
b"GET /test HTTP/1.1\r\n"
b"connection: upgrade\r\n"
b"upgrade: websocket\r\n\r\n"
b"some raw data"
)
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert not msg.should_close
assert msg.upgrade
assert upgrade
assert tail == b"some raw data"
async def test_http_request_upgrade_unknown(parser: HttpRequestParser) -> None:
text = (
b"POST / HTTP/1.1\r\n"
b"Connection: Upgrade\r\n"
b"Content-Length: 2\r\n"
b"Upgrade: unknown\r\n"
b"Content-Type: application/json\r\n\r\n"
b"{}"
)
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert not msg.should_close
assert msg.upgrade
assert not upgrade
assert not msg.chunked
assert tail == b""
assert await messages[0][-1].read() == b"{}"
@pytest.fixture
def xfail_c_parser_url(request: pytest.FixtureRequest) -> None:
if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy):
return
request.node.add_marker(
pytest.mark.xfail(
reason="Regression test for Py parser. May match C behaviour later.",
raises=http_exceptions.InvalidURLError,
)
)
@pytest.mark.usefixtures("xfail_c_parser_url")
def test_http_request_parser_utf8_request_line(parser: HttpRequestParser) -> None:
messages, upgrade, tail = parser.feed_data(
# note the truncated unicode sequence
b"GET /P\xc3\xbcnktchen\xa0\xef\xb7 HTTP/1.1\r\n" +
        # for easier grep: 0xA0 is the Latin-1 non-breaking space (not ASCII)
# note the leading and trailing spaces
"sTeP: \N{LATIN SMALL LETTER SHARP S}nek\t\N{NO-BREAK SPACE} "
"\r\n\r\n".encode()
)
msg = messages[0][0]
assert msg.method == "GET"
assert msg.path == "/Pünktchen\udca0\udcef\udcb7"
assert msg.version == (1, 1)
assert msg.headers == CIMultiDict([("STEP", "ßnek\t\xa0")])
assert msg.raw_headers == ((b"sTeP", "ßnek\t\xa0".encode()),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
# python HTTP parser depends on Cython and CPython URL to match
# .. but yarl.URL("/abs") is not equal to URL.build(path="/abs"), see #6409
assert msg.url == URL.build(path="/Pünktchen\udca0\udcef\udcb7", encoded=True)
def test_http_request_parser_utf8(parser: HttpRequestParser) -> None:
text = "GET /path HTTP/1.1\r\nx-test:тест\r\n\r\n".encode()
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.method == "GET"
assert msg.path == "/path"
assert msg.version == (1, 1)
assert msg.headers == CIMultiDict([("X-TEST", "тест")])
assert msg.raw_headers == ((b"x-test", "тест".encode()),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
assert msg.url == URL("/path")
def test_http_request_parser_non_utf8(parser: HttpRequestParser) -> None:
text = "GET /path HTTP/1.1\r\nx-test:тест\r\n\r\n".encode("cp1251")
msg = parser.feed_data(text)[0][0][0]
assert msg.method == "GET"
assert msg.path == "/path"
assert msg.version == (1, 1)
assert msg.headers == CIMultiDict(
[("X-TEST", "тест".encode("cp1251").decode("utf8", "surrogateescape"))]
)
assert msg.raw_headers == ((b"x-test", "тест".encode("cp1251")),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
assert msg.url == URL("/path")
def test_http_request_parser_two_slashes(parser: HttpRequestParser) -> None:
text = b"GET //path HTTP/1.1\r\n\r\n"
msg = parser.feed_data(text)[0][0][0]
assert msg.method == "GET"
assert msg.path == "//path"
assert msg.url.path == "//path"
assert msg.version == (1, 1)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
@pytest.mark.parametrize(
"rfc9110_5_6_2_token_delim",
[bytes([i]) for i in rb'"(),/:;<=>?@[\]{}'],
)
def test_http_request_parser_bad_method(
parser: HttpRequestParser, rfc9110_5_6_2_token_delim: bytes
) -> None:
with pytest.raises(http_exceptions.BadHttpMethod):
parser.feed_data(rfc9110_5_6_2_token_delim + b'ET" /get HTTP/1.1\r\n\r\n')
def test_http_request_parser_bad_version(parser: HttpRequestParser) -> None:
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(b"GET //get HT/11\r\n\r\n")
def test_http_request_parser_bad_version_number(parser: HttpRequestParser) -> None:
with pytest.raises(http_exceptions.BadHttpMessage):
parser.feed_data(b"GET /test HTTP/1.32\r\n\r\n")
def test_http_request_parser_bad_ascii_uri(parser: HttpRequestParser) -> None:
with pytest.raises(http_exceptions.InvalidURLError):
parser.feed_data(b"GET ! HTTP/1.1\r\n\r\n")
def test_http_request_parser_bad_nonascii_uri(parser: HttpRequestParser) -> None:
with pytest.raises(http_exceptions.InvalidURLError):
parser.feed_data(b"GET \xff HTTP/1.1\r\n\r\n")
@pytest.mark.parametrize("size", [40965, 8191])
def test_http_request_max_status_line(parser: HttpRequestParser, size: int) -> None:
path = b"t" * (size - 5)
match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading"
with pytest.raises(http_exceptions.LineTooLong, match=match):
parser.feed_data(b"GET /path" + path + b" HTTP/1.1\r\n\r\n")
def test_http_request_max_status_line_under_limit(parser: HttpRequestParser) -> None:
path = b"t" * (8190 - 5)
messages, upgraded, tail = parser.feed_data(
b"GET /path" + path + b" HTTP/1.1\r\n\r\n"
)
msg = messages[0][0]
assert msg.method == "GET"
assert msg.path == "/path" + path.decode()
assert msg.version == (1, 1)
assert msg.headers == CIMultiDict()
assert msg.raw_headers == ()
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
assert msg.url == URL("/path" + path.decode())
def test_http_response_parser_utf8(response: HttpResponseParser) -> None:
text = "HTTP/1.1 200 Ok\r\nx-test:тест\r\n\r\n".encode()
messages, upgraded, tail = response.feed_data(text)
assert len(messages) == 1
msg = messages[0][0]
assert msg.version == (1, 1)
assert msg.code == 200
assert msg.reason == "Ok"
assert msg.headers == CIMultiDict([("X-TEST", "тест")])
assert msg.raw_headers == ((b"x-test", "тест".encode()),)
assert not upgraded
assert not tail
def test_http_response_parser_utf8_without_reason(response: HttpResponseParser) -> None:
text = "HTTP/1.1 200 \r\nx-test:тест\r\n\r\n".encode()
messages, upgraded, tail = response.feed_data(text)
assert len(messages) == 1
msg = messages[0][0]
assert msg.version == (1, 1)
assert msg.code == 200
assert msg.reason == ""
assert msg.headers == CIMultiDict([("X-TEST", "тест")])
assert msg.raw_headers == ((b"x-test", "тест".encode()),)
assert not upgraded
assert not tail
def test_http_response_parser_obs_line_folding(response: HttpResponseParser) -> None:
text = b"HTTP/1.1 200 Ok\r\ntest: line\r\n continue\r\n\r\n"
messages, upgraded, tail = response.feed_data(text)
assert len(messages) == 1
msg = messages[0][0]
assert msg.version == (1, 1)
assert msg.code == 200
assert msg.reason == "Ok"
assert msg.headers == CIMultiDict([("TEST", "line continue")])
assert msg.raw_headers == ((b"test", b"line continue"),)
assert not upgraded
assert not tail
@pytest.mark.dev_mode
def test_http_response_parser_strict_obs_line_folding(
response: HttpResponseParser,
) -> None:
text = b"HTTP/1.1 200 Ok\r\ntest: line\r\n continue\r\n\r\n"
with pytest.raises(http_exceptions.BadHttpMessage):
response.feed_data(text)
@pytest.mark.parametrize("size", [40962, 8191])
def test_http_response_parser_bad_status_line_too_long(
response: HttpResponseParser, size: int
) -> None:
reason = b"t" * (size - 2)
match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading"
with pytest.raises(http_exceptions.LineTooLong, match=match):
response.feed_data(b"HTTP/1.1 200 Ok" + reason + b"\r\n\r\n")
def test_http_response_parser_status_line_under_limit(
response: HttpResponseParser,
) -> None:
reason = b"O" * 8190
messages, upgraded, tail = response.feed_data(
b"HTTP/1.1 200 " + reason + b"\r\n\r\n"
)
msg = messages[0][0]
assert msg.version == (1, 1)
assert msg.code == 200
assert msg.reason == reason.decode()
def test_http_response_parser_bad_version(response: HttpResponseParser) -> None:
with pytest.raises(http_exceptions.BadHttpMessage):
response.feed_data(b"HT/11 200 Ok\r\n\r\n")
def test_http_response_parser_bad_version_number(response: HttpResponseParser) -> None:
with pytest.raises(http_exceptions.BadHttpMessage):
response.feed_data(b"HTTP/12.3 200 Ok\r\n\r\n")
def test_http_response_parser_no_reason(response: HttpResponseParser) -> None:
msg = response.feed_data(b"HTTP/1.1 200\r\n\r\n")[0][0][0]
assert msg.version == (1, 1)
assert msg.code == 200
assert msg.reason == ""
def test_http_response_parser_lenient_headers(response: HttpResponseParser) -> None:
messages, upgrade, tail = response.feed_data(
b"HTTP/1.1 200 test\r\nFoo: abc\x01def\r\n\r\n"
)
msg = messages[0][0]
assert msg.headers["Foo"] == "abc\x01def"
@pytest.mark.dev_mode
def test_http_response_parser_strict_headers(response: HttpResponseParser) -> None:
if isinstance(response, HttpResponseParserPy):
pytest.xfail("Py parser is lenient. May update py-parser later.")
with pytest.raises(http_exceptions.BadHttpMessage): # type: ignore[unreachable]
response.feed_data(b"HTTP/1.1 200 test\r\nFoo: abc\x01def\r\n\r\n")
def test_http_response_parser_bad_crlf(response: HttpResponseParser) -> None:
"""Still a lot of dodgy servers sending bad requests like this."""
messages, upgrade, tail = response.feed_data(
b"HTTP/1.0 200 OK\nFoo: abc\nBar: def\n\nBODY\n"
)
msg = messages[0][0]
assert msg.headers["Foo"] == "abc"
assert msg.headers["Bar"] == "def"
async def test_http_response_parser_bad_chunked_lax(
response: HttpResponseParser,
) -> None:
text = (
b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n5 \r\nabcde\r\n0\r\n\r\n"
)
messages, upgrade, tail = response.feed_data(text)
assert await messages[0][1].read(5) == b"abcde"
@pytest.mark.dev_mode
async def test_http_response_parser_bad_chunked_strict_py(
loop: asyncio.AbstractEventLoop, protocol: BaseProtocol
) -> None:
response = HttpResponseParserPy(
protocol,
loop,
2**16,
max_line_size=8190,
max_field_size=8190,
)
text = (
b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n5 \r\nabcde\r\n0\r\n\r\n"
)
with pytest.raises(http_exceptions.TransferEncodingError, match="5"):
response.feed_data(text)
@pytest.mark.dev_mode
@pytest.mark.skipif(
"HttpRequestParserC" not in dir(aiohttp.http_parser),
reason="C based HTTP parser not available",
)
async def test_http_response_parser_bad_chunked_strict_c(
loop: asyncio.AbstractEventLoop, protocol: BaseProtocol
) -> None:
response = HttpResponseParserC(
protocol,
loop,
2**16,
max_line_size=8190,
max_field_size=8190,
)
text = (
b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n5 \r\nabcde\r\n0\r\n\r\n"
)
with pytest.raises(http_exceptions.BadHttpMessage):
response.feed_data(text)
async def test_http_response_parser_notchunked(
response: HttpResponseParser,
) -> None:
text = b"HTTP/1.1 200 OK\r\nTransfer-Encoding: notchunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n"
messages, upgrade, tail = response.feed_data(text)
response.feed_eof()
# https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
assert await messages[0][1].read() == b"1\r\nT\r\n3\r\nest\r\n0\r\n\r\n"
async def test_http_response_parser_last_chunked(
response: HttpResponseParser,
) -> None:
text = b"HTTP/1.1 200 OK\r\nTransfer-Encoding: not, chunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n"
messages, upgrade, tail = response.feed_data(text)
# https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
assert await messages[0][1].read() == b"Test"
def test_http_response_parser_bad(response: HttpResponseParser) -> None:
with pytest.raises(http_exceptions.BadHttpMessage):
response.feed_data(b"HTT/1\r\n\r\n")
def test_http_response_parser_code_under_100(response: HttpResponseParser) -> None:
with pytest.raises(http_exceptions.BadStatusLine):
response.feed_data(b"HTTP/1.1 99 test\r\n\r\n")
def test_http_response_parser_code_above_999(response: HttpResponseParser) -> None:
with pytest.raises(http_exceptions.BadStatusLine):
response.feed_data(b"HTTP/1.1 9999 test\r\n\r\n")
def test_http_response_parser_code_not_int(response: HttpResponseParser) -> None:
with pytest.raises(http_exceptions.BadStatusLine):
response.feed_data(b"HTTP/1.1 ttt test\r\n\r\n")
@pytest.mark.parametrize("nonascii_digit", _num.keys(), ids=_num.values())
def test_http_response_parser_code_not_ascii(
response: HttpResponseParser, nonascii_digit: bytes
) -> None:
with pytest.raises(http_exceptions.BadStatusLine):
response.feed_data(b"HTTP/1.1 20" + nonascii_digit + b" test\r\n\r\n")
def test_http_request_chunked_payload(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n"
msg, payload = parser.feed_data(text)[0][0]
assert msg.chunked
assert not payload.is_eof()
assert isinstance(payload, streams.StreamReader)
parser.feed_data(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n")
assert b"dataline" == b"".join(d for d in payload._buffer)
assert [4, 8] == payload._http_chunk_splits
assert payload.is_eof()
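# For reference: `_http_chunk_splits` stores cumulative end offsets of the
# received HTTP chunks within the buffered payload, so [4, 8] above marks
# b"data" (0..4) followed by b"line" (4..8).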
def test_http_request_chunked_payload_and_next_message(
parser: HttpRequestParser,
) -> None:
text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n"
msg, payload = parser.feed_data(text)[0][0]
messages, upgraded, tail = parser.feed_data(
b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n"
b"POST /test2 HTTP/1.1\r\n"
b"transfer-encoding: chunked\r\n\r\n"
)
assert b"dataline" == b"".join(d for d in payload._buffer)
assert [4, 8] == payload._http_chunk_splits
assert payload.is_eof()
assert len(messages) == 1
msg2, payload2 = messages[0]
assert msg2.method == "POST"
assert msg2.chunked
assert not payload2.is_eof()
def test_http_request_chunked_payload_chunks(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n"
msg, payload = parser.feed_data(text)[0][0]
parser.feed_data(b"4\r\ndata\r")
parser.feed_data(b"\n4")
parser.feed_data(b"\r")
parser.feed_data(b"\n")
parser.feed_data(b"li")
parser.feed_data(b"ne\r\n0\r\n")
parser.feed_data(b"test: test\r\n")
assert b"dataline" == b"".join(d for d in payload._buffer)
assert [4, 8] == payload._http_chunk_splits
assert not payload.is_eof()
parser.feed_data(b"\r\n")
assert b"dataline" == b"".join(d for d in payload._buffer)
assert [4, 8] == payload._http_chunk_splits
assert payload.is_eof()
def test_parse_chunked_payload_chunk_extension(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n"
msg, payload = parser.feed_data(text)[0][0]
parser.feed_data(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\ntest: test\r\n\r\n")
assert b"dataline" == b"".join(d for d in payload._buffer)
assert [4, 8] == payload._http_chunk_splits
assert payload.is_eof()
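# For reference, the grammar exercised above (RFC 9112, section 7.1):
#     chunk     = chunk-size [ chunk-ext ] CRLF chunk-data CRLF
#     chunk-ext = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val ] )
# so b"4;test\r\ndata\r\n" is a 4-byte chunk whose extension is ignored.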
async def test_request_chunked_with_trailer(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n4\r\ntest\r\n0\r\ntest: trailer\r\nsecond: test trailer\r\n\r\n"
messages, upgraded, tail = parser.feed_data(text)
assert not tail
msg, payload = messages[0]
assert await payload.read() == b"test"
    # TODO: Add assertion of trailers when the API is added.
async def test_request_chunked_reject_bad_trailer(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n0\r\nbad\ntrailer\r\n\r\n"
with pytest.raises(http_exceptions.BadHttpMessage, match=r"b'bad\\ntrailer'"):
parser.feed_data(text)
def test_parse_no_length_or_te_on_post(
loop: asyncio.AbstractEventLoop,
protocol: BaseProtocol,
request_cls: type[HttpRequestParser],
) -> None:
parser = request_cls(protocol, loop, limit=2**16)
text = b"POST /test HTTP/1.1\r\n\r\n"
msg, payload = parser.feed_data(text)[0][0]
assert payload.is_eof()
def test_parse_payload_response_without_body(
loop: asyncio.AbstractEventLoop,
protocol: BaseProtocol,
response_cls: type[HttpResponseParser],
) -> None:
parser = response_cls(protocol, loop, 2**16, response_with_body=False)
text = b"HTTP/1.1 200 Ok\r\ncontent-length: 10\r\n\r\n"
msg, payload = parser.feed_data(text)[0][0]
assert payload.is_eof()
def test_parse_length_payload(response: HttpResponseParser) -> None:
text = b"HTTP/1.1 200 Ok\r\ncontent-length: 4\r\n\r\n"
msg, payload = response.feed_data(text)[0][0]
assert not payload.is_eof()
response.feed_data(b"da")
response.feed_data(b"t")
response.feed_data(b"aHT")
assert payload.is_eof()
assert b"data" == b"".join(d for d in payload._buffer)
def test_parse_no_length_payload(parser: HttpRequestParser) -> None:
text = b"PUT / HTTP/1.1\r\n\r\n"
msg, payload = parser.feed_data(text)[0][0]
assert payload.is_eof()
def test_parse_content_length_payload_multiple(response: HttpResponseParser) -> None:
text = b"HTTP/1.1 200 OK\r\ncontent-length: 5\r\n\r\nfirst"
msg, payload = response.feed_data(text)[0][0]
assert msg.version == HttpVersion(major=1, minor=1)
assert msg.code == 200
assert msg.reason == "OK"
assert msg.headers == CIMultiDict(
[
("Content-Length", "5"),
]
)
assert msg.raw_headers == ((b"content-length", b"5"),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
assert payload.is_eof()
assert b"first" == b"".join(d for d in payload._buffer)
text = b"HTTP/1.1 200 OK\r\ncontent-length: 6\r\n\r\nsecond"
msg, payload = response.feed_data(text)[0][0]
assert msg.version == HttpVersion(major=1, minor=1)
assert msg.code == 200
assert msg.reason == "OK"
assert msg.headers == CIMultiDict(
[
("Content-Length", "6"),
]
)
assert msg.raw_headers == ((b"content-length", b"6"),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
assert payload.is_eof()
assert b"second" == b"".join(d for d in payload._buffer)
def test_parse_content_length_than_chunked_payload(
response: HttpResponseParser,
) -> None:
text = b"HTTP/1.1 200 OK\r\ncontent-length: 5\r\n\r\nfirst"
msg, payload = response.feed_data(text)[0][0]
assert msg.version == HttpVersion(major=1, minor=1)
assert msg.code == 200
assert msg.reason == "OK"
assert msg.headers == CIMultiDict(
[
("Content-Length", "5"),
]
)
assert msg.raw_headers == ((b"content-length", b"5"),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert not msg.chunked
assert payload.is_eof()
assert b"first" == b"".join(d for d in payload._buffer)
text = (
b"HTTP/1.1 200 OK\r\n"
b"transfer-encoding: chunked\r\n\r\n"
b"6\r\nsecond\r\n0\r\n\r\n"
)
msg, payload = response.feed_data(text)[0][0]
assert msg.version == HttpVersion(major=1, minor=1)
assert msg.code == 200
assert msg.reason == "OK"
assert msg.headers == CIMultiDict(
[
("Transfer-Encoding", "chunked"),
]
)
assert msg.raw_headers == ((b"transfer-encoding", b"chunked"),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert msg.chunked
assert payload.is_eof()
assert b"second" == b"".join(d for d in payload._buffer)
@pytest.mark.parametrize("code", (204, 304, 101, 102))
def test_parse_chunked_payload_empty_body_than_another_chunked(
response: HttpResponseParser, code: int
) -> None:
head = f"HTTP/1.1 {code} OK\r\n".encode()
text = head + b"transfer-encoding: chunked\r\n\r\n"
msg, payload = response.feed_data(text)[0][0]
assert msg.version == HttpVersion(major=1, minor=1)
assert msg.code == code
assert msg.reason == "OK"
assert msg.headers == CIMultiDict(
[
("Transfer-Encoding", "chunked"),
]
)
assert msg.raw_headers == ((b"transfer-encoding", b"chunked"),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert msg.chunked
assert payload.is_eof()
text = (
b"HTTP/1.1 200 OK\r\n"
b"transfer-encoding: chunked\r\n\r\n"
b"6\r\nsecond\r\n0\r\n\r\n"
)
msg, payload = response.feed_data(text)[0][0]
assert msg.version == HttpVersion(major=1, minor=1)
assert msg.code == 200
assert msg.reason == "OK"
assert msg.headers == CIMultiDict(
[
("Transfer-Encoding", "chunked"),
]
)
assert msg.raw_headers == ((b"transfer-encoding", b"chunked"),)
assert not msg.should_close
assert msg.compression is None
assert not msg.upgrade
assert msg.chunked
assert payload.is_eof()
assert b"second" == b"".join(d for d in payload._buffer)
async def test_parse_chunked_payload_split_chunks(response: HttpResponseParser) -> None:
network_chunks = (
b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n",
b"5\r\nfi",
b"rst",
# This simulates a bug in lax mode caused when the \r\n separator, before the
# next HTTP chunk, appears at the start of the next network chunk.
b"\r\n",
b"6",
b"\r",
b"\n",
b"second\r",
b"\n0\r\n\r\n",
)
reader = response.feed_data(network_chunks[0])[0][0][1]
for c in network_chunks[1:]:
response.feed_data(c)
assert response.feed_eof() is None
assert reader.is_eof()
assert await reader.read() == b"firstsecond"
async def test_parse_chunked_payload_with_lf_in_extensions(
parser: HttpRequestParser,
) -> None:
"""Test chunked payload that has a LF in the chunk extensions."""
payload = (
b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n"
b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n"
b"GET /admin HTTP/1.1\r\nHost: localhost:5001\r\n"
b"Transfer-Encoding: chunked\r\n\r\n0\r\n\r\n"
)
with pytest.raises(http_exceptions.BadHttpMessage, match="\\\\nxx"):
parser.feed_data(payload)
def test_partial_url(parser: HttpRequestParser) -> None:
messages, upgrade, tail = parser.feed_data(b"GET /te")
assert len(messages) == 0
messages, upgrade, tail = parser.feed_data(b"st HTTP/1.1\r\n\r\n")
assert len(messages) == 1
msg, payload = messages[0]
assert msg.method == "GET"
assert msg.path == "/test"
assert msg.version == (1, 1)
assert payload.is_eof()
@pytest.mark.parametrize(
("uri", "path", "query", "fragment"),
[
("/path%23frag", "/path#frag", {}, ""),
("/path%2523frag", "/path%23frag", {}, ""),
("/path?key=value%23frag", "/path", {"key": "value#frag"}, ""),
("/path?key=value%2523frag", "/path", {"key": "value%23frag"}, ""),
("/path#frag%20", "/path", {}, "frag "),
("/path#frag%2520", "/path", {}, "frag%20"),
],
)
def test_parse_uri_percent_encoded(
parser: HttpRequestParser, uri: str, path: str, query: dict[str, str], fragment: str
) -> None:
text = (f"GET {uri} HTTP/1.1\r\n\r\n").encode()
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.path == uri
assert msg.url == URL(uri)
assert msg.url.path == path
assert msg.url.query == query
assert msg.url.fragment == fragment
def test_parse_uri_utf8(parser: HttpRequestParser) -> None:
if not isinstance(parser, HttpRequestParserPy):
pytest.xfail("Not valid HTTP. Maybe update py-parser to reject later.")
text = ("GET /путь?ключ=знач#фраг HTTP/1.1\r\n\r\n").encode()
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.path == "/путь?ключ=знач#фраг"
assert msg.url.path == "/путь"
assert msg.url.query == {"ключ": "знач"}
assert msg.url.fragment == "фраг"
def test_parse_uri_utf8_percent_encoded(parser: HttpRequestParser) -> None:
text = (
"GET %s HTTP/1.1\r\n\r\n" % quote("/путь?ключ=знач#фраг", safe="/?=#")
).encode()
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
assert msg.path == quote("/путь?ключ=знач#фраг", safe="/?=#")
assert msg.url == URL("/путь?ключ=знач#фраг")
assert msg.url.path == "/путь"
assert msg.url.query == {"ключ": "знач"}
assert msg.url.fragment == "фраг"
@pytest.mark.skipif(
"HttpRequestParserC" not in dir(aiohttp.http_parser),
reason="C based HTTP parser not available",
)
def test_parse_bad_method_for_c_parser_raises(
loop: asyncio.AbstractEventLoop, protocol: BaseProtocol
) -> None:
payload = b"GET1 /test HTTP/1.1\r\n\r\n"
parser = HttpRequestParserC(
protocol,
loop,
2**16,
max_line_size=8190,
max_field_size=8190,
)
with pytest.raises(aiohttp.http_exceptions.BadStatusLine):
messages, upgrade, tail = parser.feed_data(payload)
class TestParsePayload:
async def test_parse_eof_payload(self, protocol: BaseProtocol) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(out, headers_parser=HeadersParser())
p.feed_data(b"data")
p.feed_eof()
assert out.is_eof()
assert [bytearray(b"data")] == list(out._buffer)
async def test_parse_length_payload_eof(self, protocol: BaseProtocol) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(out, length=4, headers_parser=HeadersParser())
p.feed_data(b"da")
with pytest.raises(http_exceptions.ContentLengthError):
p.feed_eof()
async def test_parse_chunked_payload_size_error(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
with pytest.raises(http_exceptions.TransferEncodingError):
p.feed_data(b"blah\r\n")
assert isinstance(out.exception(), http_exceptions.TransferEncodingError)
async def test_parse_chunked_payload_split_end(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n")
p.feed_data(b"\r\n")
assert out.is_eof()
assert b"asdf" == b"".join(out._buffer)
async def test_parse_chunked_payload_split_end2(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n\r")
p.feed_data(b"\n")
assert out.is_eof()
assert b"asdf" == b"".join(out._buffer)
async def test_parse_chunked_payload_split_end_trailers(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n")
p.feed_data(b"Content-MD5: 912ec803b2ce49e4a541068d495ab570\r\n")
p.feed_data(b"\r\n")
assert out.is_eof()
assert b"asdf" == b"".join(out._buffer)
async def test_parse_chunked_payload_split_end_trailers2(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n")
p.feed_data(b"Content-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r")
p.feed_data(b"\n")
assert out.is_eof()
assert b"asdf" == b"".join(out._buffer)
async def test_parse_chunked_payload_split_end_trailers3(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\nContent-MD5: ")
p.feed_data(b"912ec803b2ce49e4a541068d495ab570\r\n\r\n")
assert out.is_eof()
assert b"asdf" == b"".join(out._buffer)
async def test_parse_chunked_payload_split_end_trailers4(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\nC")
p.feed_data(b"ontent-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r\n")
assert out.is_eof()
assert b"asdf" == b"".join(out._buffer)
async def test_http_payload_parser_length(self, protocol: BaseProtocol) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(out, length=2, headers_parser=HeadersParser())
eof, tail = p.feed_data(b"1245")
assert eof
assert b"12" == out._buffer[0]
assert b"45" == tail
async def test_http_payload_parser_deflate(self, protocol: BaseProtocol) -> None:
# c=compressobj(wbits=15); b''.join([c.compress(b'data'), c.flush()])
COMPRESSED = b"x\x9cKI,I\x04\x00\x04\x00\x01\x9b"
length = len(COMPRESSED)
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(
out, length=length, compression="deflate", headers_parser=HeadersParser()
)
p.feed_data(COMPRESSED)
assert b"data" == out._buffer[0]
assert out.is_eof()
async def test_http_payload_parser_deflate_no_hdrs(
self, protocol: BaseProtocol
) -> None:
"""Tests incorrectly formed data (no zlib headers)."""
# c=compressobj(wbits=-15); b''.join([c.compress(b'data'), c.flush()])
COMPRESSED = b"KI,I\x04\x00"
length = len(COMPRESSED)
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(
out, length=length, compression="deflate", headers_parser=HeadersParser()
)
p.feed_data(COMPRESSED)
assert b"data" == out._buffer[0]
assert out.is_eof()
async def test_http_payload_parser_deflate_light(
self, protocol: BaseProtocol
) -> None:
# c=compressobj(wbits=9); b''.join([c.compress(b'data'), c.flush()])
COMPRESSED = b"\x18\x95KI,I\x04\x00\x04\x00\x01\x9b"
length = len(COMPRESSED)
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(
out, length=length, compression="deflate", headers_parser=HeadersParser()
)
p.feed_data(COMPRESSED)
assert b"data" == out._buffer[0]
assert out.is_eof()
async def test_http_payload_parser_deflate_split(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(
out, compression="deflate", headers_parser=HeadersParser()
)
        # Feeding a single correct first byte is enough to select the
        # proper zlib (header-ful) deflate decompressor.
p.feed_data(b"x")
p.feed_data(b"\x9cKI,I\x04\x00\x04\x00\x01\x9b")
p.feed_eof()
assert b"data" == out._buffer[0]
async def test_http_payload_parser_deflate_split_err(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
p = HttpPayloadParser(
out, compression="deflate", headers_parser=HeadersParser()
)
        # Feeding a single wrong first byte is enough to switch to the
        # raw (header-less) deflate decompressor.
p.feed_data(b"K")
p.feed_data(b"I,I\x04\x00")
p.feed_eof()
assert b"data" == out._buffer[0]

    async def test_http_payload_parser_length_zero(
        self, protocol: BaseProtocol
    ) -> None:
        out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        p = HttpPayloadParser(out, length=0, headers_parser=HeadersParser())
        assert p.done
        assert out.is_eof()

    @pytest.mark.skipif(brotli is None, reason="brotli is not installed")
    async def test_http_payload_brotli(self, protocol: BaseProtocol) -> None:
        compressed = brotli.compress(b"brotli data")
        out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        p = HttpPayloadParser(
            out,
            length=len(compressed),
            compression="br",
            headers_parser=HeadersParser(),
        )
        p.feed_data(compressed)
        assert b"brotli data" == out._buffer[0]
        assert out.is_eof()

    @pytest.mark.skipif(zstandard is None, reason="zstandard is not installed")
    async def test_http_payload_zstandard(self, protocol: BaseProtocol) -> None:
        compressed = zstandard.compress(b"zstd data")
        out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        p = HttpPayloadParser(
            out,
            length=len(compressed),
            compression="zstd",
            headers_parser=HeadersParser(),
        )
        p.feed_data(compressed)
        assert b"zstd data" == out._buffer[0]
        assert out.is_eof()
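

# The tests below drive DeflateBuffer directly with mocked decompressors,
# checking its bookkeeping (flushing, EOF propagation, error wrapping) in
# isolation from any real codec.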
class TestDeflateBuffer:
    async def test_feed_data(self, protocol: BaseProtocol) -> None:
        buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        dbuf = DeflateBuffer(buf, "deflate")
        dbuf.decompressor = mock.Mock()
        dbuf.decompressor.decompress_sync.return_value = b"line"
        # The first byte must be b'x' so that the code keeps the mocked
        # decoder instead of swapping in a raw-deflate one.
        dbuf.feed_data(b"xxxx")
        assert [b"line"] == list(buf._buffer)

    async def test_feed_data_err(self, protocol: BaseProtocol) -> None:
        buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        dbuf = DeflateBuffer(buf, "deflate")
        exc = ValueError()
        dbuf.decompressor = mock.Mock()
        dbuf.decompressor.decompress_sync.side_effect = exc
        with pytest.raises(http_exceptions.ContentEncodingError):
            # Needs more than 4 bytes to trigger the deflate FSM error, and
            # must start with b'x', otherwise the code would replace the
            # mocked decoder.
            dbuf.feed_data(b"xsomedata")

    async def test_feed_eof(self, protocol: BaseProtocol) -> None:
        buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        dbuf = DeflateBuffer(buf, "deflate")
        dbuf.decompressor = mock.Mock()
        dbuf.decompressor.flush.return_value = b"line"
        dbuf.feed_eof()
        assert [b"line"] == list(buf._buffer)
        assert buf._eof
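
    # EOF strictness differs per codec: a "deflate" stream must have reached
    # its end marker by feed_eof() or a ContentEncodingError is raised (next
    # test), while truncated gzip/br/zstd bodies are tolerated, presumably to
    # cope with peers that close the connection without finishing the stream.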

    async def test_feed_eof_err_deflate(self, protocol: BaseProtocol) -> None:
        buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        dbuf = DeflateBuffer(buf, "deflate")
        dbuf.decompressor = mock.Mock()
        dbuf.decompressor.flush.return_value = b"line"
        dbuf.decompressor.eof = False
        with pytest.raises(http_exceptions.ContentEncodingError):
            dbuf.feed_eof()

    async def test_feed_eof_no_err_gzip(self, protocol: BaseProtocol) -> None:
        buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        dbuf = DeflateBuffer(buf, "gzip")
        dbuf.decompressor = mock.Mock()
        dbuf.decompressor.flush.return_value = b"line"
        dbuf.decompressor.eof = False
        dbuf.feed_eof()
        assert [b"line"] == list(buf._buffer)

    async def test_feed_eof_no_err_brotli(self, protocol: BaseProtocol) -> None:
        buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        dbuf = DeflateBuffer(buf, "br")
        dbuf.decompressor = mock.Mock()
        dbuf.decompressor.flush.return_value = b"line"
        dbuf.decompressor.eof = False
        dbuf.feed_eof()
        assert [b"line"] == list(buf._buffer)

    @pytest.mark.skipif(zstandard is None, reason="zstandard is not installed")
    async def test_feed_eof_no_err_zstandard(self, protocol: BaseProtocol) -> None:
        buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        dbuf = DeflateBuffer(buf, "zstd")
        dbuf.decompressor = mock.Mock()
        dbuf.decompressor.flush.return_value = b"line"
        dbuf.decompressor.eof = False
        dbuf.feed_eof()
        assert [b"line"] == list(buf._buffer)

    async def test_empty_body(self, protocol: BaseProtocol) -> None:
        buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
        dbuf = DeflateBuffer(buf, "deflate")
        dbuf.feed_eof()
        assert buf.at_eof()

source_path: ./temp_repos/aiohttp/aiohttp/http_parser.py
test_path: ./temp_repos/aiohttp/tests/test_http_parser.py
repo_name: aiohttp

instruction:
You are an expert Python testing engineer using unittest and unittest.mock.
Task: Write a robust unit test for the class 'RawRequestMessage'.
Context:
- Class Name: RawRequestMessage
- Dependencies to Mock: lax, timer, compression, loop, read_until_eof, protocol, max_field_size, encoding, auto_decompress, code, payload_exception, method, limit, max_line_size, chunked, out, response_with_body, payload, length
- Key Imports: yarl, contextlib, string, streams, helpers, _http_parser, enum, typedefs, http_writer, typing
Requirements:
1. Use 'unittest.mock' library (MagicMock, patch).
2. Mock the external dependencies listed above.
3. Test both success and failure scenarios.
4. Use the AAA (Arrange, Act, Assert) pattern.

meta_class: RawRequestMessage
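
For reference, one possible shape for the requested test follows. This is a minimal sketch, not the dataset's reference answer: the field names come from aiohttp's http_parser.RawRequestMessage (a NamedTuple), the TestRawRequestMessage class and _make_message helper are invented for illustration, and since most of the listed "dependencies to mock" are HttpPayloadParser arguments rather than message fields, only the headers mapping is stubbed with a MagicMock.

import unittest
from unittest import mock

from multidict import CIMultiDict, CIMultiDictProxy
from yarl import URL

from aiohttp.http_parser import RawRequestMessage
from aiohttp.http_writer import HttpVersion


class TestRawRequestMessage(unittest.TestCase):
    def _make_message(self, **overrides):
        # Arrange: plausible defaults for every field of the NamedTuple;
        # individual tests override only what they care about.
        defaults = dict(
            method="GET",
            path="/",
            version=HttpVersion(1, 1),
            headers=CIMultiDictProxy(CIMultiDict(Host="example.com")),
            raw_headers=((b"Host", b"example.com"),),
            should_close=False,
            compression=None,
            upgrade=False,
            chunked=False,
            url=URL("http://example.com/"),
        )
        defaults.update(overrides)
        return RawRequestMessage(**defaults)

    def test_fields_roundtrip(self):
        # Act
        msg = self._make_message(method="POST", chunked=True)
        # Assert: values are stored verbatim and the tuple is immutable.
        self.assertEqual(msg.method, "POST")
        self.assertTrue(msg.chunked)
        with self.assertRaises(AttributeError):
            msg.method = "PUT"  # NamedTuple fields reject assignment

    def test_accepts_mocked_headers(self):
        # Arrange: collaborators such as the headers mapping can be stand-ins,
        # since the message stores whatever it is given.
        fake_headers = mock.MagicMock(name="headers")
        # Act
        msg = self._make_message(headers=fake_headers)
        # Assert
        self.assertIs(msg.headers, fake_headers)

    def test_replace_unknown_field_fails(self):
        # Arrange
        msg = self._make_message()
        # Act / Assert: the failure scenario; _replace() with a field that
        # does not exist raises ValueError.
        with self.assertRaises(ValueError):
            msg._replace(no_such_field=1)


if __name__ == "__main__":
    unittest.main()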