core: bump black from 25.12.0 to 26.1.0 (#19567)

* core: bump black from 25.12.0 to 26.1.0

Bumps [black](https://github.com/psf/black) from 25.12.0 to 26.1.0.
- [Release notes](https://github.com/psf/black/releases)
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
- [Commits](https://github.com/psf/black/compare/25.12.0...26.1.0)

---
updated-dependencies:
- dependency-name: black
  dependency-version: 26.1.0
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>

* lint

Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>

---------

Signed-off-by: dependabot[bot] <support@github.com>
Signed-off-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Marc 'risson' Schmitt <marc.schmitt@risson.space>
This commit is contained in:
dependabot[bot]
2026-01-20 01:12:31 +01:00
committed by GitHub
parent 0b5dde1225
commit a908efb792
32 changed files with 67 additions and 95 deletions

View File

@@ -16,7 +16,7 @@ def backport_is_backchannel(apps: Apps, schema_editor: BaseDatabaseSchemaEditor)
for obj in model.objects.using(db_alias).only("is_backchannel"):
obj.is_backchannel = True
obj.save()
except (DatabaseError, InternalError, ProgrammingError):
except (DatabaseError, InternalError, ProgrammingError):
# The model might not have been migrated yet/doesn't exist yet
# so we don't need to worry about backporting the data
pass

View File

@@ -66,7 +66,7 @@ class SessionStore(SessionBase):
def decode(self, session_data):
try:
return pickle.loads(session_data) # nosec
except (pickle.PickleError, AttributeError, TypeError):
except (pickle.PickleError, AttributeError, TypeError):
# PickleError, ValueError - unpickling exceptions
# AttributeError - can happen when Django model fields (e.g., FileField) are unpickled
# and their descriptors fail to initialize (e.g., missing storage)

View File

@@ -41,7 +41,7 @@ def backfill_certificate_metadata(apps, schema_editor): # noqa: ARG001
"fingerprint_sha1",
]
)
except (ValueError, TypeError, AttributeError):
except (ValueError, TypeError, AttributeError):
pass
# Backfill kid with MD5 for backwards compatibility

View File

@@ -107,7 +107,7 @@ class LicenseKey:
intermediate = load_der_x509_certificate(b64decode(x5c[1]))
our_cert.verify_directly_issued_by(intermediate)
intermediate.verify_directly_issued_by(get_licensing_key())
except (InvalidSignature, TypeError, ValueError, Error):
except (InvalidSignature, TypeError, ValueError, Error):
raise ValidationError("Unable to verify license") from None
try:
body = from_dict(

View File

@@ -37,8 +37,7 @@ class JSONSearchField(StrField):
def json_field_keys(self) -> Generator[tuple[str]]:
with connection.cursor() as cursor:
cursor.execute(
f"""
cursor.execute(f"""
WITH RECURSIVE "{self.name}_keys" AS (
SELECT
ARRAY[jsonb_object_keys("{self.name}")] AS key_path_array,
@@ -62,8 +61,7 @@ class JSONSearchField(StrField):
)
SELECT key_path_array FROM unique_paths;
""" # nosec
)
""") # nosec
return (x[0] for x in cursor.fetchall())
def get_fixed_structure(self, serializer: DjangoQLSchemaSerializer) -> OrderedDict:

View File

@@ -57,7 +57,7 @@ class ASNContextProcessor(MMDBContextProcessor):
self.check_expired()
try:
return self.reader.asn(ip_address)
except (GeoIP2Error, ValueError):
except (GeoIP2Error, ValueError):
return None
def asn_to_dict(self, asn: ASN | None) -> ASNDict | dict:

View File

@@ -58,7 +58,7 @@ class GeoIPContextProcessor(MMDBContextProcessor):
self.check_expired()
try:
return self.reader.city(ip_address)
except (GeoIP2Error, ValueError):
except (GeoIP2Error, ValueError):
return None
def city_to_dict(self, city: City | None) -> GeoIPDict | dict:

View File

@@ -187,7 +187,7 @@ def avatar_mode_url(user: User, mode: str) -> str | None:
cache.set(cache_key_image_url, None, timeout=AVATAR_STATUS_TTL_SECONDS)
return None
res.raise_for_status()
except (Timeout, ConnectionError, HTTPError):
except (Timeout, ConnectionError, HTTPError):
cache.set(cache_key_hostname_available, False, timeout=AVATAR_STATUS_TTL_SECONDS)
return None
except RequestException:

View File

@@ -13,5 +13,5 @@ def load_fixture(path: str, **kwargs) -> str:
fixture = _fixture.read()
try:
return fixture % kwargs
except (TypeError, ValueError):
except (TypeError, ValueError):
return fixture

View File

@@ -72,7 +72,7 @@ class DockerClient(UpstreamDockerClient, BaseClient):
def fetch_state(self) -> OutpostServiceConnectionState:
try:
return OutpostServiceConnectionState(version=self.info()["ServerVersion"], healthy=True)
except (ServiceConnectionInvalid, DockerException):
except (ServiceConnectionInvalid, DockerException):
return OutpostServiceConnectionState(version="", healthy=False)
def __exit__(self, exc_type, exc_value, traceback):

View File

@@ -49,7 +49,7 @@ class KubernetesClient(ApiClient, BaseClient):
api_instance = VersionApi(self)
version: VersionInfo = api_instance.get_code()
return OutpostServiceConnectionState(version=version.git_version, healthy=True)
except (OpenApiException, HTTPError, ServiceConnectionInvalid):
except (OpenApiException, HTTPError, ServiceConnectionInvalid):
return OutpostServiceConnectionState(version="", healthy=False)

View File

@@ -39,7 +39,7 @@ class PolicyBindingModelForeignKey(PrimaryKeyRelatedField):
return self.get_queryset().get_subclass(pk=data)
except ObjectDoesNotExist:
self.fail("does_not_exist", pk_value=data)
except (TypeError, ValueError):
except (TypeError, ValueError):
self.fail("incorrect_type", data_type=type(data).__name__)
def to_representation(self, value):

View File

@@ -121,7 +121,7 @@ def extract_client_auth(request: HttpRequest) -> tuple[str, str]:
try:
user_pass = b64decode(b64_user_pass).decode("utf-8").partition(":")
client_id, _, client_secret = user_pass
except (ValueError, Error):
except (ValueError, Error):
client_id = client_secret = "" # nosec
else:
client_id = request.POST.get("client_id", "")

View File

@@ -329,7 +329,7 @@ class TokenParams:
try:
user, _, password = b64decode(self.client_secret).decode("utf-8").partition(":")
return self.__post_init_client_credentials_creds(request, user, password)
except (ValueError, Error):
except (ValueError, Error):
raise TokenError("invalid_grant") from None
def __post_init_client_credentials_creds(

View File

@@ -266,7 +266,7 @@ class TestLogoutRequestProcessor(TestCase):
decoded_sig = base64.b64decode(signature)
self.assertIsNotNone(decoded_sig)
self.assertGreater(len(decoded_sig), 0)
except (ValueError, TypeError):
except (ValueError, TypeError):
self.fail("Signature is not valid base64")
def test_signature_parameter_ordering(self):
@@ -471,5 +471,5 @@ class TestLogoutRequestProcessor(TestCase):
decoded_sig = base64.b64decode(signature)
self.assertIsNotNone(decoded_sig)
self.assertGreater(len(decoded_sig), 0)
except (ValueError, TypeError):
except (ValueError, TypeError):
self.fail("Signature is not valid base64")

View File

@@ -205,7 +205,7 @@ class SCIMGroupClient(SCIMClient[Group, SCIMProviderGroup, SCIMGroupSchema]):
),
)
return self.patch_compare_users(group)
except (SCIMRequestException, ObjectExistsSyncException):
except (SCIMRequestException, ObjectExistsSyncException):
# Some providers don't support PUT on groups, so this is mainly a fix for the initial
# sync, send patch add requests for all the users the group currently has
return self._update_patch(group, scim_group, connection)

View File

@@ -57,7 +57,7 @@ class SessionMiddleware(UpstreamSessionMiddleware):
try:
session_payload = decode(key, SIGNING_HASH, algorithms=["HS256"])
session_key = session_payload["sid"]
except (KeyError, PyJWTError):
except (KeyError, PyJWTError):
pass
return session_key

View File

@@ -170,7 +170,7 @@ class PytestTestRunner(DiscoverRunner): # pragma: no cover
self.args.append(path)
valid_label_found = True
break
except (TypeError, IndexError):
except (TypeError, IndexError):
continue
if not valid_label_found:

View File

@@ -54,13 +54,11 @@ class SPNEGOView(View):
"if/error.html",
context={
"title": _("SPNEGO authentication required"),
"message": _(
"""
"message": _("""
Make sure you have valid tickets (obtainable via kinit)
and configured the browser correctly.
Please contact your administrator.
"""
),
"""),
},
status=401,
)
@@ -130,7 +128,7 @@ class SPNEGOView(View):
try:
in_token = b64decode(authstr)
except (TypeError, ValueError):
except (TypeError, ValueError):
return self.challenge(request)
with Krb5ConfContext(self.source):

View File

@@ -79,7 +79,7 @@ class LDAPPasswordChanger:
attributes=["pwdProperties"],
)
root_attrs = list(root_attrs)[0]
except (LDAPAttributeError, LDAPUnwillingToPerformResult, KeyError, IndexError):
except (LDAPAttributeError, LDAPUnwillingToPerformResult, KeyError, IndexError):
return False
raw_pwd_properties = root_attrs.get("attributes", {}).get("pwdProperties", None)
if not raw_pwd_properties:
@@ -102,7 +102,7 @@ class LDAPPasswordChanger:
return
try:
self._connection.extend.microsoft.modify_password(user_dn, password)
except (LDAPAttributeError, LDAPUnwillingToPerformResult, LDAPNoSuchAttributeResult):
except (LDAPAttributeError, LDAPUnwillingToPerformResult, LDAPNoSuchAttributeResult):
self._connection.extend.standard.modify_password(user_dn, new_password=password)
def _ad_check_password_existing(self, password: str, user_dn: str) -> bool:

View File

@@ -143,7 +143,7 @@ class Device(CreatedUpdatedModel):
if for_verify:
device_set = device_set.select_for_update()
device = device_set.first()
except (ValueError, LookupError):
except (ValueError, LookupError):
pass
return device

View File

@@ -26,7 +26,7 @@ def convert_string_to_integer_format(apps, schema_editor):
minutes_value = int(td.total_seconds() / 60)
stage.token_expiry = minutes_value
stage.save(using=db_alias)
except (ValueError, TypeError):
except (ValueError, TypeError):
# If the string can't be parsed or converted properly, skip
pass

View File

@@ -24,7 +24,7 @@ class Flag[T]:
flags = {}
try:
flags: dict[str, Any] = get_current_tenant(["flags"]).flags
except (DatabaseError, ProgrammingError, InternalError):
except (DatabaseError, ProgrammingError, InternalError):
pass
value = flags.get(self.__key, None)
if value is None:

View File

@@ -10,8 +10,7 @@ class Migration(BaseMigration):
return not bool(self.cur.rowcount)
def run(self):
self.cur.execute(
"""
self.cur.execute("""
BEGIN TRANSACTION;
CREATE TABLE IF NOT EXISTS authentik_version_history (
id BIGSERIAL PRIMARY KEY,
@@ -20,5 +19,4 @@ class Migration(BaseMigration):
build text NOT NULL
);
COMMIT;
"""
)
""")

View File

@@ -26,13 +26,11 @@ class Migration(BaseMigration):
""",
(datetime.now(), authentik_version(), authentik_build_hash()),
)
self.cur.execute(
"""
self.cur.execute("""
DELETE FROM authentik_version_history WHERE id NOT IN (
SELECT id FROM authentik_version_history
ORDER BY "timestamp" DESC
LIMIT 1000
)
"""
)
""")
self.con.commit()

View File

@@ -30,7 +30,7 @@ def create_anonymous_user(sender, **kwargs):
lookup = {User.USERNAME_FIELD: guardian_settings.ANONYMOUS_USER_NAME}
# fixing #770
User.objects.using(kwargs["using"]).filter(**lookup).only(User.USERNAME_FIELD).get()
except (User.DoesNotExist, DatabaseError):
except (User.DoesNotExist, DatabaseError):
# Handle both cases: user doesn't exist AND table doesn't exist (rollback scenario)
try:
retrieve_anonymous_function = import_string(guardian_settings.GET_INIT_ANONYMOUS_USER)

View File

@@ -194,13 +194,11 @@ class PostgresChannelLoopLayer(BaseChannelLayer):
async with await self.connection() as conn:
async with conn.cursor() as cursor:
await cursor.execute(
sql.SQL(
"""
sql.SQL("""
INSERT INTO {table}
({id}, {channel}, {message}, {expires})
VALUES (%s, %s, %s, %s)
"""
).format(
""").format(
table=sql.Identifier(MESSAGE_TABLE),
id=sql.Identifier("id"),
channel=sql.Identifier("channel"),
@@ -236,18 +234,16 @@ class PostgresChannelLoopLayer(BaseChannelLayer):
q = self.channels[channel]
try:
while True:
(message_id, message) = await q.get()
message_id, message = await q.get()
if message is None:
async with await self.connection() as conn:
async with conn.cursor() as cursor:
await cursor.execute(
sql.SQL(
"""
sql.SQL("""
SELECT {table}.{message}
FROM {table}
WHERE {table}.{id} = %s
"""
).format(
""").format(
table=sql.Identifier(MESSAGE_TABLE),
id=sql.Identifier("id"),
message=sql.Identifier("message"),
@@ -259,7 +255,7 @@ class PostgresChannelLoopLayer(BaseChannelLayer):
continue
message = row[0]
break
except (asyncio.CancelledError, TimeoutError, GeneratorExit):
except (asyncio.CancelledError, TimeoutError, GeneratorExit):
# We assume here that the reason we are cancelled is because the consumer
# is exiting, therefore we need to cleanup by unsubscribe below. Indeed,
# currently the way that Django Channels works, this is a safe assumption.
@@ -296,13 +292,11 @@ class PostgresChannelLoopLayer(BaseChannelLayer):
async with await self.connection() as conn:
async with conn.cursor() as cursor:
await cursor.execute(
sql.SQL(
"""
sql.SQL("""
INSERT INTO {table}
({id}, {group_key}, {channel}, {expires})
VALUES (%s, %s, %s, %s)
"""
).format(
""").format(
table=sql.Identifier(GROUP_CHANNEL_TABLE),
id=sql.Identifier("id"),
group_key=sql.Identifier("group_key"),
@@ -331,14 +325,12 @@ class PostgresChannelLoopLayer(BaseChannelLayer):
async with await self.connection() as conn:
async with conn.cursor() as cursor:
await cursor.execute(
sql.SQL(
"""
sql.SQL("""
DELETE
FROM {table}
WHERE {table}.{group_key} = %s
AND {table}.{channel} = %s
"""
).format(
""").format(
table=sql.Identifier(GROUP_CHANNEL_TABLE),
group_key=sql.Identifier("group_key"),
channel=sql.Identifier("channel"),
@@ -359,13 +351,11 @@ class PostgresChannelLoopLayer(BaseChannelLayer):
async with await self.connection() as conn:
async with conn.cursor() as cursor:
await cursor.execute(
sql.SQL(
"""
sql.SQL("""
SELECT DISTINCT {table}.{channel}
FROM {table}
WHERE {table}.{group_key} = %s
"""
).format(
""").format(
table=sql.Identifier(GROUP_CHANNEL_TABLE),
channel=sql.Identifier("channel"),
group_key=sql.Identifier("group_key"),
@@ -379,13 +369,11 @@ class PostgresChannelLoopLayer(BaseChannelLayer):
]
async with conn.cursor() as cursor:
await cursor.executemany(
sql.SQL(
"""
sql.SQL("""
INSERT INTO {table}
({id}, {channel}, {message}, {expires})
VALUES (%s, %s, %s, %s)
"""
).format(
""").format(
table=sql.Identifier(MESSAGE_TABLE),
id=sql.Identifier("id"),
channel=sql.Identifier("channel"),
@@ -456,7 +444,7 @@ class PostgresChannelLayerReceiver:
while True:
async for notify in conn.notifies(timeout=30):
await self._receive_notify(notify)
except (asyncio.CancelledError, TimeoutError, GeneratorExit):
except (asyncio.CancelledError, TimeoutError, GeneratorExit):
raise
except PsycopgError as exc:
LOGGER.warning("Postgres connection is not healthy", exc=exc)
@@ -469,15 +457,13 @@ class PostgresChannelLayerReceiver:
return
async with conn.cursor() as cursor:
await cursor.execute(
sql.SQL(
"""
sql.SQL("""
DELETE
FROM {table}
WHERE {table}.{channel} = ANY(%s)
AND {table}.{expires} >= %s
RETURNING {table}.{id}, {table}.{channel}, {table}.{message}
"""
).format(
""").format(
table=sql.Identifier(MESSAGE_TABLE),
id=sql.Identifier("id"),
channel=sql.Identifier("channel"),

View File

@@ -329,8 +329,7 @@ class _PostgresConsumer(Consumer):
with self.locks_connection.cursor() as cursor:
cursor.execute(
sql.SQL(
"""
sql.SQL("""
UPDATE {table}
SET {state} = %(state)s, {mtime} = %(mtime)s
WHERE
@@ -341,8 +340,7 @@ class _PostgresConsumer(Consumer):
({table}.{eta} < %(maximum_eta)s OR {table}.{eta} IS NULL)
AND
pg_try_advisory_lock(%(lock_id)s)
"""
).format(
""").format(
table=sql.Identifier(self.query_set.model._meta.db_table),
state=sql.Identifier("state"),
mtime=sql.Identifier("mtime"),

View File

@@ -79,7 +79,7 @@ dependencies = [
dev = [
"aws-cdk-lib==2.235.0",
"bandit==1.9.2",
"black==25.12.0",
"black==26.1.0",
"bpython==0.26",
"codespell==2.4.1",
"colorama==0.4.6",

View File

@@ -59,8 +59,7 @@ if __name__ == "__main__":
config_file_name = "local.env.yml"
with open(config_file_name, "w", encoding="utf-8") as _config:
_config.write(
"""
_config.write("""
# Local authentik configuration overrides
#
# https://docs.goauthentik.io/docs/install-config/configuration/
@@ -71,8 +70,7 @@ if __name__ == "__main__":
# make gen-dev-config
# ```
"""
)
""")
safe_dump(
generate_local_config(),
@@ -80,8 +78,7 @@ if __name__ == "__main__":
default_flow_style=False,
)
print(
f"""
print(f"""
---
Generated configuration file: {config_file_name}
@@ -91,5 +88,4 @@ For more information on how to use this configuration, see:
https://docs.goauthentik.io/docs/install-config/configuration/
---
"""
)
""")

View File

@@ -400,7 +400,7 @@ class SeleniumTestCase(DockerTestCase, StaticLiveServerTestCase):
if host is not None:
try:
inner_html = host.get_attribute("innerHTML") or "<no host>"
except (DetachedShadowRootException, StaleElementReferenceException):
except (DetachedShadowRootException, StaleElementReferenceException):
inner_html = "<stale host>"
raise RuntimeError(

18
uv.lock generated
View File

@@ -394,7 +394,7 @@ requires-dist = [
dev = [
{ name = "aws-cdk-lib", specifier = "==2.235.0" },
{ name = "bandit", specifier = "==1.9.2" },
{ name = "black", specifier = "==25.12.0" },
{ name = "black", specifier = "==26.1.0" },
{ name = "bpython", specifier = "==0.26" },
{ name = "codespell", specifier = "==2.4.1" },
{ name = "colorama", specifier = "==0.4.6" },
@@ -611,7 +611,7 @@ wheels = [
[[package]]
name = "black"
version = "25.12.0"
version = "26.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
@@ -621,14 +621,14 @@ dependencies = [
{ name = "platformdirs" },
{ name = "pytokens" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c4/d9/07b458a3f1c525ac392b5edc6b191ff140b596f9d77092429417a54e249d/black-25.12.0.tar.gz", hash = "sha256:8d3dd9cea14bff7ddc0eb243c811cdb1a011ebb4800a5f0335a01a68654796a7", size = 659264, upload-time = "2025-12-08T01:40:52.501Z" }
sdist = { url = "https://files.pythonhosted.org/packages/13/88/560b11e521c522440af991d46848a2bde64b5f7202ec14e1f46f9509d328/black-26.1.0.tar.gz", hash = "sha256:d294ac3340eef9c9eb5d29288e96dc719ff269a88e27b396340459dd85da4c58", size = 658785, upload-time = "2026-01-18T04:50:11.993Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/35/46/1d8f2542210c502e2ae1060b2e09e47af6a5e5963cb78e22ec1a11170b28/black-25.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:0a0953b134f9335c2434864a643c842c44fba562155c738a2a37a4d61f00cad5", size = 1917015, upload-time = "2025-12-08T01:53:27.987Z" },
{ url = "https://files.pythonhosted.org/packages/41/37/68accadf977672beb8e2c64e080f568c74159c1aaa6414b4cd2aef2d7906/black-25.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2355bbb6c3b76062870942d8cc450d4f8ac71f9c93c40122762c8784df49543f", size = 1741830, upload-time = "2025-12-08T01:54:36.861Z" },
{ url = "https://files.pythonhosted.org/packages/ac/76/03608a9d8f0faad47a3af3a3c8c53af3367f6c0dd2d23a84710456c7ac56/black-25.12.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9678bd991cc793e81d19aeeae57966ee02909877cb65838ccffef24c3ebac08f", size = 1791450, upload-time = "2025-12-08T01:44:52.581Z" },
{ url = "https://files.pythonhosted.org/packages/06/99/b2a4bd7dfaea7964974f947e1c76d6886d65fe5d24f687df2d85406b2609/black-25.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:97596189949a8aad13ad12fcbb4ae89330039b96ad6742e6f6b45e75ad5cfd83", size = 1452042, upload-time = "2025-12-08T01:46:13.188Z" },
{ url = "https://files.pythonhosted.org/packages/b2/7c/d9825de75ae5dd7795d007681b752275ea85a1c5d83269b4b9c754c2aaab/black-25.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:778285d9ea197f34704e3791ea9404cd6d07595745907dd2ce3da7a13627b29b", size = 1267446, upload-time = "2025-12-08T01:46:14.497Z" },
{ url = "https://files.pythonhosted.org/packages/68/11/21331aed19145a952ad28fca2756a1433ee9308079bd03bd898e903a2e53/black-25.12.0-py3-none-any.whl", hash = "sha256:48ceb36c16dbc84062740049eef990bb2ce07598272e673c17d1a7720c71c828", size = 206191, upload-time = "2025-12-08T01:40:50.963Z" },
{ url = "https://files.pythonhosted.org/packages/6a/83/be35a175aacfce4b05584ac415fd317dd6c24e93a0af2dcedce0f686f5d8/black-26.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:9dc8c71656a79ca49b8d3e2ce8103210c9481c57798b48deeb3a8bb02db5f115", size = 1871864, upload-time = "2026-01-18T04:59:47.586Z" },
{ url = "https://files.pythonhosted.org/packages/a5/f5/d33696c099450b1274d925a42b7a030cd3ea1f56d72e5ca8bbed5f52759c/black-26.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b22b3810451abe359a964cc88121d57f7bce482b53a066de0f1584988ca36e79", size = 1701009, upload-time = "2026-01-18T04:59:49.443Z" },
{ url = "https://files.pythonhosted.org/packages/1b/87/670dd888c537acb53a863bc15abbd85b22b429237d9de1b77c0ed6b79c42/black-26.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:53c62883b3f999f14e5d30b5a79bd437236658ad45b2f853906c7cbe79de00af", size = 1767806, upload-time = "2026-01-18T04:59:50.769Z" },
{ url = "https://files.pythonhosted.org/packages/fe/9c/cd3deb79bfec5bcf30f9d2100ffeec63eecce826eb63e3961708b9431ff1/black-26.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:f016baaadc423dc960cdddf9acae679e71ee02c4c341f78f3179d7e4819c095f", size = 1433217, upload-time = "2026-01-18T04:59:52.218Z" },
{ url = "https://files.pythonhosted.org/packages/4e/29/f3be41a1cf502a283506f40f5d27203249d181f7a1a2abce1c6ce188035a/black-26.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:66912475200b67ef5a0ab665011964bf924745103f51977a78b4fb92a9fc1bf0", size = 1245773, upload-time = "2026-01-18T04:59:54.457Z" },
{ url = "https://files.pythonhosted.org/packages/e4/3d/51bdb3ecbfadfaf825ec0c75e1de6077422b4afa2091c6c9ba34fbfc0c2d/black-26.1.0-py3-none-any.whl", hash = "sha256:1054e8e47ebd686e078c0bb0eaf31e6ce69c966058d122f2c0c950311f9f3ede", size = 204010, upload-time = "2026-01-18T04:50:09.978Z" },
]
[[package]]